Posted to commits@hive.apache.org by ha...@apache.org on 2018/06/25 04:40:26 UTC

[28/33] hive git commit: HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
index 7d3c3f4..cffd10b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
@@ -17,15 +17,14 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.UDFType;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 
@@ -38,7 +37,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 @NDV(maxNdv = 1)
 public class GenericUDFCurrentDate extends GenericUDF {
 
-  protected DateWritable currentDate;
+  protected DateWritableV2 currentDate;
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments)
@@ -52,7 +51,7 @@ public class GenericUDFCurrentDate extends GenericUDF {
     if (currentDate == null) {
       Date dateVal =
           Date.valueOf(SessionState.get().getQueryCurrentTimestamp().toString().substring(0, 10));
-      currentDate = new DateWritable(dateVal);
+      currentDate = new DateWritableV2(dateVal);
     }
 
     return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
@@ -63,11 +62,11 @@ public class GenericUDFCurrentDate extends GenericUDF {
     return currentDate;
   }
 
-  public DateWritable getCurrentDate() {
+  public DateWritableV2 getCurrentDate() {
     return currentDate;
   }
 
-  public void setCurrentDate(DateWritable currentDate) {
+  public void setCurrentDate(DateWritableV2 currentDate) {
     this.currentDate = currentDate;
   }
 
@@ -83,7 +82,7 @@ public class GenericUDFCurrentDate extends GenericUDF {
     // Need to preserve currentDate
     GenericUDFCurrentDate other = (GenericUDFCurrentDate) newInstance;
     if (this.currentDate != null) {
-      other.currentDate = new DateWritable(this.currentDate);
+      other.currentDate = new DateWritableV2(this.currentDate);
     }
   }
 }
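
Note on the rewritten initialize() above: it derives the current date by taking the yyyy-MM-dd prefix of the session's query timestamp string and parsing it with the new Date.valueOf. A minimal sketch of that truncate-and-parse step in plain java.time (not part of the patch; LocalDate/LocalDateTime stand in for the new Hive wrapper types):

    import java.time.LocalDate;
    import java.time.LocalDateTime;

    public class CurrentDateSketch {
      public static void main(String[] args) {
        // Stand-in for SessionState.get().getQueryCurrentTimestamp().
        LocalDateTime queryTs = LocalDateTime.of(2018, 6, 25, 4, 40, 26);
        // The patch keeps only the first 10 chars ("yyyy-MM-dd")...
        String datePart = queryTs.toString().substring(0, 10);
        // ...and parses them, analogous to Date.valueOf(datePart).
        LocalDate date = LocalDate.parse(datePart);
        System.out.println(date);  // 2018-06-25
      }
    }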

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
index 9da51c8..d9447f1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
@@ -17,13 +17,14 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.UDFType;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 
@@ -36,7 +37,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 @NDV(maxNdv = 1)
 public class GenericUDFCurrentTimestamp extends GenericUDF {
 
-  protected TimestampWritable currentTimestamp;
+  protected TimestampWritableV2 currentTimestamp;
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments)
@@ -48,7 +49,9 @@ public class GenericUDFCurrentTimestamp extends GenericUDF {
     }
 
     if (currentTimestamp == null) {
-      currentTimestamp = new TimestampWritable(SessionState.get().getQueryCurrentTimestamp());
+      java.sql.Timestamp ts = SessionState.get().getQueryCurrentTimestamp();
+      currentTimestamp = new TimestampWritableV2(
+          Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
     }
 
     return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
@@ -59,11 +62,11 @@ public class GenericUDFCurrentTimestamp extends GenericUDF {
     return currentTimestamp;
   }
 
-  public TimestampWritable getCurrentTimestamp() {
+  public TimestampWritableV2 getCurrentTimestamp() {
     return currentTimestamp;
   }
 
-  public void setCurrentTimestamp(TimestampWritable currentTimestamp) {
+  public void setCurrentTimestamp(TimestampWritableV2 currentTimestamp) {
     this.currentTimestamp = currentTimestamp;
   }
 
@@ -78,7 +81,7 @@ public class GenericUDFCurrentTimestamp extends GenericUDF {
     // Need to preserve currentTimestamp
     GenericUDFCurrentTimestamp other = (GenericUDFCurrentTimestamp) newInstance;
     if (this.currentTimestamp != null) {
-      other.currentTimestamp = new TimestampWritable(this.currentTimestamp);
+      other.currentTimestamp = new TimestampWritableV2(this.currentTimestamp);
     }
   }
 }
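
Note why the rewritten branch above passes both getTime() and getNanos() into Timestamp.ofEpochMilli(millis, nanos): java.sql.Timestamp keeps only millisecond precision in getTime(), while the full fractional second lives in getNanos(). A small standalone check (not part of the patch):

    import java.sql.Timestamp;

    public class NanosSketch {
      public static void main(String[] args) {
        Timestamp ts = Timestamp.valueOf("2018-06-25 04:40:26.123456789");
        // getTime() truncates the fraction to milliseconds...
        System.out.println(ts.getTime() % 1000);  // 123
        // ...while getNanos() still carries all nine digits.
        System.out.println(ts.getNanos());        // 123456789
      }
    }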

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
index b73893d..f5c4eb5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
@@ -17,9 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -28,8 +27,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateTimestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -57,8 +56,8 @@ public class GenericUDFDate extends GenericUDF {
   private transient PrimitiveCategory inputType;
   private transient PrimitiveObjectInspector argumentOI;
   private transient DateParser dateParser = new DateParser();
-  private transient final DateWritable output = new DateWritable();
-  private transient final Date date = new Date(0);
+  private transient final DateWritableV2 output = new DateWritableV2();
+  private transient final Date date = new Date();
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
@@ -117,13 +116,13 @@ public class GenericUDFDate extends GenericUDF {
       }
       break;
     case TIMESTAMP:
-      Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get()))
+      Timestamp ts = ((TimestampWritableV2) timestampConverter.convert(arguments[0].get()))
           .getTimestamp();
-      output.set(DateWritable.millisToDays(ts.getTime()));
+      output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
       break;
     case TIMESTAMPLOCALTZ:
     case DATE:
-      DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
+      DateWritableV2 dw = (DateWritableV2) dateWritableConverter.convert(arguments[0].get());
       output.set(dw);
       break;
     default:
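
The TIMESTAMP branch now goes through ts.toEpochMilli() and DateWritableV2.millisToDays. A sketch of what that millis-to-days step has to do (assumed behavior, not the actual DateWritableV2 source): divide by the day length with flooring, so pre-epoch instants still land on the correct day.

    public class MillisToDaysSketch {
      public static void main(String[] args) {
        final long millisPerDay = 24L * 60 * 60 * 1000;
        long ts = -1L;  // 1969-12-31T23:59:59.999Z, one millisecond before the epoch
        // Plain long division would give day 0 (1970-01-01); flooring gives -1.
        System.out.println(ts / millisPerDay);                   // 0  (wrong day)
        System.out.println(Math.floorDiv(ts, millisPerDay));     // -1 (1969-12-31)
        System.out.println(java.time.LocalDate.ofEpochDay(-1));  // 1969-12-31
      }
    }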

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
index 8ba103b..be7bd17 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
@@ -17,9 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -30,9 +29,9 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateAddColScal
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateAddScalarCol;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
@@ -65,12 +64,12 @@ import org.apache.hive.common.util.DateParser;
 @VectorizedExpressions({VectorUDFDateAddColScalar.class, VectorUDFDateAddScalarCol.class, VectorUDFDateAddColCol.class})
 public class GenericUDFDateAdd extends GenericUDF {
   private transient final DateParser dateParser = new DateParser();
-  private transient final Date dateVal = new Date(0);
+  private transient final Date dateVal = new Date();
   private transient Converter dateConverter;
   private transient Converter daysConverter;
   private transient PrimitiveCategory inputType1;
   private transient PrimitiveCategory inputType2;
-  private final DateWritable output = new DateWritable();
+  private final DateWritableV2 output = new DateWritableV2();
   protected int signModifier = 1;  // 1 for addition, -1 for subtraction
 
   @Override
@@ -163,7 +162,7 @@ public class GenericUDFDateAdd extends GenericUDF {
       return null;
     }
 
-    // Convert the first param into a DateWritable value
+    // Convert the first param into a DateWritableV2 value
     switch (inputType1) {
     case STRING:
       String dateString = dateConverter.convert(arguments[0].get()).toString();
@@ -174,12 +173,12 @@ public class GenericUDFDateAdd extends GenericUDF {
       }
       break;
     case TIMESTAMP:
-      Timestamp ts = ((TimestampWritable) dateConverter.convert(arguments[0].get()))
+      Timestamp ts = ((TimestampWritableV2) dateConverter.convert(arguments[0].get()))
         .getTimestamp();
-      output.set(DateWritable.millisToDays(ts.getTime()));
+      output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
       break;
     case DATE:
-      DateWritable dw = (DateWritable) dateConverter.convert(arguments[0].get());
+      DateWritableV2 dw = (DateWritableV2) dateConverter.convert(arguments[0].get());
       output.set(dw.getDays());
       break;
     default:
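
After the conversion above, the UDF holds the input as a day count and applies the offset with signModifier (1 for date_add, -1 for date_sub). The same arithmetic in java.time terms (illustration only):

    import java.time.LocalDate;

    public class DateAddSketch {
      static LocalDate addDays(LocalDate d, int days, int signModifier) {
        // Whole-day arithmetic on the epoch-day number, as in the UDF.
        return LocalDate.ofEpochDay(d.toEpochDay() + (long) signModifier * days);
      }

      public static void main(String[] args) {
        LocalDate d = LocalDate.parse("2015-01-14");
        System.out.println(addDays(d, 7, 1));   // 2015-01-21 (date_add)
        System.out.println(addDays(d, 7, -1));  // 2015-01-07 (date_sub)
      }
    }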

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
index e9cbcf7..00386c8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
@@ -17,11 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Timestamp;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.sql.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -32,9 +29,9 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateDiffColCol
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateDiffColScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateDiffScalarCol;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
@@ -43,6 +40,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.Pr
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.io.IntWritable;
 
 import javax.annotation.Nullable;
@@ -65,7 +63,6 @@ import javax.annotation.Nullable;
         + "  1")
 @VectorizedExpressions({VectorUDFDateDiffColScalar.class, VectorUDFDateDiffColCol.class, VectorUDFDateDiffScalarCol.class})
 public class GenericUDFDateDiff extends GenericUDF {
-  private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private transient Converter inputConverter1;
   private transient Converter inputConverter2;
   private IntWritable output = new IntWritable();
@@ -116,21 +113,25 @@ public class GenericUDFDateDiff extends GenericUDF {
     case CHAR:
       String dateString = converter.convert(argument.get()).toString();
       try {
-        return new Date(formatter.parse(dateString).getTime());
-      } catch (ParseException e) {
+        return Date.valueOf(dateString);
+      } catch (IllegalArgumentException e) {
+        Timestamp ts = PrimitiveObjectInspectorUtils.getTimestampFromString(dateString);
+        if (ts != null) {
+          return Date.ofEpochMilli(ts.toEpochMilli());
+        }
         return null;
       }
     case TIMESTAMP:
-      Timestamp ts = ((TimestampWritable) converter.convert(argument.get()))
+      Timestamp ts = ((TimestampWritableV2) converter.convert(argument.get()))
         .getTimestamp();
-      return new Date(ts.getTime());
+      return Date.ofEpochMilli(ts.toEpochMilli());
     case DATE:
-      DateWritable dw = (DateWritable) converter.convert(argument.get());
+      DateWritableV2 dw = (DateWritableV2) converter.convert(argument.get());
       return dw.get();
     case TIMESTAMPLOCALTZ:
       TimestampTZ tsz = ((TimestampLocalTZWritable) converter.convert(argument.get()))
           .getTimestampTZ();
-      return new Date(tsz.getEpochSecond() * 1000l);
+      return Date.ofEpochMilli(tsz.getEpochSecond() * 1000l);
     default:
       throw new UDFArgumentException(
         "TO_DATE() only takes STRING/TIMESTAMP/TIMESTAMPLOCALTZ types, got " + inputType);
@@ -175,7 +176,7 @@ public class GenericUDFDateDiff extends GenericUDF {
       return null;
     }
 
-    result.set(DateWritable.dateToDays(date) - DateWritable.dateToDays(date2));
+    result.set(DateWritableV2.dateToDays(date) - DateWritableV2.dateToDays(date2));
     return result;
   }
 }
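
Two things changed above: the STRING/CHAR branch now tries a strict date parse and falls back to a timestamp parse instead of going through SimpleDateFormat, and the final result is a subtraction of day numbers. A compact java.time rendering of both (illustration only; getTimestampFromString is Hive's fallback, mimicked here with LocalDateTime):

    import java.time.LocalDate;
    import java.time.LocalDateTime;
    import java.time.format.DateTimeParseException;

    public class DateDiffSketch {
      static LocalDate parseDate(String s) {
        try {
          return LocalDate.parse(s);                 // like Date.valueOf(s)
        } catch (DateTimeParseException e) {
          try {                                      // fallback: treat it as a timestamp
            return LocalDateTime.parse(s.replace(' ', 'T')).toLocalDate();
          } catch (DateTimeParseException e2) {
            return null;                             // the UDF also yields null here
          }
        }
      }

      public static void main(String[] args) {
        LocalDate d1 = parseDate("2009-07-30");
        LocalDate d2 = parseDate("2009-07-29 10:30:00");
        // datediff = difference of day numbers, as in DateWritableV2.dateToDays
        System.out.println(d1.toEpochDay() - d2.toEpochDay());  // 1
      }
    }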

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
index 6b775d6..6d3e86f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
@@ -21,8 +21,10 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
 
 import java.text.SimpleDateFormat;
-import java.util.Date;
+import java.util.TimeZone;
 
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
@@ -44,14 +46,15 @@ import org.apache.hadoop.io.Text;
 @Description(name = "date_format", value = "_FUNC_(date/timestamp/string, fmt) - converts a date/timestamp/string "
     + "to a value of string in the format specified by the date format fmt.",
     extended = "Supported formats are SimpleDateFormat formats - "
-    + "https://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html. "
-    + "Second argument fmt should be constant.\n"
-    + "Example: > SELECT _FUNC_('2015-04-08', 'y');\n '2015'")
+        + "https://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html. "
+        + "Second argument fmt should be constant.\n"
+        + "Example: > SELECT _FUNC_('2015-04-08', 'y');\n '2015'")
 public class GenericUDFDateFormat extends GenericUDF {
   private transient Converter[] tsConverters = new Converter[2];
   private transient PrimitiveCategory[] tsInputTypes = new PrimitiveCategory[2];
   private transient Converter[] dtConverters = new Converter[2];
   private transient PrimitiveCategory[] dtInputTypes = new PrimitiveCategory[2];
+  private final java.util.Date date = new java.util.Date();
   private final Text output = new Text();
   private transient SimpleDateFormat formatter;
 
@@ -77,6 +80,7 @@ public class GenericUDFDateFormat extends GenericUDF {
       if (fmtStr != null) {
         try {
           formatter = new SimpleDateFormat(fmtStr);
+          formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
         } catch (IllegalArgumentException e) {
           // ignore
         }
@@ -97,14 +101,16 @@ public class GenericUDFDateFormat extends GenericUDF {
     }
     // the function should support both short date and full timestamp format
     // time part of the timestamp should not be skipped
-    Date date = getTimestampValue(arguments, 0, tsConverters);
-    if (date == null) {
-      date = getDateValue(arguments, 0, dtInputTypes, dtConverters);
-      if (date == null) {
+    Timestamp ts = getTimestampValue(arguments, 0, tsConverters);
+    if (ts == null) {
+      Date d = getDateValue(arguments, 0, dtInputTypes, dtConverters);
+      if (d == null) {
         return null;
       }
+      ts = Timestamp.ofEpochMilli(d.toEpochMilli());
     }
 
+    date.setTime(ts.toEpochMilli());
     String res = formatter.format(date);
     if (res == null) {
       return null;
@@ -122,4 +128,4 @@ public class GenericUDFDateFormat extends GenericUDF {
   protected String getFuncName() {
     return "date_format";
   }
-}
+}
\ No newline at end of file
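
The functional change above is formatter.setTimeZone(TimeZone.getTimeZone("UTC")): once the value being formatted is a UTC-based epoch-millis reading, the formatter must render it in UTC too, or the printed day can shift. A standalone demonstration (not part of the patch):

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class DateFormatTzSketch {
      public static void main(String[] args) {
        Date epoch = new Date(0L);  // 1970-01-01T00:00:00Z
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        fmt.setTimeZone(TimeZone.getTimeZone("GMT-8"));
        System.out.println(fmt.format(epoch));  // 1969-12-31 16:00:00  (day shifted)
        fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
        System.out.println(fmt.format(epoch));  // 1970-01-01 00:00:00
      }
    }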

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
index eaab703..bcc4114 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
@@ -17,14 +17,14 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.text.SimpleDateFormat;
+
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateSubColCol;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateSubColScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateSubScalarCol;
 
-import java.text.SimpleDateFormat;
-
 /**
  * UDFDateSub.
  *

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
index 8691ed1..67aec82 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
@@ -17,23 +17,23 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Timestamp;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
 import java.util.TimeZone;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.common.type.TimestampTZUtil;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TextConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @Description(name = "from_utc_timestamp",
              value = "from_utc_timestamp(timestamp, string timezone) - "
@@ -45,7 +45,6 @@ public class GenericUDFFromUtcTimestamp extends GenericUDF {
   private transient PrimitiveObjectInspector[] argumentOIs;
   private transient TimestampConverter timestampConverter;
   private transient TextConverter textConverter;
-  private transient SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
   private transient TimeZone tzUTC = TimeZone.getTimeZone("UTC");
 
   @Override
@@ -70,26 +69,6 @@ public class GenericUDFFromUtcTimestamp extends GenericUDF {
     return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
   }
 
-  /**
-   * Parse the timestamp string using the input TimeZone.
-   * This does not parse fractional seconds.
-   * @param tsString
-   * @param tz
-   * @return
-   */
-  protected Timestamp timestampFromString(String tsString, TimeZone tz) {
-    dateFormat.setTimeZone(tz);
-    try {
-      java.util.Date date = dateFormat.parse(tsString);
-      if (date == null) {
-        return null;
-      }
-      return new Timestamp(date.getTime());
-    } catch (ParseException err) {
-      return null;
-    }
-  }
-
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
     Object o0 = arguments[0].get();
@@ -106,7 +85,7 @@ public class GenericUDFFromUtcTimestamp extends GenericUDF {
       return null;
     }
 
-    Timestamp inputTs = ((TimestampWritable) converted_o0).getTimestamp();
+    Timestamp inputTs = ((TimestampWritableV2) converted_o0).getTimestamp();
 
     String tzStr = textConverter.convert(o1).toString();
     TimeZone timezone = TimeZone.getTimeZone(tzStr);
@@ -123,21 +102,15 @@ public class GenericUDFFromUtcTimestamp extends GenericUDF {
 
     // inputTs is the year/month/day/hour/minute/second in the local timezone.
     // For this UDF we want it in the timezone represented by fromTz
-    Timestamp fromTs = timestampFromString(inputTs.toString(), fromTz);
+    TimestampTZ fromTs = TimestampTZUtil.parse(inputTs.toString(), fromTz.toZoneId());
     if (fromTs == null) {
       return null;
     }
 
     // Now output this timestamp's millis value to the equivalent toTz.
-    dateFormat.setTimeZone(toTz);
-    Timestamp result = Timestamp.valueOf(dateFormat.format(fromTs));
-
-    if (inputTs.getNanos() != 0) {
-      result.setNanos(inputTs.getNanos());
-    }
-
+    Timestamp result = Timestamp.valueOf(
+        fromTs.getZonedDateTime().withZoneSameInstant(toTz.toZoneId()).toLocalDateTime().toString());
     return result;
-
   }
 
   @Override
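
The rewritten evaluate() replaces the hand-rolled SimpleDateFormat round-trip (which dropped fractional seconds) with TimestampTZUtil.parse plus a ZonedDateTime shift. The core of that conversion in plain java.time (illustration only):

    import java.time.LocalDateTime;
    import java.time.ZoneId;

    public class FromUtcTimestampSketch {
      static LocalDateTime shift(String ts, String fromTz, String toTz) {
        return LocalDateTime.parse(ts.replace(' ', 'T'))
            .atZone(ZoneId.of(fromTz))             // wall clock read in fromTz
            .withZoneSameInstant(ZoneId.of(toTz))  // same instant, rendered in toTz
            .toLocalDateTime();
      }

      public static void main(String[] args) {
        // from_utc_timestamp('2018-06-25 04:40:26.123', 'America/Los_Angeles')
        System.out.println(shift("2018-06-25 04:40:26.123", "UTC", "America/Los_Angeles"));
        // 2018-06-24T21:40:26.123 -- fractional seconds survive the shift
      }
    }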

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java
index d739af9..733fe63 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java
@@ -20,13 +20,14 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorInBloomFilterColDynamicValue;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -37,10 +38,8 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hive.common.util.BloomKFilter;
 
-import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.sql.Timestamp;
 
 /**
  * GenericUDF to lookup a value in BloomFilter
@@ -147,13 +146,13 @@ public class GenericUDFInBloomFilter extends GenericUDF {
         int startIdx = vDecimal.toBytes(scratchBuffer);
         return bloomFilter.testBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
       case DATE:
-        DateWritable vDate = ((DateObjectInspector) valObjectInspector).
+        DateWritableV2 vDate = ((DateObjectInspector) valObjectInspector).
                 getPrimitiveWritableObject(arguments[0].get());
         return bloomFilter.testLong(vDate.getDays());
       case TIMESTAMP:
         Timestamp vTimeStamp = ((TimestampObjectInspector) valObjectInspector).
                 getPrimitiveJavaObject(arguments[0].get());
-        return bloomFilter.testLong(vTimeStamp.getTime());
+        return bloomFilter.testLong(vTimeStamp.toEpochMilli());
       case CHAR:
         Text vChar = ((HiveCharObjectInspector) valObjectInspector).
                 getPrimitiveWritableObject(arguments[0].get()).getStrippedValue();
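
vTimeStamp.toEpochMilli() replaces vTimeStamp.getTime() as the long fed to the bloom filter. The distinction matters beyond naming: java.sql.Timestamp resolves its wall-clock value in the JVM's default zone, whereas the new type is defined against UTC, so the hashed value no longer depends on where the query runs. A standalone comparison (not part of the patch):

    import java.sql.Timestamp;
    import java.time.LocalDateTime;
    import java.time.ZoneOffset;
    import java.util.TimeZone;

    public class EpochMilliSketch {
      public static void main(String[] args) {
        TimeZone.setDefault(TimeZone.getTimeZone("GMT-8"));
        // Legacy: wall clock interpreted in the default zone.
        long legacy = Timestamp.valueOf("1970-01-01 00:00:00").getTime();
        // New semantics: the same wall clock pinned to UTC.
        long utc = LocalDateTime.parse("1970-01-01T00:00:00")
            .toInstant(ZoneOffset.UTC).toEpochMilli();
        System.out.println(legacy + " vs " + utc);  // 28800000 vs 0
      }
    }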

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java
index 238eff9..dceace5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java
@@ -20,9 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic;
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.DATE_GROUP;
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP;
-import java.util.Calendar;
-import java.util.Date;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -31,7 +30,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.C
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
-import org.apache.hive.common.util.DateUtils;
 
 /**
  * GenericUDFLastDay.
@@ -48,7 +46,7 @@ import org.apache.hive.common.util.DateUtils;
 public class GenericUDFLastDay extends GenericUDF {
   private transient Converter[] converters = new Converter[1];
   private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[1];
-  private final Calendar calendar = Calendar.getInstance();
+  private final Date date = new Date();
   private final Text output = new Text();
 
   @Override
@@ -67,14 +65,13 @@ public class GenericUDFLastDay extends GenericUDF {
 
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    Date date = getDateValue(arguments, 0, inputTypes, converters);
-    if (date == null) {
+    Date d = getDateValue(arguments, 0, inputTypes, converters);
+    if (d == null) {
       return null;
     }
 
-    lastDay(date);
-    Date newDate = calendar.getTime();
-    output.set(DateUtils.getDateFormat().format(newDate));
+    lastDay(d);
+    output.set(date.toString());
     return output;
   }
 
@@ -88,10 +85,9 @@ public class GenericUDFLastDay extends GenericUDF {
     return "last_day";
   }
 
-  protected Calendar lastDay(Date d) {
-    calendar.setTime(d);
-    int maxDd = calendar.getActualMaximum(Calendar.DAY_OF_MONTH);
-    calendar.set(Calendar.DAY_OF_MONTH, maxDd);
-    return calendar;
+  protected Date lastDay(Date d) {
+    date.setTimeInDays(d.toEpochDay());
+    date.setDayOfMonth(date.lengthOfMonth());
+    return date;
   }
 }
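
The Calendar-based lastDay() becomes two calls on the new Date type: rebase to the input's epoch day, then clamp day-of-month to lengthOfMonth(). The equivalent in java.time (illustration only):

    import java.time.LocalDate;

    public class LastDaySketch {
      public static void main(String[] args) {
        LocalDate d = LocalDate.parse("2016-02-10");
        // Same idea as date.setDayOfMonth(date.lengthOfMonth()).
        System.out.println(d.withDayOfMonth(d.lengthOfMonth()));  // 2016-02-29
      }
    }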

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java
index bf2ec82..27c3bf8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java
@@ -19,8 +19,7 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 
-import java.sql.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
@@ -218,11 +217,12 @@ class MaskTransformer extends AbstractTransformer {
 
   @Override
   Date transform(final Date value) {
+    int actualMonthValue = maskedMonthValue + 1;
     int year  = maskedYearValue  == UNMASKED_VAL ? value.getYear()  : maskedYearValue;
-    int month = maskedMonthValue == UNMASKED_VAL ? value.getMonth() : maskedMonthValue;
-    int day   = maskedDayValue   == UNMASKED_VAL ? value.getDate()  : maskedDayValue;
+    int month = maskedMonthValue == UNMASKED_VAL ? value.getMonth() : actualMonthValue;
+    int day   = maskedDayValue   == UNMASKED_VAL ? value.getDay()  : maskedDayValue;
 
-    return new Date(year, month, day);
+    return Date.of(year, month, day);
   }
 
   protected int transformChar(final int c) {
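
The "maskedMonthValue + 1" above compensates for a numbering change: java.sql.Date months are 0-based, while the new Date.of(year, month, day) takes a 1-based month. A quick check of the two conventions (not part of the patch; LocalDate stands in for the new type):

    public class MonthBaseSketch {
      public static void main(String[] args) {
        // Deprecated java.sql.Date constructor: year-1900, 0-based month.
        @SuppressWarnings("deprecation")
        int legacy = new java.sql.Date(118, 0, 15).getMonth();             // 0 == January
        int modern = java.time.LocalDate.of(2018, 1, 15).getMonthValue(); // 1 == January
        System.out.println(legacy + " vs " + modern);
      }
    }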

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java
index 8b1e988..a068541 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java
@@ -18,9 +18,8 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-
 import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
index d04e135..e0db417 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
@@ -29,8 +29,10 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO
 
 import java.math.BigDecimal;
 import java.util.Calendar;
-import java.util.Date;
+import java.util.TimeZone;
 
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -59,12 +61,13 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
     + " Example:\n"
     + "  > SELECT _FUNC_('1997-02-28 10:30:00', '1996-10-30');\n 3.94959677")
 public class GenericUDFMonthsBetween extends GenericUDF {
+
   private transient Converter[] tsConverters = new Converter[2];
   private transient PrimitiveCategory[] tsInputTypes = new PrimitiveCategory[2];
   private transient Converter[] dtConverters = new Converter[2];
   private transient PrimitiveCategory[] dtInputTypes = new PrimitiveCategory[2];
-  private final Calendar cal1 = Calendar.getInstance();
-  private final Calendar cal2 = Calendar.getInstance();
+  private final Calendar cal1 = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  private final Calendar cal2 = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
   private final DoubleWritable output = new DoubleWritable();
   private boolean isRoundOffNeeded = true;
 
@@ -103,31 +106,33 @@ public class GenericUDFMonthsBetween extends GenericUDF {
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
     // the function should support both short date and full timestamp format
     // time part of the timestamp should not be skipped
-    Date date1 = getTimestampValue(arguments, 0, tsConverters);
+    Timestamp date1 = getTimestampValue(arguments, 0, tsConverters);
     if (date1 == null) {
-      date1 = getDateValue(arguments, 0, dtInputTypes, dtConverters);
-      if (date1 == null) {
+      Date date = getDateValue(arguments, 0, dtInputTypes, dtConverters);
+      if (date == null) {
         return null;
       }
+      date1 = Timestamp.ofEpochMilli(date.toEpochMilli());
     }
 
-    Date date2 = getTimestampValue(arguments, 1, tsConverters);
+    Timestamp date2 = getTimestampValue(arguments, 1, tsConverters);
     if (date2 == null) {
-      date2 = getDateValue(arguments, 1, dtInputTypes, dtConverters);
-      if (date2 == null) {
+      Date date = getDateValue(arguments, 1, dtInputTypes, dtConverters);
+      if (date == null) {
         return null;
       }
+      date2 = Timestamp.ofEpochMilli(date.toEpochMilli());
     }
 
-    cal1.setTime(date1);
-    cal2.setTime(date2);
+    cal1.setTimeInMillis(date1.toEpochMilli());
+    cal2.setTimeInMillis(date2.toEpochMilli());
 
     // skip day/time part if both dates are end of the month
     // or the same day of the month
     int monDiffInt = (cal1.get(YEAR) - cal2.get(YEAR)) * 12 + (cal1.get(MONTH) - cal2.get(MONTH));
     if (cal1.get(DATE) == cal2.get(DATE)
         || (cal1.get(DATE) == cal1.getActualMaximum(DATE) && cal2.get(DATE) == cal2
-            .getActualMaximum(DATE))) {
+        .getActualMaximum(DATE))) {
       output.set(monDiffInt);
       return output;
     }
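
Two edits above carry the semantics: timestamps now arrive as the new UTC-based Timestamp, and both Calendars are pinned to UTC so setTimeInMillis() lands on the intended civil date. The whole-month part of the computation, standalone (illustration only; the UDF then adjusts by the day/time remainder to reach values like 3.94959677):

    import java.util.Calendar;
    import java.util.TimeZone;

    public class MonthsBetweenSketch {
      public static void main(String[] args) {
        Calendar cal1 = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        Calendar cal2 = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        cal1.clear();
        cal1.set(1997, Calendar.FEBRUARY, 28, 10, 30, 0);
        cal2.clear();
        cal2.set(1996, Calendar.OCTOBER, 30);
        int monDiffInt = (cal1.get(Calendar.YEAR) - cal2.get(Calendar.YEAR)) * 12
            + (cal1.get(Calendar.MONTH) - cal2.get(Calendar.MONTH));
        System.out.println(monDiffInt);  // 4, before the fractional day/time adjustment
      }
    }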

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java
index e74bae3..c700797 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java
@@ -28,9 +28,7 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP;
 
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -40,7 +38,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.C
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
-import org.apache.hive.common.util.DateUtils;
+
+import java.util.Calendar;
 
 /**
  * GenericUDFNextDay.
@@ -55,9 +54,10 @@ import org.apache.hive.common.util.DateUtils;
         + " 'yyyy-MM-dd'. day_of_week is day of the week (e.g. Mo, tue, FRIDAY)."
         + "Example:\n " + " > SELECT _FUNC_('2015-01-14', 'TU') FROM src LIMIT 1;\n" + " '2015-01-20'")
 public class GenericUDFNextDay extends GenericUDF {
+
   private transient Converter[] converters = new Converter[2];
   private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[2];
-  private final Calendar calendar = Calendar.getInstance();
+  private final Date date = new Date();
   private final Text output = new Text();
   private transient int dayOfWeekIntConst;
   private transient boolean isDayOfWeekConst;
@@ -98,14 +98,13 @@ public class GenericUDFNextDay extends GenericUDF {
       return null;
     }
 
-    Date date = getDateValue(arguments, 0, inputTypes, converters);
-    if (date == null) {
+    Date d = getDateValue(arguments, 0, inputTypes, converters);
+    if (d == null) {
       return null;
     }
 
-    nextDay(date, dayOfWeekInt);
-    Date newDate = calendar.getTime();
-    output.set(DateUtils.getDateFormat().format(newDate));
+    nextDay(d, dayOfWeekInt);
+    output.set(date.toString());
     return output;
   }
 
@@ -119,10 +118,10 @@ public class GenericUDFNextDay extends GenericUDF {
     return "next_day";
   }
 
-  protected Calendar nextDay(Date date, int dayOfWeek) {
-    calendar.setTime(date);
+  protected Date nextDay(Date d, int dayOfWeek) {
+    date.setTimeInDays(d.toEpochDay());
 
-    int currDayOfWeek = calendar.get(Calendar.DAY_OF_WEEK);
+    int currDayOfWeek = date.getDayOfWeek();
 
     int daysToAdd;
     if (currDayOfWeek < dayOfWeek) {
@@ -131,9 +130,9 @@ public class GenericUDFNextDay extends GenericUDF {
       daysToAdd = 7 - currDayOfWeek + dayOfWeek;
     }
 
-    calendar.add(Calendar.DATE, daysToAdd);
+    date.setTimeInDays(date.toEpochDay() + daysToAdd);
 
-    return calendar;
+    return date;
   }
 
   protected int getIntDayOfWeek(String dayOfWeek) throws UDFArgumentException {
@@ -164,6 +163,7 @@ public class GenericUDFNextDay extends GenericUDF {
     return -1;
   }
 
+
   public static enum DayOfWeek {
     MON("MO", "MON", "MONDAY"), TUE("TU", "TUE", "TUESDAY"), WED("WE", "WED", "WEDNESDAY"), THU(
         "TH", "THU", "THURSDAY"), FRI("FR", "FRI", "FRIDAY"), SAT("SA", "SAT", "SATURDAY"), SUN(
@@ -201,4 +201,5 @@ public class GenericUDFNextDay extends GenericUDF {
       return fullName.equalsIgnoreCase(dayOfWeek);
     }
   }
+
 }
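
The rewritten nextDay() swaps Calendar for plain epoch-day arithmetic: compute the gap to the requested weekday, wrapping by 7 so the result is always strictly in the future. The same logic over java.time (illustration only; weekday numbering here is ISO, 1=Monday, which may differ from Date.getDayOfWeek()):

    import java.time.DayOfWeek;
    import java.time.LocalDate;

    public class NextDaySketch {
      static LocalDate nextDay(LocalDate d, int target) {  // target: 1=Mon .. 7=Sun
        int curr = d.getDayOfWeek().getValue();
        int daysToAdd = curr < target ? target - curr : 7 - curr + target;
        return d.plusDays(daysToAdd);  // like setTimeInDays(toEpochDay() + daysToAdd)
      }

      public static void main(String[] args) {
        // SELECT next_day('2015-01-14', 'TU')  ->  '2015-01-20'
        System.out.println(nextDay(LocalDate.parse("2015-01-14"), DayOfWeek.TUESDAY.getValue()));
      }
    }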

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
index e1673b2..076ca51 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
@@ -18,23 +18,23 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -58,8 +58,8 @@ public class GenericUDFOPDTIMinus extends GenericUDFBaseDTI {
   protected transient Converter dt1Converter;
   protected transient Converter dt2Converter;
 
-  protected transient DateWritable dateResult = new DateWritable();
-  protected transient TimestampWritable timestampResult = new TimestampWritable();
+  protected transient DateWritableV2 dateResult = new DateWritableV2();
+  protected transient TimestampWritableV2 timestampResult = new TimestampWritableV2();
   protected transient HiveIntervalYearMonthWritable intervalYearMonthResult =
       new HiveIntervalYearMonthWritable();
   protected transient HiveIntervalDayTimeWritable intervalDayTimeResult =
@@ -222,7 +222,7 @@ public class GenericUDFOPDTIMinus extends GenericUDFBaseDTI {
     }
   }
 
-  protected DateWritable handleDateResult(Date result) {
+  protected DateWritableV2 handleDateResult(Date result) {
     if (result == null) {
       return null;
     }
@@ -230,7 +230,7 @@ public class GenericUDFOPDTIMinus extends GenericUDFBaseDTI {
     return dateResult;
   }
 
-  protected TimestampWritable handleTimestampResult(Timestamp result) {
+  protected TimestampWritableV2 handleTimestampResult(Timestamp result) {
     if (result == null) {
       return null;
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
index a57b373..9295c8f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
@@ -18,23 +18,23 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -56,8 +56,8 @@ public class GenericUDFOPDTIPlus extends GenericUDFBaseDTI {
   protected transient int dtArgIdx;
   protected transient Converter dtConverter;
 
-  protected transient TimestampWritable timestampResult = new TimestampWritable();
-  protected transient DateWritable dateResult = new DateWritable();
+  protected transient TimestampWritableV2 timestampResult = new TimestampWritableV2();
+  protected transient DateWritableV2 dateResult = new DateWritableV2();
   protected transient HiveIntervalDayTimeWritable intervalDayTimeResult =
       new HiveIntervalDayTimeWritable();
   protected transient HiveIntervalYearMonthWritable intervalYearMonthResult =
@@ -217,7 +217,7 @@ public class GenericUDFOPDTIPlus extends GenericUDFBaseDTI {
     }
   }
 
-  protected DateWritable handleDateResult(Date result) {
+  protected DateWritableV2 handleDateResult(Date result) {
     if (result == null) {
       return null;
     }
@@ -225,7 +225,7 @@ public class GenericUDFOPDTIPlus extends GenericUDFBaseDTI {
     return dateResult;
   }
 
-  protected TimestampWritable handleTimestampResult(Timestamp result) {
+  protected TimestampWritableV2 handleTimestampResult(Timestamp result) {
     if (result == null) {
       return null;
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java
index 2406868..cf8c26a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java
@@ -21,9 +21,7 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP;
 
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -45,7 +43,6 @@ import org.apache.hadoop.io.IntWritable;
 public class GenericUDFQuarter extends GenericUDF {
   private transient Converter[] converters = new Converter[1];
   private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[1];
-  private final Calendar calendar = Calendar.getInstance();
   private final IntWritable output = new IntWritable();
 
   @Override
@@ -65,8 +62,7 @@ public class GenericUDFQuarter extends GenericUDF {
     if (date == null) {
       return null;
     }
-    calendar.setTime(date);
-    int month = calendar.get(Calendar.MONTH);
+    int month = date.getMonth() - 1;
     int quarter = (month + 3) / 3;
 
     output.set(quarter);
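
date.getMonth() on the new type is 1-based, so the patched branch subtracts one to recover the 0-based month the existing quarter formula expects. The formula end to end (illustration only):

    public class QuarterSketch {
      public static void main(String[] args) {
        for (int month1Based = 1; month1Based <= 12; month1Based++) {
          int month = month1Based - 1;      // date.getMonth() - 1 in the UDF
          int quarter = (month + 3) / 3;    // months 1..3 -> Q1, 4..6 -> Q2, ...
          System.out.println(month1Based + " -> Q" + quarter);
        }
      }
    }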

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
index f0fcf69..4fca5d5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
-import java.sql.Timestamp;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -163,7 +163,7 @@ public class GenericUDFReflect2 extends AbstractGenericUDFReflect {
         ((Text)returnObj).set((String)result);
         return returnObj;
       case TIMESTAMP:
-        ((TimestampWritable)returnObj).set((Timestamp)result);
+        ((TimestampWritableV2)returnObj).set((Timestamp)result);
         return returnObj;
       case BINARY:
         ((BytesWritable)returnObj).set((byte[])result, 0, ((byte[]) result).length);

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
index 5b55402..4d0e85d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
  * Example usage:
  * ... CAST(<Timestamp string> as TIMESTAMP) ...
  *
- * Creates a TimestampWritable object using PrimitiveObjectInspectorConverter
+ * Creates a TimestampWritableV2 object using PrimitiveObjectInspectorConverter
  *
  */
 @Description(name = "timestamp",

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
index 53dfae2..3c3796e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
@@ -18,12 +18,12 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Timestamp;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
+import java.util.TimeZone;
 
-import org.apache.calcite.util.TimestampWithTimeZoneString;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -41,12 +41,10 @@ import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
 import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
 
 /**
  * deterministic version of UDFUnixTimeStamp. enforces argument
@@ -84,6 +82,8 @@ public class GenericUDFToUnixTimeStamp extends GenericUDF {
       }
     }
 
+    formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
+
     PrimitiveObjectInspector arg1OI = (PrimitiveObjectInspector) arguments[0];
     switch (arg1OI.getPrimitiveCategory()) {
       case CHAR:
@@ -171,7 +171,7 @@ public class GenericUDFToUnixTimeStamp extends GenericUDF {
   }
 
   protected static void setValueFromTs(LongWritable value, Timestamp timestamp) {
-    value.set(timestamp.getTime() / 1000);
+    value.set(timestamp.toEpochSecond());
   }
 
   @Override
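
[Note: a standalone sketch of why the formatter is pinned to UTC, assuming the new Timestamp type's valueOf(String) factory; the class name and literals are illustrative.]

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.TimeZone;
    import org.apache.hadoop.hive.common.type.Timestamp;

    public class ToUnixTimeSketch {
      public static void main(String[] args) throws ParseException {
        // Without this call the parse result shifts with the JVM's zone;
        // with it, the string is interpreted as UTC, matching the new
        // timezone-agnostic Timestamp semantics.
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
        long viaFormatter = formatter.parse("2018-06-25 04:40:26").getTime() / 1000;

        // The new Timestamp exposes the same epoch second directly.
        Timestamp ts = Timestamp.valueOf("2018-06-25 04:40:26");
        System.out.println(viaFormatter == ts.toEpochSecond());  // true
      }
    }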

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
index 372db36..7a7d13e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
@@ -20,24 +20,21 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 import java.math.BigDecimal;
-import java.sql.Timestamp;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -77,7 +74,6 @@ import org.apache.hadoop.io.Text;
         + " > SELECT _FUNC_(1234567891.1234567891);\n" + "OK\n" + " 1234567891")
 public class GenericUDFTrunc extends GenericUDF {
 
-  private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private transient TimestampConverter timestampConverter;
   private transient Converter textConverter1;
   private transient Converter textConverter2;
@@ -88,7 +84,7 @@ public class GenericUDFTrunc extends GenericUDF {
   private transient Converter longConverter;
   private transient PrimitiveCategory inputType1;
   private transient PrimitiveCategory inputType2;
-  private final Calendar calendar = Calendar.getInstance();
+  private final Date date = new Date();
   private final Text output = new Text();
   private transient String fmtInput;
   private transient PrimitiveObjectInspector inputOI;
@@ -297,36 +293,35 @@ public class GenericUDFTrunc extends GenericUDF {
       fmtInput = textConverter2.convert(arguments[1].get()).toString();
     }
 
-    Date date;
+    Date d;
     switch (inputType1) {
     case STRING:
       String dateString = textConverter1.convert(arguments[0].get()).toString();
       try {
-        date = formatter.parse(dateString.toString());
-      } catch (ParseException e) {
+        d = Date.valueOf(dateString.toString());
+      } catch (IllegalArgumentException e) {
         return null;
       }
       break;
     case TIMESTAMP:
       Timestamp ts =
-          ((TimestampWritable) timestampConverter.convert(arguments[0].get())).getTimestamp();
-      date = ts;
+          ((TimestampWritableV2) timestampConverter.convert(arguments[0].get())).getTimestamp();
+      d = Date.ofEpochMilli(ts.toEpochMilli());
       break;
     case DATE:
-      DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
-      date = dw.get();
+      DateWritableV2 dw = (DateWritableV2) dateWritableConverter.convert(arguments[0].get());
+      d = dw.get();
       break;
     default:
       throw new UDFArgumentTypeException(0,
           "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
     }
 
-    if (evalDate(date) == null) {
+    if (evalDate(d) == null) {
       return null;
     }
 
-    Date newDate = calendar.getTime();
-    output.set(formatter.format(newDate));
+    output.set(date.toString());
     return output;
   }
 
@@ -427,22 +422,22 @@ public class GenericUDFTrunc extends GenericUDF {
     return getStandardDisplayString("trunc", children);
   }
 
-  private Calendar evalDate(Date d) throws UDFArgumentException {
-    calendar.setTime(d);
+  private Date evalDate(Date d) throws UDFArgumentException {
+    date.setTimeInDays(d.toEpochDay());
     if ("MONTH".equals(fmtInput) || "MON".equals(fmtInput) || "MM".equals(fmtInput)) {
-      calendar.set(Calendar.DAY_OF_MONTH, 1);
-      return calendar;
+      date.setDayOfMonth(1);
+      return date;
     } else if ("QUARTER".equals(fmtInput) || "Q".equals(fmtInput)) {
-      int month = calendar.get(Calendar.MONTH);
+      int month = date.getMonth() - 1;
       int quarter = month / 3;
-      int monthToSet = quarter * 3;
-      calendar.set(Calendar.MONTH, monthToSet);
-      calendar.set(Calendar.DAY_OF_MONTH, 1);
-      return calendar;
+      int monthToSet = quarter * 3 + 1;
+      date.setMonth(monthToSet);
+      date.setDayOfMonth(1);
+      return date;
     } else if ("YEAR".equals(fmtInput) || "YYYY".equals(fmtInput) || "YY".equals(fmtInput)) {
-      calendar.set(Calendar.MONTH, 0);
-      calendar.set(Calendar.DAY_OF_MONTH, 1);
-      return calendar;
+      date.setMonth(1);
+      date.setDayOfMonth(1);
+      return date;
     } else {
       return null;
     }
@@ -485,5 +480,5 @@ public class GenericUDFTrunc extends GenericUDF {
     }
     return output;
   }
-  
+
 }
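
[Note: a minimal sketch of the Calendar-free truncation above, using only the Date methods that appear in the hunk (Date.valueOf, getMonth, setMonth, setDayOfMonth); the dates are illustrative.]

    import org.apache.hadoop.hive.common.type.Date;

    public class TruncSketch {
      public static void main(String[] args) {
        // MONTH: clamp to the first day of the month.
        Date m = Date.valueOf("2018-06-25");
        m.setDayOfMonth(1);
        System.out.println(m);                    // 2018-06-01

        // QUARTER: months are 1-based on the new Date type, hence -1/+1.
        Date q = Date.valueOf("2018-06-25");
        int month = q.getMonth() - 1;             // 0-based month index
        q.setMonth((month / 3) * 3 + 1);          // first month of the quarter
        q.setDayOfMonth(1);
        System.out.println(q);                    // 2018-04-01
      }
    }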

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
index 8329831..557ab79 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -44,7 +45,7 @@ public class GenericUDFUnixTimeStamp extends GenericUDFToUnixTimeStamp {
     } else {
       if (currentTimestamp == null) {
         currentTimestamp = new LongWritable(0);
-        setValueFromTs(currentTimestamp, SessionState.get().getQueryCurrentTimestamp());
+        setValueFromTs(currentTimestamp, Timestamp.ofEpochMilli(SessionState.get().getQueryCurrentTimestamp().getTime()));
         String msg = "unix_timestamp(void) is deprecated. Use current_timestamp instead.";
         SessionState.getConsole().printInfo(msg, false);
       }
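
[Note: the bridge from the java.sql.Timestamp still returned by SessionState to the new type is an epoch-millisecond round trip; a sketch, with the literal chosen arbitrarily.]

    import org.apache.hadoop.hive.common.type.Timestamp;

    public class BridgeSketch {
      public static void main(String[] args) {
        // SessionState.get().getQueryCurrentTimestamp() still yields a
        // java.sql.Timestamp; ofEpochMilli converts it losslessly at
        // millisecond precision.
        java.sql.Timestamp legacy = new java.sql.Timestamp(1374554702000L);
        Timestamp ts = Timestamp.ofEpochMilli(legacy.getTime());
        System.out.println(ts.toEpochMilli() == legacy.getTime());  // true
      }
    }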

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
index b440d8d..b34c4d6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
@@ -18,10 +18,9 @@
 
 package org.apache.hadoop.hive.ql.udf.ptf;
 
-import java.sql.Timestamp;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.ql.exec.PTFPartition;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -559,7 +558,7 @@ class DateValueBoundaryScanner extends SingleValueBoundaryScanner {
     Date l2 = PrimitiveObjectInspectorUtils.getDate(v2,
         (PrimitiveObjectInspector) expressionDef.getOI());
     if (l1 != null && l2 != null) {
-        return (double)(l1.getTime() - l2.getTime())/1000 > (long)amt * 24 * 3600; // Converts amt days to milliseconds
+        return (double)(l1.toEpochMilli() - l2.toEpochMilli())/1000 > (long)amt * 24 * 3600; // Converts amt days to seconds
     }
     return l1 != l2; // True if only one date is null
   }
@@ -583,9 +582,9 @@ class TimestampValueBoundaryScanner extends SingleValueBoundaryScanner {
   public boolean isDistanceGreater(Object v1, Object v2, int amt) {
     if (v1 != null && v2 != null) {
       long l1 = PrimitiveObjectInspectorUtils.getTimestamp(v1,
-          (PrimitiveObjectInspector) expressionDef.getOI()).getTime();
+          (PrimitiveObjectInspector) expressionDef.getOI()).toEpochMilli();
       long l2 = PrimitiveObjectInspectorUtils.getTimestamp(v2,
-          (PrimitiveObjectInspector) expressionDef.getOI()).getTime();
+          (PrimitiveObjectInspector) expressionDef.getOI()).toEpochMilli();
       return (double)(l1-l2)/1000 > amt; // TODO: lossy conversion, distance is considered in seconds
     }
     return v1 != null || v2 != null; // True if only one value is null
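
[Note: the boundary checks above reduce to epoch arithmetic; a sketch of the date variant, kept in seconds to match the comparison in the hunk. Class and method names are illustrative.]

    import org.apache.hadoop.hive.common.type.Date;

    public class DistanceSketch {
      // True when l1 is more than amt days after l2 (both non-null),
      // mirroring DateValueBoundaryScanner.isDistanceGreater.
      static boolean isDistanceGreater(Date l1, Date l2, int amt) {
        return (double) (l1.toEpochMilli() - l2.toEpochMilli()) / 1000
            > (long) amt * 24 * 3600;  // amt days expressed in seconds
      }

      public static void main(String[] args) {
        System.out.println(isDistanceGreater(
            Date.valueOf("2018-06-25"), Date.valueOf("2018-06-20"), 3));  // true
      }
    }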

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
index 9a097af..16babbf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
@@ -17,17 +17,17 @@
  */
 package org.apache.hadoop.hive.ql.util;
 
-import java.sql.Date;
-import java.sql.Timestamp;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hive.common.util.DateUtils;
+
 import java.util.Calendar;
 import java.util.TimeZone;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hive.common.util.DateUtils;
-
 
 public class DateTimeMath {
 
@@ -49,7 +49,6 @@ public class DateTimeMath {
   }
 
   protected Calendar calUtc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
-  protected Calendar calLocal = Calendar.getInstance();
   protected NanosResult nanosResult = new NanosResult();
 
   //
@@ -62,39 +61,22 @@ public class DateTimeMath {
    * @param months
    * @return
    */
-  public long addMonthsToMillisUtc(long millis, int months) {
+  public long addMonthsToMillis(long millis, int months) {
     calUtc.setTimeInMillis(millis);
     calUtc.add(Calendar.MONTH, months);
     return calUtc.getTimeInMillis();
   }
 
-  /**
-   * Perform month arithmetic to millis value using local time zone.
-   * @param millis
-   * @param months
-   * @return
-   */
-  public long addMonthsToMillisLocal(long millis, int months) {
-    calLocal.setTimeInMillis(millis);
-    calLocal.add(Calendar.MONTH, months);
-    return calLocal.getTimeInMillis();
-  }
-
-  public long addMonthsToNanosUtc(long nanos, int months) {
-    long result = addMonthsToMillisUtc(nanos / 1000000, months) * 1000000 + (nanos % 1000000);
-    return result;
-  }
-
-  public long addMonthsToNanosLocal(long nanos, int months) {
-    long result = addMonthsToMillisLocal(nanos / 1000000, months) * 1000000 + (nanos % 1000000);
+  public long addMonthsToNanos(long nanos, int months) {
+    long result = addMonthsToMillis(nanos / 1000000, months) * 1000000 + (nanos % 1000000);
     return result;
   }
 
   public long addMonthsToDays(long days, int months) {
-    long millis = DateWritable.daysToMillis((int) days);
-    millis = addMonthsToMillisLocal(millis, months);
+    long millis = DateWritableV2.daysToMillis((int) days);
+    millis = addMonthsToMillis(millis, months);
     // Convert millis result back to days
-    return DateWritable.millisToDays(millis);
+    return DateWritableV2.millisToDays(millis);
   }
 
   public Timestamp add(Timestamp ts, HiveIntervalYearMonth interval) {
@@ -102,7 +84,19 @@ public class DateTimeMath {
       return null;
     }
 
-    Timestamp tsResult = new Timestamp(0);
+    Timestamp tsResult = new Timestamp();
+    add(ts, interval, tsResult);
+
+    return tsResult;
+  }
+
+  @Deprecated
+  public java.sql.Timestamp add(java.sql.Timestamp ts, HiveIntervalYearMonth interval) {
+    if (ts == null || interval == null) {
+      return null;
+    }
+
+    java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
     add(ts, interval, tsResult);
 
     return tsResult;
@@ -113,9 +107,21 @@ public class DateTimeMath {
       return false;
     }
 
+    long resultMillis = addMonthsToMillis(ts.toEpochMilli(), interval.getTotalMonths());
+    result.setTimeInMillis(resultMillis, ts.getNanos());
+
+    return true;
+  }
+
+  @Deprecated
+  public boolean add(java.sql.Timestamp ts, HiveIntervalYearMonth interval, java.sql.Timestamp result) {
+    if (ts == null || interval == null) {
+      return false;
+    }
+
     // Attempt to match Oracle semantics for timestamp arithmetic,
     // where timestamp arithmetic is done in UTC, then converted back to local timezone
-    long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths());
+    long resultMillis = addMonthsToMillis(ts.getTime(), interval.getTotalMonths());
     result.setTime(resultMillis);
     result.setNanos(ts.getNanos());
 
@@ -127,7 +133,19 @@ public class DateTimeMath {
       return null;
     }
 
-    Timestamp tsResult = new Timestamp(0);
+    Timestamp tsResult = new Timestamp();
+    add(interval, ts, tsResult);
+
+    return tsResult;
+  }
+
+  @Deprecated
+  public java.sql.Timestamp add(HiveIntervalYearMonth interval, java.sql.Timestamp ts) {
+    if (ts == null || interval == null) {
+      return null;
+    }
+
+    java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
     add(interval, ts, tsResult);
 
     return tsResult;
@@ -138,9 +156,19 @@ public class DateTimeMath {
       return false;
     }
 
-    // Attempt to match Oracle semantics for timestamp arithmetic,
-    // where timestamp arithmetic is done in UTC, then converted back to local timezone
-    long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths());
+    long resultMillis = addMonthsToMillis(ts.toEpochMilli(), interval.getTotalMonths());
+    result.setTimeInMillis(resultMillis, ts.getNanos());
+
+    return true;
+  }
+
+  @Deprecated
+  public boolean add(HiveIntervalYearMonth interval, java.sql.Timestamp ts, java.sql.Timestamp result) {
+    if (ts == null || interval == null) {
+      return false;
+    }
+
+    long resultMillis = addMonthsToMillis(ts.getTime(), interval.getTotalMonths());
     result.setTime(resultMillis);
     result.setNanos(ts.getNanos());
 
@@ -152,7 +180,19 @@ public class DateTimeMath {
       return null;
     }
 
-    Date dtResult = new Date(0);
+    Date dtResult = new Date();
+    add(dt, interval, dtResult);
+
+    return dtResult;
+  }
+
+  @Deprecated
+  public java.sql.Date add(java.sql.Date dt, HiveIntervalYearMonth interval) {
+    if (dt == null || interval == null) {
+      return null;
+    }
+
+    java.sql.Date dtResult = new java.sql.Date(0);
     add(dt, interval, dtResult);
 
     return dtResult;
@@ -163,9 +203,18 @@ public class DateTimeMath {
       return false;
     }
 
-    // Since Date millis value is in local timezone representation, do date arithmetic
-    // using local timezone so the time remains at the start of the day.
-    long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths());
+    long resultMillis = addMonthsToMillis(dt.toEpochMilli(), interval.getTotalMonths());
+    result.setTimeInMillis(resultMillis);
+    return true;
+  }
+
+  @Deprecated
+  public boolean add(java.sql.Date dt, HiveIntervalYearMonth interval, java.sql.Date result) {
+    if (dt == null || interval == null) {
+      return false;
+    }
+
+    long resultMillis = addMonthsToMillis(dt.getTime(), interval.getTotalMonths());
     result.setTime(resultMillis);
     return true;
   }
@@ -175,7 +224,19 @@ public class DateTimeMath {
       return null;
     }
 
-    Date dtResult = new Date(0);
+    Date dtResult = new Date();
+    add(interval, dt, dtResult);
+
+    return dtResult;
+  }
+
+  @Deprecated
+  public java.sql.Date add(HiveIntervalYearMonth interval, java.sql.Date dt) {
+    if (dt == null || interval == null) {
+      return null;
+    }
+
+    java.sql.Date dtResult = new java.sql.Date(0);
     add(interval, dt, dtResult);
 
     return dtResult;
@@ -186,9 +247,18 @@ public class DateTimeMath {
       return false;
     }
 
-    // Since Date millis value is in local timezone representation, do date arithmetic
-    // using local timezone so the time remains at the start of the day.
-    long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths());
+    long resultMillis = addMonthsToMillis(dt.toEpochMilli(), interval.getTotalMonths());
+    result.setTimeInMillis(resultMillis);
+    return true;
+  }
+
+  @Deprecated
+  public boolean add(HiveIntervalYearMonth interval, java.sql.Date dt, java.sql.Date result) {
+    if (dt == null || interval == null) {
+      return false;
+    }
+
+    long resultMillis = addMonthsToMillis(dt.getTime(), interval.getTotalMonths());
     result.setTime(resultMillis);
     return true;
   }
@@ -208,7 +278,19 @@ public class DateTimeMath {
       return null;
     }
 
-    Timestamp tsResult = new Timestamp(0);
+    Timestamp tsResult = new Timestamp();
+    subtract(left, right, tsResult);
+
+    return tsResult;
+  }
+
+  @Deprecated
+  public java.sql.Timestamp subtract(java.sql.Timestamp left, HiveIntervalYearMonth right) {
+    if (left == null || right == null) {
+      return null;
+    }
+
+    java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
     subtract(left, right, tsResult);
 
     return tsResult;
@@ -221,12 +303,32 @@ public class DateTimeMath {
     return add(left, right.negate(), result);
   }
 
+  @Deprecated
+  public boolean subtract(java.sql.Timestamp left, HiveIntervalYearMonth right, java.sql.Timestamp result) {
+    if (left == null || right == null) {
+      return false;
+    }
+    return add(left, right.negate(), result);
+  }
+
   public Date subtract(Date left, HiveIntervalYearMonth right) {
     if (left == null || right == null) {
       return null;
     }
 
-    Date dtResult = new Date(0);
+    Date dtResult = new Date();
+    subtract(left, right, dtResult);
+
+    return dtResult;
+  }
+
+  @Deprecated
+  public java.sql.Date subtract(java.sql.Date left, HiveIntervalYearMonth right) {
+    if (left == null || right == null) {
+      return null;
+    }
+
+    java.sql.Date dtResult = new java.sql.Date(0);
     subtract(left, right, dtResult);
 
     return dtResult;
@@ -239,6 +341,14 @@ public class DateTimeMath {
     return add(left, right.negate(), result);
   }
 
+  @Deprecated
+  public boolean subtract(java.sql.Date left, HiveIntervalYearMonth right, java.sql.Date result) {
+    if (left == null || right == null) {
+      return false;
+    }
+    return add(left, right.negate(), result);
+  }
+
   public HiveIntervalYearMonth subtract(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
     if (left == null || right == null) {
       return null;
@@ -255,7 +365,19 @@ public class DateTimeMath {
       return null;
     }
 
-    Timestamp tsResult = new Timestamp(0);
+    Timestamp tsResult = new Timestamp();
+    add(ts, interval, tsResult);
+
+    return tsResult;
+  }
+
+  @Deprecated
+  public java.sql.Timestamp add(java.sql.Timestamp ts, HiveIntervalDayTime interval) {
+    if (ts == null || interval == null) {
+      return null;
+    }
+
+    java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
     add(ts, interval, tsResult);
 
     return tsResult;
@@ -269,6 +391,21 @@ public class DateTimeMath {
 
     nanosResult.addNanos(ts.getNanos(), interval.getNanos());
 
+    long newMillis = ts.toEpochMilli()
+        + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
+    result.setTimeInMillis(newMillis, nanosResult.nanos);
+    return true;
+  }
+
+  @Deprecated
+  public boolean add(java.sql.Timestamp ts, HiveIntervalDayTime interval,
+      java.sql.Timestamp result) {
+    if (ts == null || interval == null) {
+      return false;
+    }
+
+    nanosResult.addNanos(ts.getNanos(), interval.getNanos());
+
     long newMillis = ts.getTime()
         + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
     result.setTime(newMillis);
@@ -281,7 +418,18 @@ public class DateTimeMath {
       return null;
     }
 
-    Timestamp tsResult = new Timestamp(0);
+    Timestamp tsResult = new Timestamp();
+    add(interval, ts, tsResult);
+    return tsResult;
+  }
+
+  @Deprecated
+  public java.sql.Timestamp add(HiveIntervalDayTime interval, java.sql.Timestamp ts) {
+    if (ts == null || interval == null) {
+      return null;
+    }
+
+    java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
     add(interval, ts, tsResult);
     return tsResult;
   }
@@ -294,6 +442,21 @@ public class DateTimeMath {
 
     nanosResult.addNanos(ts.getNanos(), interval.getNanos());
 
+    long newMillis = ts.toEpochMilli()
+        + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
+    result.setTimeInMillis(newMillis, nanosResult.nanos);
+    return true;
+  }
+
+  @Deprecated
+  public boolean add(HiveIntervalDayTime interval, java.sql.Timestamp ts,
+      java.sql.Timestamp result) {
+    if (ts == null || interval == null) {
+      return false;
+    }
+
+    nanosResult.addNanos(ts.getNanos(), interval.getNanos());
+
     long newMillis = ts.getTime()
         + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
     result.setTime(newMillis);
@@ -332,6 +495,14 @@ public class DateTimeMath {
     return add(left, right.negate());
   }
 
+  @Deprecated
+  public java.sql.Timestamp subtract(java.sql.Timestamp left, HiveIntervalDayTime right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
   public boolean subtract(Timestamp left, HiveIntervalDayTime right, Timestamp result) {
     if (left == null || right == null) {
       return false;
@@ -339,6 +510,14 @@ public class DateTimeMath {
     return add(left, right.negate(), result);
   }
 
+  @Deprecated
+  public boolean subtract(java.sql.Timestamp left, HiveIntervalDayTime right, java.sql.Timestamp result) {
+    if (left == null || right == null) {
+      return false;
+    }
+    return add(left, right.negate(), result);
+  }
+
   public HiveIntervalDayTime subtract(HiveIntervalDayTime left, HiveIntervalDayTime right) {
     if (left == null || right == null) {
       return null;
@@ -365,6 +544,18 @@ public class DateTimeMath {
     return result;
   }
 
+  @Deprecated
+  public HiveIntervalDayTime subtract(java.sql.Timestamp left, java.sql.Timestamp right) {
+    if (left == null || right == null) {
+      return null;
+    }
+
+    HiveIntervalDayTime result = new HiveIntervalDayTime();
+    subtract(left, right, result);
+
+    return result;
+  }
+
   public boolean subtract(Timestamp left, Timestamp right,
       HiveIntervalDayTime result) {
     if (left == null || right == null) {
@@ -373,6 +564,21 @@ public class DateTimeMath {
 
     nanosResult.addNanos(left.getNanos(), -(right.getNanos()));
 
+    long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.toEpochMilli())
+        - TimeUnit.MILLISECONDS.toSeconds(right.toEpochMilli()) + nanosResult.seconds;
+    result.set(totalSeconds, nanosResult.nanos);
+    return true;
+  }
+
+  @Deprecated
+  public boolean subtract(java.sql.Timestamp left, java.sql.Timestamp right,
+      HiveIntervalDayTime result) {
+    if (left == null || right == null) {
+      return false;
+    }
+
+    nanosResult.addNanos(left.getNanos(), -(right.getNanos()));
+
     long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.getTime())
         - TimeUnit.MILLISECONDS.toSeconds(right.getTime()) + nanosResult.seconds;
     result.set(totalSeconds, nanosResult.nanos);
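
[Note: a standalone sketch of the consolidated month arithmetic: one UTC Calendar replaces the old addMonthsToMillisUtc/addMonthsToMillisLocal pair, so Date and Timestamp now take the same code path. The epoch literals are illustrative.]

    import java.util.Calendar;
    import java.util.TimeZone;

    public class AddMonthsSketch {
      private final Calendar calUtc =
          Calendar.getInstance(TimeZone.getTimeZone("UTC"));

      // Same body as DateTimeMath.addMonthsToMillis above.
      public long addMonthsToMillis(long millis, int months) {
        calUtc.setTimeInMillis(millis);
        calUtc.add(Calendar.MONTH, months);
        return calUtc.getTimeInMillis();
      }

      public static void main(String[] args) {
        AddMonthsSketch math = new AddMonthsSketch();
        long jan31 = 1517356800000L;              // 2018-01-31T00:00:00Z
        // Calendar clamps the day of month: Jan 31 + 1 month = Feb 28.
        System.out.println(math.addMonthsToMillis(jan31, 1));
        // 1519776000000 == 2018-02-28T00:00:00Z
      }
    }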

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
index 613d7a8..40d60f3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
@@ -28,21 +28,19 @@ import junit.framework.TestCase;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
-import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionType;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.ql.udf.UDFAscii;
 import org.apache.hadoop.hive.ql.udf.UDFLn;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -62,10 +60,10 @@ public class TestFunctionRegistry extends TestCase {
     public void one(IntWritable x, HiveDecimalWritable y) {}
     public void one(IntWritable x, DoubleWritable y) {}
     public void one(IntWritable x, IntWritable y) {}
-    public void mismatch(DateWritable x, HiveDecimalWritable y) {}
-    public void mismatch(TimestampWritable x, HiveDecimalWritable y) {}
+    public void mismatch(DateWritableV2 x, HiveDecimalWritable y) {}
+    public void mismatch(TimestampWritableV2 x, HiveDecimalWritable y) {}
     public void mismatch(BytesWritable x, DoubleWritable y) {}
-    public void typeaffinity1(DateWritable x) {}
+    public void typeaffinity1(DateWritableV2 x) {}
     public void typeaffinity1(DoubleWritable x) {};
     public void typeaffinity1(Text x) {}
     public void typeaffinity2(IntWritable x) {}
@@ -158,8 +156,8 @@ public class TestFunctionRegistry extends TestCase {
     typeAffinity("typeaffinity1", TypeInfoFactory.floatTypeInfo, 1, DoubleWritable.class);
 
     // Prefer date type arguments over other method signatures
-    typeAffinity("typeaffinity1", TypeInfoFactory.dateTypeInfo, 1, DateWritable.class);
-    typeAffinity("typeaffinity1", TypeInfoFactory.timestampTypeInfo, 1, DateWritable.class);
+    typeAffinity("typeaffinity1", TypeInfoFactory.dateTypeInfo, 1, DateWritableV2.class);
+    typeAffinity("typeaffinity1", TypeInfoFactory.timestampTypeInfo, 1, DateWritableV2.class);
 
     // String type affinity
     typeAffinity("typeaffinity1", TypeInfoFactory.stringTypeInfo, 1, Text.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
index fdc268c..90eb45b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
@@ -36,7 +36,6 @@ import static org.mockito.Mockito.when;
 import java.io.File;
 import java.io.IOException;
 import java.io.Serializable;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
@@ -55,6 +54,7 @@ import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.Context;
@@ -132,8 +132,7 @@ public class TestUtilities {
 
   @Test
   public void testSerializeTimestamp() {
-    Timestamp ts = new Timestamp(1374554702000L);
-    ts.setNanos(123456);
+    Timestamp ts = Timestamp.ofEpochMilli(1374554702000L, 123456);
     ExprNodeConstantDesc constant = new ExprNodeConstantDesc(ts);
     List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
     children.add(constant);

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java
index c8ae73a..7e5e5fb 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java
@@ -19,17 +19,17 @@ package org.apache.hadoop.hive.ql.exec.persistence;
 import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
@@ -55,7 +55,7 @@ public class TestRowContainer {
       ObjectInspectorUtils.getStandardObjectInspector(serde.getObjectInspector()));
     result.setTableDesc(
       PTFRowContainer.createTableDesc((StructObjectInspector) serde.getObjectInspector()));
-    TimestampWritable key = new TimestampWritable(new Timestamp(10));
+    TimestampWritableV2 key = new TimestampWritableV2(Timestamp.ofEpochMilli(10));
     result.setKeyObject(Lists.newArrayList(key));
     List<Writable> row;
     // will trigger 2 spills