Posted to commits@hive.apache.org by ha...@apache.org on 2018/06/25 04:40:28 UTC

[30/33] hive git commit: HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
index 68b038f..cedbba2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
@@ -21,12 +21,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 import java.sql.Timestamp;
 import java.util.Arrays;
 
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 // A type date (LongColumnVector storing epoch days) minus a type date produces a
 // type interval_day_time (TimestampColumnVector storing nanosecond interval in 2 longs).
@@ -44,7 +42,7 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
     super(outputColumnNum);
     this.colNum = colNum;
     this.value = new Timestamp(0);
-    this.value.setTime(DateWritable.daysToMillis((int) value));
+    this.value.setTime(DateWritableV2.daysToMillis((int) value));
   }
 
   public DateScalarSubtractDateColumn() {
@@ -91,7 +89,7 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
     if (inputColVector2.isRepeating) {
       if (inputColVector2.noNulls || !inputIsNull[0]) {
         outputIsNull[0] = false;
-        scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+        scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
         dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
         outputColVector.setFromScratchIntervalDayTime(0);
       } else {
@@ -108,14 +106,14 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           outputIsNull[i] = false;
-          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
         Arrays.fill(outputIsNull, 0, n, false);
         for(int i = 0; i != n; i++) {
-          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
@@ -129,14 +127,14 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           outputIsNull[i] = inputIsNull[i];
-          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
         for(int i = 0; i != n; i++) {
-          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
@@ -148,7 +146,7 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return "val " + value + ", " + getColumnParamString(1, colNum);
+    return "val " + org.apache.hadoop.hive.common.type.Date.ofEpochMilli(value.getTime()) + ", " + getColumnParamString(1, colNum);
   }
 
   @Override

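Editor's note: the substance of this hunk is the DateWritable -> DateWritableV2 swap. daysToMillis is now computed in UTC rather than in the JVM's default time zone, so the date-minus-date arithmetic above no longer drifts across DST transitions. A minimal sketch of that conversion semantics using java.time (a behavioral mirror, not Hive's implementation):

    import java.time.LocalDate;
    import java.time.ZoneOffset;

    public class DaysToMillisSketch {
        // UTC-based day-to-millisecond conversion, analogous in behavior to
        // DateWritableV2.daysToMillis(int): always exactly 86,400,000 ms per day.
        static long daysToMillisUtc(int epochDays) {
            return LocalDate.ofEpochDay(epochDays)
                .atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
        }

        public static void main(String[] args) {
            // Epoch day 17000 is 2016-07-18; in UTC the result is exact.
            System.out.println(daysToMillisUtc(17000)); // 1468800000000
        }
    }
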
http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
index e150789..e444b68 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
@@ -24,7 +24,6 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descript
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 import java.util.Arrays;

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java
index 252a816..eff20c9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.DynamicValue;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -181,9 +181,9 @@ public class DynamicValueVectorExpression extends VectorExpression {
         decimalValue = PrimitiveObjectInspectorUtils.getHiveDecimal(val, poi);
         break;
       case DATE:
-        longValue = DateWritable.dateToDays(PrimitiveObjectInspectorUtils.getDate(val, poi));
+        longValue = DateWritableV2.dateToDays(PrimitiveObjectInspectorUtils.getDate(val, poi));
       case TIMESTAMP:
-        timestampValue = PrimitiveObjectInspectorUtils.getTimestamp(val, poi);
+        timestampValue = PrimitiveObjectInspectorUtils.getTimestamp(val, poi).toSqlTimestamp();
         break;
       case INTERVAL_YEAR_MONTH:
         longValue = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(val, poi).getTotalMonths();

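Editor's note: in the hunk above there is no break between the DATE and TIMESTAMP cases, so a DATE value falls through and also assigns timestampValue; the fall-through predates this patch and looks unintended. A self-contained demo of why that matters (generic Java with hypothetical names, not the Hive class):

    public class FallThroughDemo {
        enum Cat { DATE, TIMESTAMP }

        public static void main(String[] args) {
            long longValue = 0;
            String timestampValue = null;
            Cat c = Cat.DATE;
            switch (c) {
                case DATE:
                    longValue = 42;          // no break: execution continues below
                case TIMESTAMP:
                    timestampValue = "set";  // runs for DATE inputs too
            }
            System.out.println(longValue + " " + timestampValue); // prints "42 set"
        }
    }
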
http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
index 0d9f9f7..f924e2e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 abstract public class TimestampToStringUnaryUDF extends VectorExpression {
   private static final long serialVersionUID = 1L;
 
-  private final int inputColumn;
+  protected final int inputColumn;
 
   public TimestampToStringUnaryUDF(int inputColumn, int outputColumnNum) {
     super(outputColumnNum);

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
index 48638b7..929f845 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
 /**
@@ -41,7 +41,7 @@ public interface VectorExpressionWriter {
   Object writeValue(byte[] value, int start, int length) throws HiveException;
   Object writeValue(HiveDecimalWritable value) throws HiveException;
   Object writeValue(HiveDecimal value) throws HiveException;
-  Object writeValue(TimestampWritable value) throws HiveException;
+  Object writeValue(TimestampWritableV2 value) throws HiveException;
   Object writeValue(Timestamp value) throws HiveException;
   Object writeValue(HiveIntervalDayTimeWritable value) throws HiveException;
   Object writeValue(HiveIntervalDayTime value) throws HiveException;

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
index a086535..6a87927 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
@@ -70,7 +70,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
 import org.apache.hadoop.io.Text;
-import org.apache.hive.common.util.DateUtils;
 
 /**
 * VectorExpressionWriterFactory helper class for generating VectorExpressionWriter objects.
@@ -185,14 +184,14 @@ public final class VectorExpressionWriterFactory {
      * The base implementation must be overridden by the Timestamp specialization
      */
     @Override
-    public Object writeValue(TimestampWritable value) throws HiveException {
+    public Object writeValue(TimestampWritableV2 value) throws HiveException {
       throw new HiveException("Internal error: should not reach here");
     }
 
     /**
      * The base implementation must be overridden by the Timestamp specialization
      */
-    public Object setValue(Object field, TimestampWritable value) throws HiveException {
+    public Object setValue(Object field, TimestampWritableV2 value) throws HiveException {
       throw new HiveException("Internal error: should not reach here");
     }
 
@@ -471,9 +470,9 @@ public final class VectorExpressionWriterFactory {
     @Override
     public Object writeValue(ColumnVector column, int row) throws HiveException {
       TimestampColumnVector dcv = (TimestampColumnVector) column;
-      TimestampWritable timestampWritable = (TimestampWritable) dcv.getScratchWritable();
+      TimestampWritableV2 timestampWritable = (TimestampWritableV2) dcv.getScratchWritable();
       if (timestampWritable == null) {
-        timestampWritable = new TimestampWritable();
+        timestampWritable = new TimestampWritableV2();
         dcv.setScratchWritable(timestampWritable);
       }
       if (dcv.noNulls && !dcv.isRepeating) {
@@ -498,9 +497,9 @@ public final class VectorExpressionWriterFactory {
     @Override
     public Object setValue(Object field, ColumnVector column, int row) throws HiveException {
       TimestampColumnVector dcv = (TimestampColumnVector) column;
-      TimestampWritable timestampWritable = (TimestampWritable) dcv.getScratchWritable();
+      TimestampWritableV2 timestampWritable = (TimestampWritableV2) dcv.getScratchWritable();
       if (timestampWritable == null) {
-        timestampWritable = new TimestampWritable();
+        timestampWritable = new TimestampWritableV2();
         dcv.setScratchWritable(timestampWritable);
       }
       if (dcv.noNulls && !dcv.isRepeating) {
@@ -786,7 +785,7 @@ public final class VectorExpressionWriterFactory {
       }
 
       @Override
-      public Object setValue(Object field, TimestampWritable value) {
+      public Object setValue(Object field, TimestampWritableV2 value) {
         if (null == field) {
           field = initValue(null);
         }
@@ -824,7 +823,7 @@ public final class VectorExpressionWriterFactory {
 
       @Override
       public Object writeValue(long value) {
-        dt.setTime(DateWritable.daysToMillis((int) value));
+        dt.setTime(DateWritableV2.daysToMillis((int) value));
         ((SettableDateObjectInspector) this.objectInspector).set(obj, dt);
         return obj;
       }
@@ -834,7 +833,7 @@ public final class VectorExpressionWriterFactory {
         if (null == field) {
           field = initValue(null);
         }
-        dt.setTime(DateWritable.daysToMillis((int) value));
+        dt.setTime(DateWritableV2.daysToMillis((int) value));
         ((SettableDateObjectInspector) this.objectInspector).set(field, dt);
         return field;
       }
@@ -860,7 +859,7 @@ public final class VectorExpressionWriterFactory {
       }
 
       @Override
-      public Object writeValue(TimestampWritable value) throws HiveException {
+      public Object writeValue(TimestampWritableV2 value) throws HiveException {
         return ((SettableTimestampObjectInspector) this.objectInspector).set(obj, value);
       }
 
@@ -880,7 +879,7 @@ public final class VectorExpressionWriterFactory {
       }
 
       @Override
-      public Object setValue(Object field, TimestampWritable value) {
+      public Object setValue(Object field, TimestampWritableV2 value) {
         if (null == field) {
           field = initValue(null);
         }

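Editor's note: the writeValue/setValue specializations above reuse a single scratch writable parked on the TimestampColumnVector, so the V2 migration only changes the scratch object's type, not the allocation discipline. A minimal sketch of that lazily-initialized-scratch pattern (ScratchHolder and the StringBuilder stand-in are hypothetical; only the getScratchWritable/setScratchWritable pair comes from the hunk):

    // Stand-in for TimestampColumnVector's scratch-writable slot.
    class ScratchHolder {
        private Object scratch;
        Object getScratchWritable() { return scratch; }
        void setScratchWritable(Object o) { scratch = o; }
    }

    public class ScratchReuseSketch {
        // Allocate the scratch object once, then reuse it for every row.
        static Object writeValue(ScratchHolder dcv, long millis) {
            StringBuilder w = (StringBuilder) dcv.getScratchWritable();
            if (w == null) {
                w = new StringBuilder();
                dcv.setScratchWritable(w);
            }
            w.setLength(0);
            w.append(millis);   // stands in for TimestampWritableV2.setInternal(...)
            return w;
        }

        public static void main(String[] args) {
            ScratchHolder holder = new ScratchHolder();
            Object first = writeValue(holder, 1L);
            Object second = writeValue(holder, 2L);
            System.out.println(first == second); // true: one allocation total
        }
    }
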
http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
index 0c83494..998b6b1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -25,15 +26,12 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.io.Text;
 import org.apache.hive.common.util.DateParser;
 
-import java.util.Arrays;
-import java.sql.Date;
-
 public class VectorUDFDateAddColCol extends VectorExpression {
   private static final long serialVersionUID = 1L;
 
@@ -43,7 +41,6 @@ public class VectorUDFDateAddColCol extends VectorExpression {
   protected boolean isPositive = true;
 
   private transient final Text text = new Text();
-  private transient final Date date = new Date(0);
   private transient final DateParser dateParser = new DateParser();
 
   // Transient members initialized by transientInit method.
@@ -261,7 +258,7 @@ public class VectorUDFDateAddColCol extends VectorExpression {
   protected long evaluateTimestamp(ColumnVector columnVector, int index, long numDays) {
     TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
     // Convert to date value (in days)
-    long days = DateWritable.millisToDays(tcv.getTime(index));
+    long days = DateWritableV2.millisToDays(tcv.getTime(index));
     if (isPositive) {
       days += numDays;
     } else {
@@ -281,7 +278,7 @@ public class VectorUDFDateAddColCol extends VectorExpression {
     }
     TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
     // Convert to date value (in days)
-    long days = DateWritable.millisToDays(tcv.getTime(0));
+    long days = DateWritableV2.millisToDays(tcv.getTime(0));
 
     evaluateRepeatedCommon(days, vector2, outputVector, selectedInUse, selected, n);
   }
@@ -292,13 +289,14 @@ public class VectorUDFDateAddColCol extends VectorExpression {
       outputVector.isNull[index] = true;
     } else {
       text.set(inputColumnVector1.vector[index], inputColumnVector1.start[index], inputColumnVector1.length[index]);
-      boolean parsed = dateParser.parseDate(text.toString(), date);
+      Date hDate = new Date();
+      boolean parsed = dateParser.parseDate(text.toString(), hDate);
       if (!parsed) {
         outputVector.noNulls = false;
         outputVector.isNull[index] = true;
         return;
       }
-      long days = DateWritable.millisToDays(date.getTime());
+      long days = DateWritableV2.millisToDays(hDate.toEpochMilli());
       if (isPositive) {
         days += numDays;
       } else {
@@ -319,6 +317,7 @@ public class VectorUDFDateAddColCol extends VectorExpression {
     }
     text.set(
         inputColumnVector1.vector[0], inputColumnVector1.start[0], inputColumnVector1.length[0]);
+    Date date = new Date();
     boolean parsed = dateParser.parseDate(text.toString(), date);
     if (!parsed) {
       outputVector.noNulls = false;
@@ -326,7 +325,7 @@ public class VectorUDFDateAddColCol extends VectorExpression {
       outputVector.isRepeating = true;
       return;
     }
-    long days = DateWritable.millisToDays(date.getTime());
+    long days = DateWritableV2.millisToDays(date.toEpochMilli());
 
     evaluateRepeatedCommon(days, vector2, outputVector, selectedInUse, selected, n);
   }

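Editor's note: the string path above now parses into org.apache.hadoop.hive.common.type.Date and converts through toEpochMilli(), both time-zone free; note it also trades the previously reused transient date field for a per-row new Date(). A small sketch assembling the same calls in isolation (assumes the Hive jars on the classpath; every call is one that appears in the hunk):

    import org.apache.hadoop.hive.common.type.Date;
    import org.apache.hadoop.hive.serde2.io.DateWritableV2;
    import org.apache.hive.common.util.DateParser;

    public class DateAddSketch {
        public static void main(String[] args) {
            DateParser parser = new DateParser();
            Date d = new Date();
            if (parser.parseDate("2018-06-25", d)) {
                // date_add('2018-06-25', 7), expressed as epoch days
                long days = DateWritableV2.millisToDays(d.toEpochMilli()) + 7;
                System.out.println(days);
            }
        }
    }
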
http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
index a73d2e6..30b20c8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
@@ -25,13 +25,12 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.io.Text;
 import org.apache.hive.common.util.DateParser;
 
-import java.sql.Date;
 import java.util.Arrays;
 
 public class VectorUDFDateAddColScalar extends VectorExpression {
@@ -44,7 +43,6 @@ public class VectorUDFDateAddColScalar extends VectorExpression {
 
   private transient final Text text = new Text();
   private transient final DateParser dateParser = new DateParser();
-  private transient final Date date = new Date(0);
 
   // Transient members initialized by transientInit method.
   private transient PrimitiveCategory primitiveCategory;
@@ -305,7 +303,7 @@ public class VectorUDFDateAddColScalar extends VectorExpression {
   protected long evaluateTimestamp(ColumnVector columnVector, int index) {
     TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
     // Convert to date value (in days)
-    long days = DateWritable.millisToDays(tcv.getTime(index));
+    long days = DateWritableV2.millisToDays(tcv.getTime(index));
     if (isPositive) {
       days += numDays;
     } else {
@@ -328,13 +326,14 @@ public class VectorUDFDateAddColScalar extends VectorExpression {
   protected void evaluateString(ColumnVector columnVector, LongColumnVector outputVector, int i) {
     BytesColumnVector bcv = (BytesColumnVector) columnVector;
     text.set(bcv.vector[i], bcv.start[i], bcv.length[i]);
-    boolean parsed = dateParser.parseDate(text.toString(), date);
+    org.apache.hadoop.hive.common.type.Date hDate = new org.apache.hadoop.hive.common.type.Date();
+    boolean parsed = dateParser.parseDate(text.toString(), hDate);
     if (!parsed) {
       outputVector.noNulls = false;
       outputVector.isNull[i] = true;
       return;
     }
-    long days = DateWritable.millisToDays(date.getTime());
+    long days = DateWritableV2.millisToDays(hDate.toEpochMilli());
     if (isPositive) {
       days += numDays;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
index c861321..669ca55 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
@@ -18,17 +18,17 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hive.common.util.DateParser;
 
 import java.nio.charset.StandardCharsets;
-import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.Arrays;
 
@@ -46,7 +46,7 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
   protected boolean isPositive = true;
 
   private transient final DateParser dateParser = new DateParser();
-  private transient final Date baseDate = new Date(0);
+  private transient final Date baseDate = new Date();
 
   // Transient members initialized by transientInit method.
   private transient PrimitiveCategory primitiveCategory;
@@ -99,11 +99,11 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
 
     switch (primitiveCategory) {
       case DATE:
-        baseDate.setTime(DateWritable.daysToMillis((int) longValue));
+        baseDate.setTimeInMillis(DateWritableV2.daysToMillis((int) longValue));
         break;
 
       case TIMESTAMP:
-        baseDate.setTime(timestampValue.getTime());
+        baseDate.setTimeInMillis(timestampValue.getTime());
         break;
 
       case STRING:
@@ -137,7 +137,7 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
     // We do not need to do a column reset since we are carefully changing the output.
     outputColVector.isRepeating = false;
 
-    long baseDateDays = DateWritable.millisToDays(baseDate.getTime());
+    long baseDateDays = DateWritableV2.millisToDays(baseDate.toEpochMilli());
     if (inputCol.isRepeating) {
       if (inputCol.noNulls || !inputCol.isNull[0]) {
         outputColVector.isNull[0] = false;
@@ -245,11 +245,12 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
   public String vectorExpressionParameters() {
     String value;
     if (object instanceof Long) {
-      Date tempDate = new Date(0);
-      tempDate.setTime(DateWritable.daysToMillis((int) longValue));
+      Date tempDate = new Date();
+      tempDate.setTimeInMillis(DateWritableV2.daysToMillis((int) longValue));
       value = tempDate.toString();
     } else if (object instanceof Timestamp) {
-      value = this.timestampValue.toString();
+      value = org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
+          timestampValue.getTime(), timestampValue.getNanos()).toString();
     } else if (object instanceof byte []) {
       value = new String(this.stringValue, StandardCharsets.UTF_8);
     } else {

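Editor's note: for EXPLAIN output, vectorExpressionParameters() now routes the scalar through the new time-zone-agnostic types before printing. A sketch of the date branch (setTimeInMillis and daysToMillis are the calls from the hunk; the Hive classpath and the literal 17000 are illustrative):

    import org.apache.hadoop.hive.common.type.Date;
    import org.apache.hadoop.hive.serde2.io.DateWritableV2;

    public class ScalarRenderSketch {
        public static void main(String[] args) {
            Date tempDate = new Date();
            tempDate.setTimeInMillis(DateWritableV2.daysToMillis(17000));
            System.out.println(tempDate); // 2016-07-18, regardless of local zone
        }
    }
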
http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
index bb12fcb..ff29ee3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
@@ -20,13 +20,12 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -217,7 +216,7 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
         String string = new String(input.vector[0], input.start[0], input.length[0]);
         try {
           date.setTime(formatter.parse(string).getTime());
-          output.vector[0] = DateWritable.dateToDays(date);
+          output.vector[0] = DateWritableV2.dateToDays(date);
           output.isNull[0] = false;
         } catch (ParseException e) {
           output.isNull[0] = true;
@@ -296,7 +295,7 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
     String string = new String(input.vector[i], input.start[i], input.length[i]);
     try {
       date.setTime(formatter.parse(string).getTime());
-      output.vector[i] = DateWritable.dateToDays(date);
+      output.vector[i] = DateWritableV2.dateToDays(date);
     } catch (ParseException e) {
       output.isNull[i] = true;
       output.noNulls = false;
@@ -314,7 +313,7 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
     if (input.isRepeating) {
       if (input.noNulls || !input.isNull[0]) {
         date.setTime(input.getTime(0));
-        output.vector[0] = DateWritable.dateToDays(date);
+        output.vector[0] = DateWritableV2.dateToDays(date);
         output.isNull[0] = false;
       } else {
         output.isNull[0] = true;
@@ -334,14 +333,14 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
           for (int j=0; j < size; j++) {
             int i = sel[j];
             date.setTime(input.getTime(i));
-            output.vector[i] = DateWritable.dateToDays(date);
+            output.vector[i] = DateWritableV2.dateToDays(date);
             output.isNull[i] = false;
           }
         } else {
           for (int j=0; j < size; j++) {
             int i = sel[j];
             date.setTime(input.getTime(i));
-            output.vector[i] = DateWritable.dateToDays(date);
+            output.vector[i] = DateWritableV2.dateToDays(date);
           }
         }
       } else {
@@ -352,7 +351,7 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
         }
         for (int i = 0; i < size; i++) {
           date.setTime(input.getTime(i));
-          output.vector[i] = DateWritable.dateToDays(date);
+          output.vector[i] = DateWritableV2.dateToDays(date);
         }
       }
     } else /* there are nulls in our column */ {
@@ -373,14 +372,14 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
           int i = sel[j];
           if (!input.isNull[i]) {
             date.setTime(input.getTime(i));
-            output.vector[i] = DateWritable.dateToDays(date);
+            output.vector[i] = DateWritableV2.dateToDays(date);
           }
         }
       } else {
         for (int i = 0; i < size; i++) {
           if (!input.isNull[i]) {
             date.setTime(input.getTime(i));
-            output.vector[i] = DateWritable.dateToDays(date);
+            output.vector[i] = DateWritableV2.dateToDays(date);
           }
         }
       }

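Editor's note: datediff reduces both sides to epoch days and subtracts, which is why every branch above funnels into DateWritableV2.dateToDays. The semantics in isolation, as a java.time sketch (behavioral mirror only, not Hive code):

    import java.time.LocalDate;

    public class DateDiffSketch {
        public static void main(String[] args) {
            // datediff('2018-06-25', '2018-06-01') = 24
            long diff = LocalDate.parse("2018-06-25").toEpochDay()
                      - LocalDate.parse("2018-06-01").toEpochDay();
            System.out.println(diff);
        }
    }
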
http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
index 55af413..caedc80 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.metastore.parser.ExpressionTree.Operator;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -26,12 +25,11 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.io.Text;
 
-import java.nio.charset.StandardCharsets;
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.text.ParseException;
@@ -106,7 +104,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
 
       case TIMESTAMP:
         date.setTime(timestampValue.getTime());
-        baseDate = DateWritable.dateToDays(date);
+        baseDate = DateWritableV2.dateToDays(date);
         break;
 
       case STRING:
@@ -114,7 +112,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
       case VARCHAR:
         try {
           date.setTime(formatter.parse(new String(bytesValue, "UTF-8")).getTime());
-          baseDate = DateWritable.dateToDays(date);
+          baseDate = DateWritableV2.dateToDays(date);
           break;
         } catch (Exception e) {
           outputColVector.noNulls = false;
@@ -346,7 +344,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
   protected int evaluateTimestamp(ColumnVector columnVector, int index) {
     TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
     date.setTime(tcv.getTime(index));
-    return DateWritable.dateToDays(date) - baseDate;
+    return DateWritableV2.dateToDays(date) - baseDate;
   }
 
   protected int evaluateDate(ColumnVector columnVector, int index) {
@@ -359,7 +357,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
     text.set(bcv.vector[i], bcv.start[i], bcv.length[i]);
     try {
       date.setTime(formatter.parse(text.toString()).getTime());
-      output.vector[i] = DateWritable.dateToDays(date) - baseDate;
+      output.vector[i] = DateWritableV2.dateToDays(date) - baseDate;
     } catch (ParseException e) {
       output.vector[i] = 1;
       output.isNull[i] = true;

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
index c51d3cd..28addf7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.io.Text;
@@ -105,7 +105,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
 
       case TIMESTAMP:
         date.setTime(timestampValue.getTime());
-        baseDate = DateWritable.dateToDays(date);
+        baseDate = DateWritableV2.dateToDays(date);
         break;
 
       case STRING:
@@ -113,7 +113,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
       case VARCHAR:
         try {
           date.setTime(formatter.parse(new String(stringValue, "UTF-8")).getTime());
-          baseDate = DateWritable.dateToDays(date);
+          baseDate = DateWritableV2.dateToDays(date);
           break;
         } catch (Exception e) {
           outputColVector.noNulls = false;
@@ -346,7 +346,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
   protected int evaluateTimestamp(ColumnVector columnVector, int index) {
     TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
     date.setTime(tcv.getTime(index));
-    return baseDate - DateWritable.dateToDays(date);
+    return baseDate - DateWritableV2.dateToDays(date);
   }
 
   protected int evaluateDate(ColumnVector columnVector, int index) {
@@ -359,7 +359,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
     text.set(bcv.vector[i], bcv.start[i], bcv.length[i]);
     try {
       date.setTime(formatter.parse(text.toString()).getTime());
-      output.vector[i] = baseDate - DateWritable.dateToDays(date);
+      output.vector[i] = baseDate - DateWritableV2.dateToDays(date);
     } catch (ParseException e) {
       output.vector[i] = 1;
       output.isNull[i] = true;

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
index 202f5d8..8e5f9da 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
@@ -18,14 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-
-import java.io.UnsupportedEncodingException;
-import java.sql.Date;
-import java.text.SimpleDateFormat;
-
 /**
  * Vectorized version of TO_DATE(TIMESTAMP)/TO_DATE(DATE).
  * As TO_DATE() now returns DATE type, this should be the same behavior as the DATE cast operator.

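Editor's note: with TO_DATE() returning DATE, the cast boils down to truncating a UTC timestamp to whole epoch days. A minimal sketch of that truncation (millisToDaysUtc is a hypothetical name; DateWritableV2.millisToDays is the actual helper used elsewhere in this patch):

    public class ToDateSketch {
        // floorDiv, not plain division, keeps pre-1970 timestamps on the
        // correct calendar day.
        static int millisToDaysUtc(long millis) {
            return (int) Math.floorDiv(millis, 86_400_000L);
        }

        public static void main(String[] args) {
            System.out.println(millisToDaysUtc(1468803600000L)); // 17000 (2016-07-18)
            System.out.println(millisToDaysUtc(-1L));            // -1   (1969-12-31)
        }
    }
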
http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java
index 8d87ef7..accf32c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java
@@ -18,17 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.ql.exec.vector.VectorGroupByOperator;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hive.common.util.DateParser;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import java.sql.Date;
-
 /**
  * Vectorized version of TO_DATE(STRING)
  * As TO_DATE() now returns DATE type, this should be the same behavior as the DATE cast operator.

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
index 7e7080c..5b497e5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
@@ -18,15 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-
-import java.io.UnsupportedEncodingException;
-import java.sql.Date;
-import java.text.SimpleDateFormat;
-
 /**
  * Vectorized version of TO_DATE(timestamp).
  * As TO_DATE() now returns DATE type, this should be the same behavior as the DATE cast operator.

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekString.java
index a701d70..e8f1454 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekString.java
@@ -23,6 +23,7 @@ import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
+import java.util.TimeZone;
 
 import org.apache.hadoop.io.Text;
 
@@ -35,7 +36,8 @@ public final class VectorUDFDayOfWeekString extends VectorUDFTimestampFieldStrin
   private static final long serialVersionUID = 1L;
 
   private transient final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
-  private transient final Calendar calendar = Calendar.getInstance();
+  private transient final Calendar calendar = Calendar.getInstance(
+      TimeZone.getTimeZone("UTC"));
 
   public VectorUDFDayOfWeekString(int colNum, int outputColumnNum) {
     super(colNum, outputColumnNum, -1, -1);

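Editor's note: the recurring change in the field-extraction classes is pinning the Calendar to UTC, so a date stored as epoch days reports the same field value on every host. A self-contained demonstration:

    import java.util.Calendar;
    import java.util.TimeZone;

    public class UtcCalendarSketch {
        public static void main(String[] args) {
            // 1970-01-01T00:00:00Z was a Thursday. A default-zone calendar on a
            // host west of UTC would report Wednesday for the same instant.
            Calendar utc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
            utc.setTimeInMillis(0L);
            System.out.println(utc.get(Calendar.DAY_OF_WEEK)); // 5 = Thursday
        }
    }
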
http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
index a1167b2..837de9d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
@@ -20,13 +20,14 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.util.Arrays;
 import java.util.Calendar;
+import java.util.TimeZone;
 
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hive.common.util.DateUtils;
@@ -43,7 +44,8 @@ public abstract class VectorUDFTimestampFieldDate extends VectorExpression {
   protected final int colNum;
   protected final int field;
 
-  protected transient final Calendar calendar = Calendar.getInstance();
+  protected transient final Calendar calendar = Calendar.getInstance(
+      TimeZone.getTimeZone("UTC"));
 
   public VectorUDFTimestampFieldDate(int field, int colNum, int outputColumnNum) {
     super(outputColumnNum);
@@ -69,7 +71,7 @@ public abstract class VectorUDFTimestampFieldDate extends VectorExpression {
   }
 
   protected long getDateField(long days) {
-    calendar.setTimeInMillis(DateWritable.daysToMillis((int) days));
+    calendar.setTimeInMillis(DateWritableV2.daysToMillis((int) days));
     return calendar.get(field);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
index 931cc98..9acfa86 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 import java.text.ParseException;
 import java.util.Calendar;
+import java.util.TimeZone;
 
 /**
  * Abstract class to return various fields from a String.
@@ -39,7 +40,8 @@ public abstract class VectorUDFTimestampFieldString extends VectorExpression {
   protected final int fieldLength;
   private static final String patternMin = "0000-00-00 00:00:00.000000000";
   private static final String patternMax = "9999-19-99 29:59:59.999999999";
-  protected transient final Calendar calendar = Calendar.getInstance();
+  protected transient final Calendar calendar = Calendar.getInstance(
+      TimeZone.getTimeZone("UTC"));
 
   public VectorUDFTimestampFieldString(int colNum, int outputColumnNum, int fieldStart, int fieldLength) {
     super(outputColumnNum);

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
index a9ea1f6..94e8b47 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.util.Arrays;
 import java.util.Calendar;
+import java.util.TimeZone;
 
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -43,7 +44,8 @@ public abstract class VectorUDFTimestampFieldTimestamp extends VectorExpression
   protected final int colNum;
   protected final int field;
 
-  protected transient final Calendar calendar = Calendar.getInstance();
+  protected transient final Calendar calendar = Calendar.getInstance(
+      TimeZone.getTimeZone("UTC"));
 
   public VectorUDFTimestampFieldTimestamp(int field, int colNum, int outputColumnNum) {
     super(outputColumnNum);

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
index 3515329..1f83ead 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Return Unix Timestamp.
@@ -28,7 +28,7 @@ public final class VectorUDFUnixTimeStampDate extends VectorUDFTimestampFieldDat
 
   private static final long serialVersionUID = 1L;
 
-  private DateWritable dateWritable;
+  private DateWritableV2 dateWritable;
 
   @Override
   protected long getDateField(long days) {
@@ -39,7 +39,7 @@ public final class VectorUDFUnixTimeStampDate extends VectorUDFTimestampFieldDat
   public VectorUDFUnixTimeStampDate(int colNum, int outputColumnNum) {
     /* not a real field */
     super(-1, colNum, outputColumnNum);
-    dateWritable = new DateWritable();
+    dateWritable = new DateWritableV2();
   }
 
   public VectorUDFUnixTimeStampDate() {

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampString.java
index aaea433..3b5b33b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampString.java
@@ -25,6 +25,7 @@ import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
+import java.util.TimeZone;
 
 /**
  * Return Unix Timestamp.
@@ -34,8 +35,9 @@ public final class VectorUDFUnixTimeStampString extends VectorUDFTimestampFieldS
 
   private static final long serialVersionUID = 1L;
 
-  private transient final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-  private transient final Calendar calendar = Calendar.getInstance();
+  private transient final SimpleDateFormat format = getFormatter();
+  private transient final Calendar calendar = Calendar.getInstance(
+      TimeZone.getTimeZone("UTC"));
 
   public VectorUDFUnixTimeStampString(int colNum, int outputColumnNum) {
     super(colNum, outputColumnNum, -1, -1);
@@ -56,4 +58,10 @@ public final class VectorUDFUnixTimeStampString extends VectorUDFTimestampFieldS
     calendar.setTime(date);
     return calendar.getTimeInMillis() / 1000;
   }
+
+  private static SimpleDateFormat getFormatter() {
+    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+    format.setTimeZone(TimeZone.getTimeZone("UTC"));
+    return format;
+  }
 }

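Editor's note: the getFormatter() helper exists because a SimpleDateFormat parses relative to its own time zone; pinning it to UTC makes unix_timestamp(string) host-independent. A self-contained check:

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.TimeZone;

    public class UtcFormatterSketch {
        public static void main(String[] args) throws ParseException {
            SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            format.setTimeZone(TimeZone.getTimeZone("UTC"));
            // Exactly epoch second 0 on every host; with the default zone the
            // result would shift by the local UTC offset.
            long unixTime = format.parse("1970-01-01 00:00:00").getTime() / 1000;
            System.out.println(unixTime); // 0
        }
    }
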
http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
index e011657..31ef6ad 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
@@ -17,37 +17,17 @@
  */
 package org.apache.hadoop.hive.ql.exec.vector.udf;
 
-import java.sql.Date;
-import java.sql.Timestamp;
-import java.util.Map;
-
-import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
-import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.SettableMapObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.*;
-import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.Text;
 
 /**
  * A VectorUDFAdaptor is a vectorized expression for invoking a custom

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
index c88ee99..434a5b8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.hive.llap.DebugUtils;
 
 import java.util.Arrays;
 
-import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -37,6 +37,7 @@ import java.util.Map;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -47,13 +48,12 @@ import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
@@ -429,18 +429,18 @@ public abstract class BatchToRowReader<StructType, UnionType>
     }
   }
 
-  public static DateWritable nextDate(ColumnVector vector,
-                               int row,
-                               Object previous) {
+  public static DateWritableV2 nextDate(ColumnVector vector,
+                                        int row,
+                                        Object previous) {
     if (vector.isRepeating) {
       row = 0;
     }
     if (vector.noNulls || !vector.isNull[row]) {
-      DateWritable result;
-      if (previous == null || previous.getClass() != DateWritable.class) {
-        result = new DateWritable();
+      DateWritableV2 result;
+      if (previous == null || previous.getClass() != DateWritableV2.class) {
+        result = new DateWritableV2();
       } else {
-        result = (DateWritable) previous;
+        result = (DateWritableV2) previous;
       }
       int date = (int) ((LongColumnVector) vector).vector[row];
       result.set(date);
@@ -450,18 +450,18 @@ public abstract class BatchToRowReader<StructType, UnionType>
     }
   }
 
-  public static TimestampWritable nextTimestamp(ColumnVector vector,
-                                         int row,
-                                         Object previous) {
+  public static TimestampWritableV2 nextTimestamp(ColumnVector vector,
+                                                  int row,
+                                                  Object previous) {
     if (vector.isRepeating) {
       row = 0;
     }
     if (vector.noNulls || !vector.isNull[row]) {
-      TimestampWritable result;
-      if (previous == null || previous.getClass() != TimestampWritable.class) {
-        result = new TimestampWritable();
+      TimestampWritableV2 result;
+      if (previous == null || previous.getClass() != TimestampWritableV2.class) {
+        result = new TimestampWritableV2();
       } else {
-        result = (TimestampWritable) previous;
+        result = (TimestampWritableV2) previous;
       }
       TimestampColumnVector tcv = (TimestampColumnVector) vector;
       result.setInternal(tcv.time[row], tcv.nanos[row]);
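[Editor's note: a caller-side sketch of the contract nextDate() preserves across the V2 switch — one writable is threaded through the loop so later rows recycle it instead of allocating per row. The batch and column index are illustrative assumptions, not part of the patch.]

    Object reuse = null;
    LongColumnVector dates = (LongColumnVector) batch.cols[0];  // date columns hold epoch days
    for (int r = 0; r < batch.size; r++) {
      reuse = BatchToRowReader.nextDate(dates, r, reuse);
      // 'reuse' now holds a DateWritableV2 for row r; the first pass
      // (previous == null) allocates, every later pass recycles the instance
    }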

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
index 56d590c..e7dfb05 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
@@ -58,6 +58,7 @@ public final class OrcFile extends org.apache.orc.OrcFile {
   public static class ReaderOptions extends org.apache.orc.OrcFile.ReaderOptions {
     public ReaderOptions(Configuration conf) {
       super(conf);
+      useUTCTimestamp(true);
     }
 
     public ReaderOptions filesystem(FileSystem fs) {
@@ -79,6 +80,11 @@ public final class OrcFile extends org.apache.orc.OrcFile {
       super.orcTail(orcTail);
       return this;
     }
+
+    public ReaderOptions useUTCTimestamp(boolean value) {
+      super.useUTCTimestamp(value);
+      return this;
+    }
   }
 
   public static ReaderOptions readerOptions(Configuration conf) {
@@ -104,6 +110,7 @@ public final class OrcFile extends org.apache.orc.OrcFile {
 
     WriterOptions(Properties tableProperties, Configuration conf) {
       super(tableProperties, conf);
+      useUTCTimestamp(true);
     }
 
    /**
@@ -275,6 +282,11 @@ public final class OrcFile extends org.apache.orc.OrcFile {
       return this;
     }
 
+    public WriterOptions useUTCTimestamp(boolean value) {
+      super.useUTCTimestamp(value);
+      return this;
+    }
+
     ObjectInspector getInspector() {
       return inspector;
     }
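[Editor's note: two things happen in this OrcFile change — the Hive-side options now default to UTC timestamps in their constructors, and each class re-declares useUTCTimestamp with a covariant return type so a fluent chain never degrades to the org.apache.orc base type. A hedged usage sketch, with conf, fs and path assumed in scope:]

    OrcFile.ReaderOptions opts = OrcFile.readerOptions(conf)  // constructor already calls useUTCTimestamp(true)
        .filesystem(fs)                                       // still Hive's ReaderOptions, no cast needed
        .useUTCTimestamp(true);                               // redundant here; shown to illustrate the chain
    Reader reader = OrcFile.createReader(path, opts);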

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
index c6fe4fc..d177e3f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
@@ -38,20 +38,19 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.orc.OrcFile;
 import org.apache.orc.TypeDescription;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -415,18 +414,18 @@ public class RecordReaderImpl extends org.apache.orc.impl.RecordReaderImpl
     }
   }
 
-  static DateWritable nextDate(ColumnVector vector,
-                               int row,
-                               Object previous) {
+  static DateWritableV2 nextDate(ColumnVector vector,
+                                 int row,
+                                 Object previous) {
     if (vector.isRepeating) {
       row = 0;
     }
     if (vector.noNulls || !vector.isNull[row]) {
-      DateWritable result;
-      if (previous == null || previous.getClass() != DateWritable.class) {
-        result = new DateWritable();
+      DateWritableV2 result;
+      if (previous == null || previous.getClass() != DateWritableV2.class) {
+        result = new DateWritableV2();
       } else {
-        result = (DateWritable) previous;
+        result = (DateWritableV2) previous;
       }
       int date = (int) ((LongColumnVector) vector).vector[row];
       result.set(date);
@@ -436,18 +435,18 @@ public class RecordReaderImpl extends org.apache.orc.impl.RecordReaderImpl
     }
   }
 
-  static TimestampWritable nextTimestamp(ColumnVector vector,
-                                         int row,
-                                         Object previous) {
+  static TimestampWritableV2 nextTimestamp(ColumnVector vector,
+                                           int row,
+                                           Object previous) {
     if (vector.isRepeating) {
       row = 0;
     }
     if (vector.noNulls || !vector.isNull[row]) {
-      TimestampWritable result;
-      if (previous == null || previous.getClass() != TimestampWritable.class) {
-        result = new TimestampWritable();
+      TimestampWritableV2 result;
+      if (previous == null || previous.getClass() != TimestampWritableV2.class) {
+        result = new TimestampWritableV2();
       } else {
-        result = (TimestampWritable) previous;
+        result = (TimestampWritableV2) previous;
       }
       TimestampColumnVector tcv = (TimestampColumnVector) vector;
       result.setInternal(tcv.time[row], tcv.nanos[row]);

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
index 91a01e9..4082c61 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
@@ -19,19 +19,17 @@
 package org.apache.hadoop.hive.ql.io.orc;
 
 import java.io.IOException;
-import java.sql.Timestamp;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -43,6 +41,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
@@ -63,8 +62,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectIn
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.orc.PhysicalWriter;
 import org.apache.orc.TypeDescription;
 
 /**
@@ -206,9 +203,9 @@ public class WriterImpl extends org.apache.orc.impl.WriterImpl implements Writer
             }
             case TIMESTAMP: {
               TimestampColumnVector vector = (TimestampColumnVector) column;
-              Timestamp ts = ((TimestampObjectInspector) inspector)
-                  .getPrimitiveJavaObject(obj);
-              vector.set(rowId, ts);
+              vector.setIsUTC(true);
+              vector.set(rowId, ((TimestampObjectInspector) inspector)
+                  .getPrimitiveJavaObject(obj).toSqlTimestamp());
               break;
             }
             case DATE: {
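[Editor's note: the TIMESTAMP branch above is the writer-side half of the UTC switch — the column vector is flagged as carrying UTC values, and the new timezone-free org.apache.hadoop.hive.common.type.Timestamp is converted back to java.sql.Timestamp for vectorized storage. A minimal sketch of that pairing; the literal value and row id are illustrative:]

    TimestampColumnVector tcv = new TimestampColumnVector();
    Timestamp hiveTs = Timestamp.valueOf("2018-06-25 04:40:28");  // hive.common.type.Timestamp, zone-free
    tcv.setIsUTC(true);                       // mark this vector's values as UTC-based
    tcv.set(0, hiveTs.toSqlTimestamp());      // java.sql.Timestamp view of the same instant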

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
index 8be8d13..08788cf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
@@ -14,18 +14,18 @@
 package org.apache.hadoop.hive.ql.io.parquet.convert;
 
 import java.math.BigDecimal;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -340,12 +340,12 @@ public enum ETypeConverter {
       };
     }
   },
-  ETIMESTAMP_CONVERTER(TimestampWritable.class) {
+  ETIMESTAMP_CONVERTER(TimestampWritableV2.class) {
     @Override
     PrimitiveConverter getConverter(final PrimitiveType type, final int index, final ConverterParent parent, TypeInfo hiveTypeInfo) {
-      return new BinaryConverter<TimestampWritable>(type, parent, index) {
+      return new BinaryConverter<TimestampWritableV2>(type, parent, index) {
         @Override
-        protected TimestampWritable convert(Binary binary) {
+        protected TimestampWritableV2 convert(Binary binary) {
           NanoTime nt = NanoTime.fromBinary(binary);
           Map<String, String> metadata = parent.getMetadata();
           //Current Hive parquet timestamp implementation stores it in UTC, but other components do not do that.
@@ -353,18 +353,18 @@ public enum ETypeConverter {
           boolean skipConversion = Boolean.parseBoolean(
               metadata.get(HiveConf.ConfVars.HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION.varname));
           Timestamp ts = NanoTimeUtils.getTimestamp(nt, skipConversion);
-          return new TimestampWritable(ts);
+          return new TimestampWritableV2(ts);
         }
       };
     }
   },
-  EDATE_CONVERTER(DateWritable.class) {
+  EDATE_CONVERTER(DateWritableV2.class) {
     @Override
     PrimitiveConverter getConverter(final PrimitiveType type, final int index, final ConverterParent parent, TypeInfo hiveTypeInfo) {
       return new PrimitiveConverter() {
         @Override
         public void addInt(final int value) {
-          parent.set(index, new DateWritable(value));
+          parent.set(index, new DateWritableV2(value));
         }
       };
     }
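[Editor's note: for reference, the INT96 decode path the anonymous converter above implements, written out linearly; binary and metadata come from the Parquet page and are assumed in scope, and every call already appears in the hunk.]

    NanoTime nt = NanoTime.fromBinary(binary);                      // julian day + nanos of day
    boolean skipConversion = Boolean.parseBoolean(
        metadata.get(HiveConf.ConfVars.HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION.varname));
    Timestamp ts = NanoTimeUtils.getTimestamp(nt, skipConversion);  // UTC calendar unless skipped
    TimestampWritableV2 out = new TimestampWritableV2(ts);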

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java
index dead324..106ace0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java
@@ -18,7 +18,7 @@ import java.nio.charset.CharacterCodingException;
 
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaStringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableStringObjectInspector;
 import org.apache.hadoop.io.BytesWritable;
@@ -29,7 +29,7 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.BooleanWritable;
 
 /**
- * The ParquetStringInspector inspects a BytesWritable, TimestampWritable, HiveDecimalWritable,
+ * The ParquetStringInspector inspects a BytesWritable, TimestampWritableV2, HiveDecimalWritable,
  * DoubleWritable, FloatWritable, LongWritable, IntWritable, and BooleanWritable to give a Text
  * or String.
  *
@@ -58,7 +58,7 @@ public class ParquetStringInspector extends JavaStringObjectInspector implements
       return new Text((String) o);
     }
 
-    if ((o instanceof TimestampWritable) || (o instanceof HiveDecimalWritable)
+    if ((o instanceof TimestampWritableV2) || (o instanceof HiveDecimalWritable)
         || (o instanceof DoubleWritable) || (o instanceof FloatWritable)
         || (o instanceof LongWritable) || (o instanceof IntWritable)
         || (o instanceof BooleanWritable)) {
@@ -90,7 +90,7 @@ public class ParquetStringInspector extends JavaStringObjectInspector implements
       return (String) o;
     }
 
-    if ((o instanceof TimestampWritable) || (o instanceof HiveDecimalWritable)
+    if ((o instanceof TimestampWritableV2) || (o instanceof HiveDecimalWritable)
         || (o instanceof DoubleWritable) || (o instanceof FloatWritable)
         || (o instanceof LongWritable) || (o instanceof IntWritable)
         || (o instanceof BooleanWritable)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
index 677fb53..bf78d8c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
@@ -13,12 +13,13 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.timestamp;
 
-import java.sql.Timestamp;
 import java.util.Calendar;
 import java.util.GregorianCalendar;
 import java.util.TimeZone;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.hadoop.hive.common.type.Timestamp;
+
 import jodd.datetime.JDateTime;
 
 /**
@@ -58,7 +59,7 @@ public class NanoTimeUtils {
    public static NanoTime getNanoTime(Timestamp ts, boolean skipConversion) {
 
      Calendar calendar = getCalendar(skipConversion);
-     calendar.setTime(ts);
+     calendar.setTimeInMillis(ts.toEpochMilli());
      int year = calendar.get(Calendar.YEAR);
      if (calendar.get(Calendar.ERA) == GregorianCalendar.BC) {
        year = 1 - year;
@@ -106,8 +107,7 @@ public class NanoTimeUtils {
      calendar.set(Calendar.HOUR_OF_DAY, hour);
      calendar.set(Calendar.MINUTE, minutes);
      calendar.set(Calendar.SECOND, seconds);
-     Timestamp ts = new Timestamp(calendar.getTimeInMillis());
-     ts.setNanos((int) nanos);
+     Timestamp ts = Timestamp.ofEpochMilli(calendar.getTimeInMillis(), (int) nanos);
      return ts;
    }
 }
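[Editor's note: the pattern to notice in this hunk — Calendar no longer receives a Timestamp object, only explicit epoch millis, so the JVM default zone stays out of the conversion and the nanos travel separately. A small round-trip sketch under that API; valueOf parsing 'yyyy-mm-dd hh:mm:ss[.f...]' is an assumption about the new Timestamp type:]

    Timestamp ts = Timestamp.valueOf("2018-06-25 04:40:28.123456789");
    Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    cal.setTimeInMillis(ts.toEpochMilli());   // millis in, as getNanoTime() now does
    Timestamp back = Timestamp.ofEpochMilli(cal.getTimeInMillis(), ts.getNanos());
    // 'back' should equal 'ts': millis round-trip through the calendar, nanos reattached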

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java
index 4fd4cfd..26a4511 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java
@@ -19,10 +19,10 @@
 package org.apache.hadoop.hive.ql.io.parquet.vector;
 
 import org.apache.parquet.bytes.ByteBufferInputStream;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.parquet.column.Dictionary;
 
 import java.io.IOException;
-import java.sql.Timestamp;
 
 /**
  * The interface to wrap the underlying Parquet dictionary and non dictionary encoded page reader.

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
index f5f19e1..9170e9f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.io.parquet.vector;
 
 import org.apache.hadoop.hive.common.type.HiveBaseChar;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
@@ -41,7 +42,6 @@ import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
-import java.sql.Timestamp;
 import java.util.Arrays;
 
 /**

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
index e89a736..e4c6156 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
@@ -13,6 +13,7 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.vector;
 
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -387,7 +388,7 @@ public class VectorizedPrimitiveColumnReader extends BaseVectorizedColumnReader
         switch (descriptor.getType()) {
         //INT64 is not yet supported
         case INT96:
-          c.set(rowId, dataColumn.readTimestamp());
+          c.set(rowId, dataColumn.readTimestamp().toSqlTimestamp());
           break;
         default:
           throw new IOException(
@@ -510,7 +511,7 @@ public class VectorizedPrimitiveColumnReader extends BaseVectorizedColumnReader
     case TIMESTAMP:
       for (int i = rowId; i < rowId + num; ++i) {
         ((TimestampColumnVector) column)
-            .set(i, dictionary.readTimestamp((int) dictionaryIds.vector[i]));
+            .set(i, dictionary.readTimestamp((int) dictionaryIds.vector[i]).toSqlTimestamp());
       }
       break;
     case INTERVAL_DAY_TIME:

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
index cf1210b..3d61c33 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
@@ -13,12 +13,12 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.write;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -46,9 +46,9 @@ import org.apache.parquet.io.api.RecordConsumer;
 import org.apache.parquet.schema.GroupType;
 import org.apache.parquet.schema.OriginalType;
 import org.apache.parquet.schema.Type;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.List;
 import java.util.Map;
 
@@ -550,7 +550,7 @@ public class DataWritableWriter {
     @Override
     public void write(Object value) {
       Date vDate = inspector.getPrimitiveJavaObject(value);
-      recordConsumer.addInteger(DateWritable.dateToDays(vDate));
+      recordConsumer.addInteger(DateWritableV2.dateToDays(vDate));
     }
   }
 }
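[Editor's note: DateWritableV2.dateToDays() takes the new org.apache.hadoop.hive.common.type.Date, which models a bare day count rather than wrapping a zoned millisecond value, so the integer handed to Parquet cannot shift with the JVM time zone. A short sketch; Date.valueOf's 'yyyy-MM-dd' parsing is assumed:]

    Date d = Date.valueOf("2018-06-25");      // hive.common.type.Date
    int days = DateWritableV2.dateToDays(d);  // epoch-day count (17707 here), zone-independent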

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
index 44687ef..7af6dab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
@@ -54,7 +54,7 @@ import org.apache.hadoop.hive.ql.metadata.UniqueConstraint.UniqueConstraintCol;
 import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo.ForeignKeyCol;
 import org.apache.hadoop.hive.ql.plan.DescTableDesc;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hive.common.util.HiveStringUtils;
 
 import com.google.common.collect.Lists;
@@ -107,7 +107,7 @@ public final class MetaDataFormatUtils {
       return "";
     }
 
-    DateWritable writableValue = new DateWritable((int) val.getDaysSinceEpoch());
+    DateWritableV2 writableValue = new DateWritableV2((int) val.getDaysSinceEpoch());
     return writableValue.toString();
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
index 857f300..f3ec709 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
@@ -66,7 +66,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMin;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -180,7 +180,7 @@ public class StatsOptimizer extends Transform {
 
     enum DateSubType {
       DAYS {@Override
-        Object cast(long longValue) { return (new DateWritable((int)longValue)).get();}
+        Object cast(long longValue) { return (new DateWritableV2((int)longValue)).get();}
       };
       abstract Object cast(long longValue);
     }
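[Editor's note: one subtlety in this one-liner — DateWritableV2.get() now returns org.apache.hadoop.hive.common.type.Date rather than java.sql.Date, so consumers of the stats-computed cast value see the new type. Sketch, with the day count chosen purely for illustration:]

    Object castBack = new DateWritableV2(17707).get();  // hive.common.type.Date for 2018-06-25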

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
index 12af94e..d950991 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
@@ -18,8 +18,6 @@
 package org.apache.hadoop.hive.ql.optimizer.calcite.translator;
 
 import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
@@ -45,9 +43,11 @@ import org.apache.calcite.sql.type.SqlTypeUtil;
 import org.apache.calcite.util.DateString;
 import org.apache.calcite.util.TimeString;
 import org.apache.calcite.util.TimestampString;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.Hive;

http://git-wip-us.apache.org/repos/asf/hive/blob/b8fda81c/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
index 7cedab6..2ae015a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
@@ -46,12 +46,14 @@ import org.apache.calcite.util.ConversionUtil;
 import org.apache.calcite.util.DateString;
 import org.apache.calcite.util.NlsString;
 import org.apache.calcite.util.TimestampString;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -99,14 +101,11 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
 import java.math.BigDecimal;
 import java.math.BigInteger;
-import java.sql.Timestamp;
 import java.time.Instant;
 import java.util.ArrayList;
 import java.util.Calendar;
-import java.util.Date;
 import java.util.LinkedHashMap;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 
 public class RexNodeConverter {
@@ -394,8 +393,6 @@ public class RexNodeConverter {
       GenericUDF udf = func.getGenericUDF();
       if ((udf instanceof GenericUDFToChar) || (udf instanceof GenericUDFToVarchar)
           || (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate)
-          // Calcite can not specify the scale for timestamp. As a result, all
-          // the millisecond part will be lost
           || (udf instanceof GenericUDFTimestamp) || (udf instanceof GenericUDFToTimestampLocalTZ)
           || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)) {
         castExpr = cluster.getRexBuilder().makeAbstractCast(
@@ -701,9 +698,9 @@ public class RexNodeConverter {
       calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
       break;
     case DATE:
-      final Calendar cal = Calendar.getInstance(Locale.getDefault());
-      cal.setTime((Date) value);
-      calciteLiteral = rexBuilder.makeDateLiteral(DateString.fromCalendarFields(cal));
+      final Date date = (Date) value;
+      calciteLiteral = rexBuilder.makeDateLiteral(
+          DateString.fromDaysSinceEpoch(date.toEpochDay()));
       break;
     case TIMESTAMP:
       final TimestampString tsString;
@@ -711,9 +708,7 @@ public class RexNodeConverter {
         tsString = TimestampString.fromCalendarFields((Calendar) value);
       } else {
         final Timestamp ts = (Timestamp) value;
-        final Calendar calt = Calendar.getInstance(Locale.getDefault());
-        calt.setTimeInMillis(ts.getTime());
-        tsString = TimestampString.fromCalendarFields(calt).withNanos(ts.getNanos());
+        tsString = TimestampString.fromMillisSinceEpoch(ts.toEpochMilli()).withNanos(ts.getNanos());
       }
       // Must call makeLiteral, not makeTimestampLiteral
       // to have the RexBuilder.roundTime logic kick in