Posted to commits@hive.apache.org by mm...@apache.org on 2018/06/25 10:38:35 UTC

[27/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
index 6fd8e09..f163289 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
@@ -23,7 +23,7 @@ import org.junit.Test;
 import java.sql.Timestamp;
 import java.util.Random;
 
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.util.TimestampUtils;
 
 import static org.junit.Assert.*;
@@ -45,7 +45,7 @@ public class TestTimestampWritableAndColumnVector {
     Timestamp[] randTimestamps = new Timestamp[VectorizedRowBatch.DEFAULT_SIZE];
 
     for (int i = 0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
-      Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r).toSqlTimestamp();
+      Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r);
       randTimestamps[i] = randTimestamp;
       timestampColVector.set(i, randTimestamp);
     }
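
The package swap above tracks a return-type change: the serde2 RandomTypeUtil introduced by HIVE-12192 hands back Hive's own org.apache.hadoop.hive.common.type.Timestamp, which needs toSqlTimestamp() before it can feed java.sql consumers, while the restored org.apache.hadoop.hive.common.type.RandomTypeUtil returns java.sql.Timestamp directly. A minimal sketch of the restored path (the reverted one is in the '-' lines above):

    import java.sql.Timestamp;
    import java.util.Random;
    import org.apache.hadoop.hive.common.type.RandomTypeUtil;

    Random r = new Random(42);
    Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r);
    // No .toSqlTimestamp() hop: the generator already yields java.sql.Timestamp.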

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
index ffdc410..e65288b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
@@ -71,7 +71,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -1867,8 +1867,8 @@ public class TestVectorGroupByOperator {
           } else if (key instanceof LongWritable) {
             LongWritable lwKey = (LongWritable)key;
             keyValue.add(lwKey.get());
-          } else if (key instanceof TimestampWritableV2) {
-            TimestampWritableV2 twKey = (TimestampWritableV2)key;
+          } else if (key instanceof TimestampWritable) {
+            TimestampWritable twKey = (TimestampWritable)key;
             keyValue.add(twKey.getTimestamp());
           } else if (key instanceof DoubleWritable) {
             DoubleWritable dwKey = (DoubleWritable)key;
@@ -1988,9 +1988,9 @@ public class TestVectorGroupByOperator {
         } else if (key instanceof LongWritable) {
           LongWritable lwKey = (LongWritable)key;
           keyValue = lwKey.get();
-        } else if (key instanceof TimestampWritableV2) {
-          TimestampWritableV2 twKey = (TimestampWritableV2)key;
-          keyValue = twKey.getTimestamp().toSqlTimestamp();
+        } else if (key instanceof TimestampWritable) {
+          TimestampWritable twKey = (TimestampWritable)key;
+          keyValue = twKey.getTimestamp();
         } else if (key instanceof DoubleWritable) {
           DoubleWritable dwKey = (DoubleWritable)key;
           keyValue = dwKey.get();

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
index 70a481d..f51b8bb 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
@@ -21,12 +21,12 @@ package org.apache.hadoop.hive.ql.exec.vector;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.BRoundWithNumDigitsDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.ColAndCol;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.ColOrCol;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
index 6181ae8..ae91b73 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
@@ -18,10 +18,11 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.text.ParseException;
-
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
@@ -31,14 +32,12 @@ import java.util.Set;
 import org.apache.commons.lang.StringUtils;
 
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
@@ -1299,11 +1298,15 @@ public class VectorRandomRowSource {
   }
 
   public static String randomPrimitiveDateStringObject(Random r) {
-    return RandomTypeUtil.getRandDate(r).toString();
+    Date randomDate = RandomTypeUtil.getRandDate(r);
+    String randomDateString = randomDate.toString();
+    return randomDateString;
   }
 
   public static String randomPrimitiveTimestampStringObject(Random r) {
-    return RandomTypeUtil.getRandTimestamp(r).toString();
+    Timestamp randomTimestamp = RandomTypeUtil.getRandTimestamp(r);
+    String randomTimestampString = randomTimestamp.toString();
+    return randomTimestampString;
   }
 
   public static HiveChar getRandHiveChar(Random r, CharTypeInfo charTypeInfo) {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java
index 458aae8..ec5ad23 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java
@@ -19,17 +19,15 @@
 package org.apache.hadoop.hive.ql.exec.vector;
 
 import junit.framework.TestCase;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -37,7 +35,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
@@ -58,6 +56,8 @@ import org.apache.hadoop.io.Text;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -240,7 +240,7 @@ public class VectorVerifyFast {
       case DATE:
       {
         Date value = deserializeRead.currentDateWritable.get();
-        Date expected = ((DateWritableV2) object).get();
+        Date expected = ((DateWritable) object).get();
         if (!value.equals(expected)) {
           TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
         }
@@ -249,7 +249,7 @@ public class VectorVerifyFast {
       case TIMESTAMP:
       {
         Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
-        Timestamp expected = ((TimestampWritableV2) object).getTimestamp();
+        Timestamp expected = ((TimestampWritable) object).getTimestamp();
         if (!value.equals(expected)) {
           TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
         }
@@ -390,13 +390,13 @@ public class VectorVerifyFast {
       break;
       case DATE:
       {
-        Date value = ((DateWritableV2) object).get();
+        Date value = ((DateWritable) object).get();
         serializeWrite.writeDate(value);
       }
       break;
       case TIMESTAMP:
       {
-        Timestamp value = ((TimestampWritableV2) object).getTimestamp();
+        Timestamp value = ((TimestampWritable) object).getTimestamp();
         serializeWrite.writeTimestamp(value);
       }
       break;
@@ -567,9 +567,9 @@ public class VectorVerifyFast {
     case DECIMAL:
       return new HiveDecimalWritable(deserializeRead.currentHiveDecimalWritable);
     case DATE:
-      return new DateWritableV2(deserializeRead.currentDateWritable);
+      return new DateWritable(deserializeRead.currentDateWritable);
     case TIMESTAMP:
-      return new TimestampWritableV2(deserializeRead.currentTimestampWritable);
+      return new TimestampWritable(deserializeRead.currentTimestampWritable);
     case INTERVAL_YEAR_MONTH:
       return new HiveIntervalYearMonthWritable(deserializeRead.currentHiveIntervalYearMonthWritable);
     case INTERVAL_DAY_TIME:
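
Both the verify and serialize paths in this file reduce to wrapping and unwrapping the V1 writables restored here. A round-trip sketch, assuming only the constructors and accessors visible in the hunks above:

    import java.sql.Date;
    import java.sql.Timestamp;
    import org.apache.hadoop.hive.serde2.io.DateWritable;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;

    Timestamp ts = Timestamp.valueOf("2018-06-25 10:38:35.123456789");
    TimestampWritable tw = new TimestampWritable(ts);
    assert tw.getTimestamp().equals(ts);   // nanos survive the round trip

    Date d = Date.valueOf("2018-06-25");
    DateWritable dw = new DateWritable(d); // stored internally as days since the epoch
    assert dw.get().equals(d);             // local-midnight dates round-trip cleanly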

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
index b5ad22c..ffe9c81 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
@@ -18,13 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
-import org.apache.hadoop.hive.ql.udf.UDFMonth;
-import org.apache.hadoop.hive.ql.udf.UDFYear;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.junit.Assert;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -32,8 +25,12 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
+import org.apache.hadoop.hive.ql.udf.UDFMonth;
 import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.ql.udf.UDFYear;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.IntWritable;
@@ -41,33 +38,34 @@ import org.apache.hadoop.io.LongWritable;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.internal.runners.statements.Fail;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.List;
 import java.util.Random;
-import java.util.TimeZone;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
+import java.util.concurrent.ThreadFactory;
 
 public class TestVectorDateExpressions {
 
   private ExecutorService runner;
 
   /* copied over from VectorUDFTimestampFieldLong */
-  private TimestampWritableV2 toTimestampWritable(long daysSinceEpoch) {
-    return new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
-            DateWritableV2.daysToMillis((int) daysSinceEpoch)));
+  private TimestampWritable toTimestampWritable(long daysSinceEpoch) {
+    Timestamp ts = new Timestamp(DateWritable.daysToMillis((int) daysSinceEpoch));
+    return new TimestampWritable(ts);
   }
 
   private int[] getAllBoundaries() {
     List<Integer> boundaries = new ArrayList<Integer>(1);
-    Calendar c = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+    Calendar c = Calendar.getInstance();
     c.setTimeInMillis(0); // c.set doesn't reset millis
     for (int year = 1902; year <= 2038; year++) {
       c.set(year, Calendar.JANUARY, 1, 0, 0, 0);
@@ -110,12 +108,10 @@ public class TestVectorDateExpressions {
     return batch;
   }
 
-  private void compareToUDFYearDate(long t, int y) throws HiveException {
+  private void compareToUDFYearDate(long t, int y) {
     UDFYear udf = new UDFYear();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = toTimestampWritable(t);
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = toTimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -174,12 +170,10 @@ public class TestVectorDateExpressions {
     verifyUDFYear(batch);
   }
 
-  private void compareToUDFDayOfMonthDate(long t, int y) throws HiveException {
+  private void compareToUDFDayOfMonthDate(long t, int y) {
     UDFDayOfMonth udf = new UDFDayOfMonth();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = toTimestampWritable(t);
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = toTimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -238,12 +232,10 @@ public class TestVectorDateExpressions {
     verifyUDFDayOfMonth(batch);
   }
 
-  private void compareToUDFMonthDate(long t, int y) throws HiveException {
+  private void compareToUDFMonthDate(long t, int y) {
     UDFMonth udf = new UDFMonth();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = toTimestampWritable(t);
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = toTimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -302,7 +294,7 @@ public class TestVectorDateExpressions {
     verifyUDFMonth(batch);
   }
 
-  private LongWritable getLongWritable(TimestampWritableV2 i) {
+  private LongWritable getLongWritable(TimestampWritable i) {
     LongWritable result = new LongWritable();
     if (i == null) {
       return null;
@@ -313,11 +305,11 @@ public class TestVectorDateExpressions {
   }
 
   private void compareToUDFUnixTimeStampDate(long t, long y) {
-    TimestampWritableV2 tsw = toTimestampWritable(t);
+    TimestampWritable tsw = toTimestampWritable(t);
     LongWritable res = getLongWritable(tsw);
     if(res.get() != y) {
       System.out.printf("%d vs %d for %d, %d\n", res.get(), y, t,
-              tsw.getTimestamp().toEpochMilli()/1000);
+              tsw.getTimestamp().getTime()/1000);
     }
 
     Assert.assertEquals(res.get(), y);
@@ -380,7 +372,7 @@ public class TestVectorDateExpressions {
 
   private void compareToUDFWeekOfYearDate(long t, int y) {
     UDFWeekOfYear udf = new UDFWeekOfYear();
-    TimestampWritableV2 tsw = toTimestampWritable(t);
+    TimestampWritable tsw = toTimestampWritable(t);
     IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
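
All of these helpers funnel through toTimestampWritable at the top of the file, so one conversion carries the whole test: DateWritable.daysToMillis turns a days-since-epoch count into the epoch milliseconds of that day's midnight (local midnight in the restored V1 code, UTC midnight in the reverted V2 code, which is also why the Calendar in getAllBoundaries() no longer needs to be pinned to UTC). A sketch of the restored path:

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.serde2.io.DateWritable;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;

    int daysSinceEpoch = 0; // 1970-01-01
    TimestampWritable tsw =
        new TimestampWritable(new Timestamp(DateWritable.daysToMillis(daysSinceEpoch)));
    // tsw represents local midnight of 1970-01-01 in the JVM default zone.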

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
index aa91344..0bae9b4 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
@@ -46,7 +46,7 @@ import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.SettableListObjectInspector;
@@ -91,8 +91,7 @@ public class TestVectorExpressionWriters {
 
 
   private Writable getWritableValue(TypeInfo ti, Timestamp value) {
-    return new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(value.getTime(), value.getNanos()));
+    return new TimestampWritable(value);
   }
 
   private Writable getWritableValue(TypeInfo ti, HiveDecimal value) {
@@ -124,8 +123,7 @@ public class TestVectorExpressionWriters {
       return new BooleanWritable( value == 0 ? false : true);
     } else if (ti.equals(TypeInfoFactory.timestampTypeInfo)) {
       Timestamp ts = new Timestamp(value);
-      TimestampWritableV2 tw = new TimestampWritableV2(
-          org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
+      TimestampWritable tw = new TimestampWritable(ts);
       return tw;
     }
     return null;
@@ -248,8 +246,8 @@ public class TestVectorExpressionWriters {
       Writable w = (Writable) vew.writeValue(tcv, i);
       if (w != null) {
         Writable expected = getWritableValue(type, timestampValues[i]);
-        TimestampWritableV2 t1 = (TimestampWritableV2) expected;
-        TimestampWritableV2 t2 = (TimestampWritableV2) w;
+        TimestampWritable t1 = (TimestampWritable) expected;
+        TimestampWritable t2 = (TimestampWritable) w;
         Assert.assertTrue(t1.equals(t2));
        } else {
         Assert.assertTrue(tcv.isNull[i]);
@@ -272,8 +270,8 @@ public class TestVectorExpressionWriters {
       values[i] = vew.setValue(values[i], tcv, i);
       if (values[i] != null) {
         Writable expected = getWritableValue(type, timestampValues[i]);
-        TimestampWritableV2 t1 = (TimestampWritableV2) expected;
-        TimestampWritableV2 t2 = (TimestampWritableV2) values[i];
+        TimestampWritable t1 = (TimestampWritable) expected;
+        TimestampWritable t2 = (TimestampWritable) values[i];
         Assert.assertTrue(t1.equals(t2));
       } else {
         Assert.assertTrue(tcv.isNull[i]);
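
getWritableValue is where the two timestamp representations meet: the reverted code rebuilt a common.type.Timestamp from epoch millis plus nanos before wrapping, while the restored code wraps the java.sql.Timestamp directly. The contrast, with both constructors exactly as they appear in this hunk (the V2 line is shown only for comparison):

    Timestamp value = new Timestamp(1529923115000L); // 2018-06-25 10:38:35 UTC
    value.setNanos(123456789);

    // Restored: direct wrap.
    TimestampWritable v1 = new TimestampWritable(value);
    // Reverted: rebuild as Hive's own Timestamp first.
    TimestampWritableV2 v2 = new TimestampWritableV2(
        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
            value.getTime(), value.getNanos()));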

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
index e7884b2..9792951 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -40,23 +40,16 @@ import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Random;
-import java.util.TimeZone;
 
 public class TestVectorGenericDateExpressions {
 
   private Charset utf8 = StandardCharsets.UTF_8;
   private int size = 200;
   private Random random = new Random();
-  private SimpleDateFormat formatter = getFormatter();
+  private SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private List<PrimitiveCategory> dateTimestampStringTypes =
       Arrays.<PrimitiveCategory>asList(PrimitiveCategory.DATE, PrimitiveCategory.TIMESTAMP, PrimitiveCategory.STRING);
 
-  private static SimpleDateFormat getFormatter() {
-    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
-    formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
-    return formatter;
-  }
-
   private long newRandom(int i) {
     return random.nextInt(i);
   }
@@ -95,7 +88,7 @@ public class TestVectorGenericDateExpressions {
   }
 
   private Timestamp toTimestamp(long date) {
-    return new Timestamp(DateWritableV2.daysToMillis((int) date));
+    return new Timestamp(DateWritable.daysToMillis((int) date));
   }
 
   private BytesColumnVector toString(LongColumnVector date) {
@@ -114,7 +107,7 @@ public class TestVectorGenericDateExpressions {
   }
 
   private byte[] toString(long date) {
-    String formatted = formatter.format(new Date(DateWritableV2.daysToMillis((int) date)));
+    String formatted = formatter.format(new Date(DateWritable.daysToMillis((int) date)));
     return formatted.getBytes(utf8);
   }
 
@@ -675,7 +668,7 @@ public class TestVectorGenericDateExpressions {
       if (date.isNull[i]) {
         Assert.assertTrue(output.isNull[i]);
       } else {
-        String expected = formatter.format(new Date(DateWritableV2.daysToMillis((int) date.vector[i])));
+        String expected = formatter.format(new Date(DateWritable.daysToMillis((int) date.vector[i])));
         Assert.assertEquals(expected, actual);
       }
     }
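
The deleted getFormatter() helper existed only to pin the formatter's zone; with date arithmetic back in local time, the bare constructor gives consistent results again. The difference is pure JDK behavior:

    import java.text.SimpleDateFormat;
    import java.util.TimeZone;

    SimpleDateFormat utc = new SimpleDateFormat("yyyy-MM-dd");
    utc.setTimeZone(TimeZone.getTimeZone("UTC"));                // reverted helper
    SimpleDateFormat local = new SimpleDateFormat("yyyy-MM-dd"); // restored field
    // The two only agree when the JVM default zone is UTC; elsewhere an
    // instant near midnight can format to different calendar days.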

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
index 14d1343..e81844c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
@@ -20,13 +20,13 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.io.UnsupportedEncodingException;
 import java.nio.charset.StandardCharsets;
+import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.Random;
 
 import junit.framework.Assert;
 
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -56,7 +56,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinDoubleToDoub
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSqrtDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncTanDoubleToDouble;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Test;
 
@@ -287,8 +287,8 @@ public class TestVectorMathFunctions {
     outV = new LongColumnVector(longValues.length);
     for (int i = 0; i < longValues.length; i++) {
       Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r);
-      longValues[i] = TimestampWritableV2.getLong(randTimestamp);
-      inV.set(0, randTimestamp.toSqlTimestamp());
+      longValues[i] = TimestampWritable.getLong(randTimestamp);
+      inV.set(0, randTimestamp);
     }
 
     batch.cols[0] = inV;
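
The expected longValues here come from TimestampWritable.getLong, which reads a timestamp as whole seconds since the epoch; as elsewhere in this revert, the V2 code also had to convert each random timestamp via toSqlTimestamp() before it could enter the TimestampColumnVector. A sketch of the static conversion (method exactly as in the hunk):

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;

    Timestamp ts = new Timestamp(1500L);          // 1.5 s after the epoch
    long seconds = TimestampWritable.getLong(ts); // 1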

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
index 650bac4..f6dbd67 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
@@ -26,19 +26,13 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Calendar;
 import java.util.Date;
+import java.util.List;
 import java.util.Random;
-import java.util.TimeZone;
 
 import junit.framework.Assert;
 
-import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
-import org.apache.hadoop.hive.ql.udf.UDFHour;
-import org.apache.hadoop.hive.ql.udf.UDFMinute;
-import org.apache.hadoop.hive.ql.udf.UDFMonth;
-import org.apache.hadoop.hive.ql.udf.UDFSecond;
-import org.apache.hadoop.hive.ql.udf.UDFYear;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -46,13 +40,18 @@ import org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
+import org.apache.hadoop.hive.ql.udf.UDFHour;
+import org.apache.hadoop.hive.ql.udf.UDFMinute;
+import org.apache.hadoop.hive.ql.udf.UDFMonth;
+import org.apache.hadoop.hive.ql.udf.UDFSecond;
 import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.ql.udf.UDFYear;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.junit.Test;
 
@@ -61,13 +60,7 @@ import org.junit.Test;
  */
 public class TestVectorTimestampExpressions {
 
-  private SimpleDateFormat dateFormat = getFormatter();
-
-  private static SimpleDateFormat getFormatter() {
-    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-    formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
-    return formatter;
-  }
+  private SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 
   private Timestamp[] getAllBoundaries(int minYear, int maxYear) {
      ArrayList<Timestamp> boundaries = new ArrayList<Timestamp>(1);
@@ -102,7 +95,7 @@ public class TestVectorTimestampExpressions {
     TimestampColumnVector tcv = new TimestampColumnVector(size);
     Random rand = new Random(seed);
     for (int i = 0; i < size; i++) {
-      tcv.set(i, RandomTypeUtil.getRandTimestamp(rand).toSqlTimestamp());
+      tcv.set(i, RandomTypeUtil.getRandTimestamp(rand));
     }
     batch.cols[0] = tcv;
     batch.cols[1] = new LongColumnVector(size);
@@ -116,7 +109,7 @@ public class TestVectorTimestampExpressions {
     Random rand = new Random(seed);
     for (int i = 0; i < size; i++) {
       /* all 32 bit numbers qualify & multiply up to get nano-seconds */
-      byte[] encoded = encodeTime(RandomTypeUtil.getRandTimestamp(rand).toSqlTimestamp());
+      byte[] encoded = encodeTime(RandomTypeUtil.getRandTimestamp(rand));
       bcv.vector[i] = encoded;
       bcv.start[i] = 0;
       bcv.length[i] = encoded.length;
@@ -231,13 +224,14 @@ public class TestVectorTimestampExpressions {
     TIMESTAMP_LONG, STRING_LONG
   }
 
-  private void compareToUDFYearLong(Timestamp t, int y) throws HiveException {
+  private void compareToUDFYearLong(Timestamp t, int y) {
     UDFYear udf = new UDFYear();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = new TimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
+    if (res.get() != y) {
+      System.out.printf("%d vs %d for %s, %d\n", res.get(), y, t.toString(),
+          tsw.getTimestamp().getTime()/1000);
+    }
     Assert.assertEquals(res.get(), y);
   }
 
@@ -328,13 +322,10 @@ public class TestVectorTimestampExpressions {
     Assert.assertEquals(true, lcv.isNull[0]);
   }
 
-  private void compareToUDFDayOfMonthLong(Timestamp t, int y) throws HiveException {
+  private void compareToUDFDayOfMonthLong(Timestamp t, int y) {
     UDFDayOfMonth udf = new UDFDayOfMonth();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = new TimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -418,13 +409,10 @@ public class TestVectorTimestampExpressions {
     testVectorUDFDayOfMonth(TestType.STRING_LONG);
   }
 
-  private void compareToUDFHourLong(Timestamp t, int y) throws HiveException {
+  private void compareToUDFHourLong(Timestamp t, int y) {
     UDFHour udf = new UDFHour();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = new TimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -507,13 +495,10 @@ public class TestVectorTimestampExpressions {
     testVectorUDFHour(TestType.STRING_LONG);
   }
 
-  private void compareToUDFMinuteLong(Timestamp t, int y) throws HiveException {
+  private void compareToUDFMinuteLong(Timestamp t, int y) {
     UDFMinute udf = new UDFMinute();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = new TimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -597,13 +582,10 @@ public class TestVectorTimestampExpressions {
     testVectorUDFMinute(TestType.STRING_LONG);
   }
 
-  private void compareToUDFMonthLong(Timestamp t, int y) throws HiveException {
+  private void compareToUDFMonthLong(Timestamp t, int y) {
     UDFMonth udf = new UDFMonth();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = new TimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -686,13 +668,10 @@ public class TestVectorTimestampExpressions {
     testVectorUDFMonth(TestType.STRING_LONG);
   }
 
-  private void compareToUDFSecondLong(Timestamp t, int y) throws HiveException {
+  private void compareToUDFSecondLong(Timestamp t, int y) {
     UDFSecond udf = new UDFSecond();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = new TimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -865,8 +844,7 @@ public class TestVectorTimestampExpressions {
 
   private void compareToUDFWeekOfYearLong(Timestamp t, int y) {
     UDFWeekOfYear udf = new UDFWeekOfYear();
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
+    TimestampWritable tsw = new TimestampWritable(t);
     IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
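
Every compareToUDF*Long helper in this file changes the same way: under HIVE-12192 these UDFs were GenericUDFs that had to be initialized with an ObjectInspector and called through DeferredObjects, while the restored classes are plain UDFs with typed evaluate() overloads. A minimal sketch of the restored call path, using only types from the hunks above:

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.ql.udf.UDFYear;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;
    import org.apache.hadoop.io.IntWritable;

    TimestampWritable tsw =
        new TimestampWritable(Timestamp.valueOf("2018-06-25 10:38:35"));
    IntWritable year = new UDFYear().evaluate(tsw);
    // year.get() == 2018; no initialize() call and no DeferredJavaObject
    // wrapper are needed on this side of the revert.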

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java
index 58e3fa3..fa8f465 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java
@@ -18,20 +18,32 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import junit.framework.Assert;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Random;
+
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
+import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
-import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
+import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IdentityExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorDateAddSub.ColumnScalarMode;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
@@ -44,23 +56,34 @@ import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
 import org.apache.hadoop.hive.ql.udf.UDFYear;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateSub;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIf;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.Text;
-import org.junit.Test;
+import org.apache.hadoop.io.LongWritable;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Random;
+import junit.framework.Assert;
+
+import org.junit.Ignore;
+import org.junit.Test;
 
 public class TestVectorTimestampExtract {
 
@@ -85,6 +108,14 @@ public class TestVectorTimestampExtract {
     doTimestampExtractTests(random, "string");
   }
 
+  public enum TimestampExtractTestMode {
+    ROW_MODE,
+    ADAPTOR,
+    VECTOR_EXPRESSION;
+
+    static final int count = values().length;
+  }
+
   private void doTimestampExtractTests(Random random, String typeName)
       throws Exception {
 
@@ -161,15 +192,13 @@ public class TestVectorTimestampExtract {
       return;
     }
 
-    final GenericUDF udf;
+    final UDF udf;
     switch (extractFunctionName) {
     case "day":
       udf = new UDFDayOfMonth();
       break;
     case "dayofweek":
-      GenericUDFBridge dayOfWeekUDFBridge = new GenericUDFBridge();
-      dayOfWeekUDFBridge.setUdfClassName(UDFDayOfWeek.class.getName());
-      udf = dayOfWeekUDFBridge;
+      udf = new UDFDayOfWeek();
       break;
     case "hour":
       udf = new UDFHour();
@@ -184,9 +213,7 @@ public class TestVectorTimestampExtract {
       udf = new UDFSecond();
       break;
     case "yearweek":
-      GenericUDFBridge weekOfYearUDFBridge = new GenericUDFBridge();
-      weekOfYearUDFBridge.setUdfClassName(UDFWeekOfYear.class.getName());
-      udf = weekOfYearUDFBridge;
+      udf = new UDFWeekOfYear();
       break;
     case "year":
       udf = new UDFYear();
@@ -195,8 +222,11 @@ public class TestVectorTimestampExtract {
       throw new RuntimeException("Unexpected extract function name " + extractFunctionName);
     }
 
+    GenericUDFBridge genericUDFBridge = new GenericUDFBridge();
+    genericUDFBridge.setUdfClassName(udf.getClass().getName());
+
     ExprNodeGenericFuncDesc exprDesc =
-        new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, children);
+        new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, genericUDFBridge, children);
 
     final int rowCount = randomRows.length;
     Object[][] resultObjectsArray = new Object[TimestampExtractTestMode.count][];
@@ -312,10 +342,10 @@ public class TestVectorTimestampExtract {
       Object result;
       switch (dateTimeStringPrimitiveCategory) {
       case TIMESTAMP:
-        result = evaluator.evaluate((TimestampWritableV2) object);
+        result = evaluator.evaluate((TimestampWritable) object);
         break;
       case DATE:
-        result = evaluator.evaluate((DateWritableV2) object);
+        result = evaluator.evaluate((DateWritable) object);
         break;
       case STRING:
         {
@@ -432,12 +462,4 @@ public class TestVectorTimestampExtract {
 
     return true;
   }
-
-  public enum TimestampExtractTestMode {
-    ROW_MODE,
-    ADAPTOR,
-    VECTOR_EXPRESSION;
-
-    static final int count = values().length;
-  }
 }
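
With every branch of the switch now yielding an old-style UDF, the revert can funnel all of the extract functions through a single GenericUDFBridge instead of hand-building bridges for "dayofweek" and "yearweek" alone. The bridge stores the class name and instantiates the UDF reflectively when the expression is initialized; a sketch of the pattern as used in this hunk ('children' is the ExprNodeDesc list built earlier in the test):

    UDF udf = new UDFWeekOfYear();  // any branch of the switch above
    GenericUDFBridge genericUDFBridge = new GenericUDFBridge();
    genericUDFBridge.setUdfClassName(udf.getClass().getName());
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.intTypeInfo, genericUDFBridge, children);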

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
index 4ed087e..58ed151 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
@@ -22,8 +22,12 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.io.UnsupportedEncodingException;
 import java.math.BigDecimal;
+import java.math.MathContext;
+import java.math.RoundingMode;
 import java.sql.Timestamp;
+import java.util.Arrays;
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
 
@@ -31,7 +35,7 @@ import junit.framework.Assert;
 
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
@@ -39,10 +43,12 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.TimestampUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Test;
@@ -82,7 +88,7 @@ public class TestVectorTypeCasts {
     expr.evaluate(b);
     for (int i = 0; i < intValues.length; i++) {
       Timestamp timestamp = resultV.asScratchTimestamp(i);
-      long actual = DateWritableV2.millisToDays(timestamp.getTime());
+      long actual = DateWritable.millisToDays(timestamp.getTime());
       assertEquals(actual, intValues[i]);
     }
   }
@@ -149,8 +155,7 @@ public class TestVectorTypeCasts {
     expr.evaluate(b);
     for (int i = 0; i < longValues.length; i++) {
       Timestamp timestamp = resultV.asScratchTimestamp(i);
-      long actual = TimestampWritableV2.getLong(
-          org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(timestamp.getTime(), timestamp.getNanos()));
+      long actual = TimestampWritable.getLong(timestamp);
       assertEquals(actual, longValues[i]);
     }
   }
@@ -513,8 +518,7 @@ public class TestVectorTypeCasts {
       Timestamp ts = new Timestamp(millis);
       int nanos = RandomTypeUtil.randomNanos(r);
       ts.setNanos(nanos);
-      TimestampWritableV2 tsw = new TimestampWritableV2(
-          org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
+      TimestampWritable tsw = new TimestampWritable(ts);
       double asDouble = tsw.getDouble();
       doubleValues[i] = asDouble;
       HiveDecimal hiveDecimal = HiveDecimal.create(new BigDecimal(asDouble));
@@ -578,8 +582,7 @@ public class TestVectorTypeCasts {
       long millis = RandomTypeUtil.randomMillis(r);
       Timestamp ts = new Timestamp(millis);
       ts.setNanos(optionalNanos);
-      TimestampWritableV2 tsw = new TimestampWritableV2(
-          org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
+      TimestampWritable tsw = new TimestampWritable(ts);
       hiveDecimalValues[i] = tsw.getHiveDecimal();
 
       tcv.set(i, ts);
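
Both casts exercised here lean on TimestampWritable's scalar views: getDouble() renders the instant as seconds since the epoch with the nanos as the fractional part, and getHiveDecimal() is the exact-decimal counterpart that sidesteps double rounding at nanosecond precision (method names as in the hunks; the seconds-based reading is the long-standing V1 behavior). A small sketch:

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;

    Timestamp ts = new Timestamp(1500L);  // 1.5 s after the epoch
    TimestampWritable tsw = new TimestampWritable(ts);
    double asDouble = tsw.getDouble();    // 1.5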

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
index c908f66..f257363 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.LinkedHashMap;
@@ -27,17 +29,15 @@ import java.util.Map;
 
 import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -45,7 +45,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
@@ -63,6 +63,7 @@ import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
 
 /**
  * (Copy of VerifyFast from serde).
@@ -244,7 +245,7 @@ public class VerifyFastRow {
         case DATE:
           {
             Date value = deserializeRead.currentDateWritable.get();
-            Date expected = ((DateWritableV2) object).get();
+            Date expected = ((DateWritable) object).get();
             if (!value.equals(expected)) {
               TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
             }
@@ -253,7 +254,7 @@ public class VerifyFastRow {
         case TIMESTAMP:
           {
             Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
-            Timestamp expected = ((TimestampWritableV2) object).getTimestamp();
+            Timestamp expected = ((TimestampWritable) object).getTimestamp();
             if (!value.equals(expected)) {
               TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
             }
@@ -394,13 +395,13 @@ public class VerifyFastRow {
           break;
         case DATE:
           {
-            Date value = ((DateWritableV2) object).get();
+            Date value = ((DateWritable) object).get();
             serializeWrite.writeDate(value);
           }
           break;
         case TIMESTAMP:
           {
-            Timestamp value = ((TimestampWritableV2) object).getTimestamp();
+            Timestamp value = ((TimestampWritable) object).getTimestamp();
             serializeWrite.writeTimestamp(value);
           }
           break;
@@ -571,9 +572,9 @@ public class VerifyFastRow {
     case DECIMAL:
       return new HiveDecimalWritable(deserializeRead.currentHiveDecimalWritable);
     case DATE:
-      return new DateWritableV2(deserializeRead.currentDateWritable);
+      return new DateWritable(deserializeRead.currentDateWritable);
     case TIMESTAMP:
-      return new TimestampWritableV2(deserializeRead.currentTimestampWritable);
+      return new TimestampWritable(deserializeRead.currentTimestampWritable);
     case INTERVAL_YEAR_MONTH:
       return new HiveIntervalYearMonthWritable(deserializeRead.currentHiveIntervalYearMonthWritable);
     case INTERVAL_DAY_TIME:

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
index 878d286..3fe8b09 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
@@ -22,7 +22,7 @@ import java.sql.Timestamp;
 import java.util.Random;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -86,7 +86,7 @@ public class VectorizedRowGroupGenUtil {
     tcv.noNulls = !nulls;
     tcv.isRepeating = repeating;
 
-    Timestamp repeatingTimestamp = RandomTypeUtil.getRandTimestamp(rand).toSqlTimestamp();
+    Timestamp repeatingTimestamp = RandomTypeUtil.getRandTimestamp(rand);
 
     int nullFrequency = generateNullFrequency(rand);
 
@@ -98,7 +98,7 @@ public class VectorizedRowGroupGenUtil {
       }else {
         tcv.isNull[i] = false;
         if (!repeating) {
-          Timestamp randomTimestamp = RandomTypeUtil.getRandTimestamp(rand).toSqlTimestamp();
+          Timestamp randomTimestamp = RandomTypeUtil.getRandTimestamp(rand);
           tcv.set(i,  randomTimestamp);
           timestampValues[i] = randomTimestamp;
         } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
index 9bf9d9d..1064b19 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
@@ -22,7 +22,7 @@ import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.Random;
 
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -31,6 +31,8 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType;
 import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType.GenerateCategory;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.Text;
 
 public class VectorColumnGroupGenerator {
 
@@ -230,7 +232,7 @@ public class VectorColumnGroupGenerator {
 
     case TIMESTAMP:
       {
-        Timestamp value = RandomTypeUtil.getRandTimestamp(random).toSqlTimestamp();
+        Timestamp value = RandomTypeUtil.getRandTimestamp(random);
         ((Timestamp[]) array)[rowIndex] = value;
       }
       break;
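
As in the generator above, random timestamps land both in a plain Timestamp[] and in a TimestampColumnVector. A minimal sketch of that fill pattern, assuming an illustrative batch size and the vector constructor taking a length:

    Random random = new Random();
    TimestampColumnVector tcv = new TimestampColumnVector(1024);
    Timestamp[] values = new Timestamp[1024];
    for (int rowIndex = 0; rowIndex < 1024; rowIndex++) {
      Timestamp value = RandomTypeUtil.getRandTimestamp(random);
      values[rowIndex] = value;        // keep a copy for later verification
      tcv.set(rowIndex, value);        // store millis + nanos into the vector
    }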

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java b/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
index c9a5812..ce25c3e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
@@ -26,14 +26,13 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -41,7 +40,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -58,6 +57,7 @@ import org.apache.hadoop.io.Text;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.sql.Timestamp;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -111,20 +111,20 @@ public class TestArrowColumnarBatchSerDe {
   private final static Timestamp NEGATIVE_TIMESTAMP_WITHOUT_NANOS;
 
   static {
-    TIMESTAMP = Timestamp.ofEpochMilli(TIME_IN_MILLIS);
-    NEGATIVE_TIMESTAMP_WITHOUT_NANOS = Timestamp.ofEpochMilli(NEGATIVE_TIME_IN_MILLIS);
+    TIMESTAMP = new Timestamp(TIME_IN_MILLIS);
+    NEGATIVE_TIMESTAMP_WITHOUT_NANOS = new Timestamp(NEGATIVE_TIME_IN_MILLIS);
   }
 
   private final static Object[][] DTI_ROWS = {
       {
-          new DateWritableV2(DateWritableV2.millisToDays(TIME_IN_MILLIS)),
-          new TimestampWritableV2(TIMESTAMP),
+          new DateWritable(DateWritable.millisToDays(TIME_IN_MILLIS)),
+          new TimestampWritable(TIMESTAMP),
           new HiveIntervalYearMonthWritable(new HiveIntervalYearMonth(1, 2)),
           new HiveIntervalDayTimeWritable(new HiveIntervalDayTime(1, 2, 3, 4, 5_000_000))
       },
       {
-          new DateWritableV2(DateWritableV2.millisToDays(NEGATIVE_TIME_IN_MILLIS)),
-          new TimestampWritableV2(NEGATIVE_TIMESTAMP_WITHOUT_NANOS),
+          new DateWritable(DateWritable.millisToDays(NEGATIVE_TIME_IN_MILLIS)),
+          new TimestampWritable(NEGATIVE_TIMESTAMP_WITHOUT_NANOS),
           null,
           null
       },
@@ -364,10 +364,10 @@ public class TestArrowColumnarBatchSerDe {
                         newArrayList(text("hello")),
                         input -> text(input.toString().toUpperCase())),
                     intW(0))), // c16:array<struct<m:map<string,string>,n:int>>
-            new TimestampWritableV2(TIMESTAMP), // c17:timestamp
+            new TimestampWritable(TIMESTAMP), // c17:timestamp
             decimalW(HiveDecimal.create(0, 0)), // c18:decimal(16,7)
             new BytesWritable("Hello".getBytes()), // c19:binary
-            new DateWritableV2(123), // c20:date
+            new DateWritable(123), // c20:date
             varcharW("x", 20), // c21:varchar(20)
             charW("y", 15), // c22:char(15)
             new BytesWritable("world!".getBytes()), // c23:binary
@@ -508,9 +508,9 @@ public class TestArrowColumnarBatchSerDe {
     Object[][] rows = new Object[size][];
     for (int i = 0; i < size; i++) {
       long millis = ((long) rand.nextInt(Integer.MAX_VALUE)) * 1000;
-      Timestamp timestamp = Timestamp.ofEpochMilli(rand.nextBoolean() ? millis : -millis);
+      Timestamp timestamp = new Timestamp(rand.nextBoolean() ? millis : -millis);
       timestamp.setNanos(rand.nextInt(1000) * 1000);
-      rows[i] = new Object[] {new TimestampWritableV2(timestamp)};
+      rows[i] = new Object[] {new TimestampWritable(timestamp)};
     }
 
     initAndSerializeAndDeserialize(schema, rows);
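
One detail worth noting in the random-timestamp loop above: java.sql.Timestamp.setNanos() replaces the entire fractional second, so the test builds whole-second millis first and only then attaches nanos. A sketch of the same construction as it appears in the hunk:

    Random rand = new Random();
    long millis = ((long) rand.nextInt(Integer.MAX_VALUE)) * 1000; // whole seconds
    Timestamp timestamp = new Timestamp(rand.nextBoolean() ? millis : -millis);
    timestamp.setNanos(rand.nextInt(1000) * 1000); // microsecond precision only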

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
index 208aeb5..fe475f6 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
@@ -28,6 +28,8 @@ import java.net.URISyntaxException;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -48,9 +50,7 @@ import org.apache.hadoop.fs.*;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.common.ValidWriteIdList;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
@@ -181,8 +181,8 @@ public class TestInputOutputFormat {
       decimalValue = HiveDecimal.create(x);
       long millisUtc = x * MILLIS_IN_DAY;
       millisUtc -= LOCAL_TIMEZONE.getOffset(millisUtc);
-      dateValue = Date.ofEpochMilli(millisUtc);
-      timestampValue = Timestamp.ofEpochMilli(millisUtc);
+      dateValue = new Date(millisUtc);
+      timestampValue = new Timestamp(millisUtc);
     }
 
     @Override
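
The offset arithmetic above is what keeps the restored java.sql values on the intended calendar day: java.sql.Date and Timestamp render epoch millis in the JVM's default zone, so midnight UTC of day x is shifted by the local offset first. A self-contained sketch, with TimeZone.getDefault() standing in for the test's LOCAL_TIMEZONE, an illustrative day number, and DST edge cases ignored:

    import java.sql.Date;
    import java.util.TimeZone;

    public class LocalDayExample {
      public static void main(String[] args) {
        final long MILLIS_IN_DAY = 24L * 60 * 60 * 1000;
        long x = 10_000;                      // illustrative day count since epoch
        long millisUtc = x * MILLIS_IN_DAY;   // midnight UTC of day x
        millisUtc -= TimeZone.getDefault().getOffset(millisUtc);
        System.out.println(new Date(millisUtc)); // 1997-05-19 in the default zone
      }
    }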

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
index 97d4fc6..dc0da9c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
@@ -25,8 +25,11 @@ import static junit.framework.Assert.assertTrue;
 
 import java.io.File;
 import java.io.IOException;
+import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.nio.ByteBuffer;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -40,18 +43,16 @@ import com.google.common.primitives.Longs;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -541,7 +542,7 @@ public class TestOrcFile {
     while (rows.hasNext()) {
       Object row = rows.next(null);
       Timestamp tlistTimestamp = tslist.get(idx++);
-      if (tlistTimestamp.getNanos() != ((TimestampWritableV2) row).getNanos()) {
+      if (tlistTimestamp.getNanos() != ((TimestampWritable) row).getNanos()) {
         assertTrue(false);
       }
     }
@@ -1311,10 +1312,10 @@ public class TestOrcFile {
     for (int year = minYear; year < maxYear; ++year) {
       for (int ms = 1000; ms < 2000; ++ms) {
         row.setFieldValue(0,
-            new TimestampWritableV2(Timestamp.valueOf(year + "-05-05 12:34:56."
+            new TimestampWritable(Timestamp.valueOf(year + "-05-05 12:34:56."
                 + ms)));
         row.setFieldValue(1,
-            new DateWritableV2(Date.of(year - 1900, 11, 25)));
+            new DateWritable(new Date(year - 1900, 11, 25)));
         writer.addRow(row);
       }
     }
@@ -1325,10 +1326,10 @@ public class TestOrcFile {
     for (int year = minYear; year < maxYear; ++year) {
       for(int ms = 1000; ms < 2000; ++ms) {
         row = (OrcStruct) rows.next(row);
-        assertEquals(new TimestampWritableV2
+        assertEquals(new TimestampWritable
                 (Timestamp.valueOf(year + "-05-05 12:34:56." + ms)),
             row.getFieldValue(0));
-        assertEquals(new DateWritableV2(Date.of(year - 1900, 11, 25)),
+        assertEquals(new DateWritable(new Date(year - 1900, 11, 25)),
             row.getFieldValue(1));
       }
     }
@@ -1382,12 +1383,12 @@ public class TestOrcFile {
     OrcStruct row = new OrcStruct(3);
     OrcUnion union = new OrcUnion();
     row.setFieldValue(1, union);
-    row.setFieldValue(0, new TimestampWritableV2(Timestamp.valueOf("2000-03-12 15:00:00")));
+    row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")));
     HiveDecimal value = HiveDecimal.create("12345678.6547456");
     row.setFieldValue(2, new HiveDecimalWritable(value));
     union.set((byte) 0, new IntWritable(42));
     writer.addRow(row);
-    row.setFieldValue(0, new TimestampWritableV2(Timestamp.valueOf("2000-03-20 12:00:00.123456789")));
+    row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")));
     union.set((byte) 1, new Text("hello"));
     value = HiveDecimal.create("-5643.234");
     row.setFieldValue(2, new HiveDecimalWritable(value));
@@ -1402,14 +1403,14 @@ public class TestOrcFile {
     union.set((byte) 1, null);
     writer.addRow(row);
     union.set((byte) 0, new IntWritable(200000));
-    row.setFieldValue(0, new TimestampWritableV2
+    row.setFieldValue(0, new TimestampWritable
         (Timestamp.valueOf("1970-01-01 00:00:00")));
     value = HiveDecimal.create("10000000000000000000");
     row.setFieldValue(2, new HiveDecimalWritable(value));
     writer.addRow(row);
     Random rand = new Random(42);
     for(int i=1970; i < 2038; ++i) {
-      row.setFieldValue(0, new TimestampWritableV2(Timestamp.valueOf(i +
+      row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf(i +
           "-05-05 12:34:56." + i)));
       if ((i & 1) == 0) {
         union.set((byte) 0, new IntWritable(i*i));
@@ -1489,7 +1490,7 @@ public class TestOrcFile {
     inspector = reader.getObjectInspector();
     assertEquals("struct<time:timestamp,union:uniontype<int,string>,decimal:decimal(38,18)>",
         inspector.getTypeName());
-    assertEquals(new TimestampWritableV2(Timestamp.valueOf("2000-03-12 15:00:00")),
+    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")),
         row.getFieldValue(0));
     union = (OrcUnion) row.getFieldValue(1);
     assertEquals(0, union.getTag());
@@ -1498,7 +1499,7 @@ public class TestOrcFile {
         row.getFieldValue(2));
     row = (OrcStruct) rows.next(row);
     assertEquals(2, rows.getRowNumber());
-    assertEquals(new TimestampWritableV2(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
+    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
         row.getFieldValue(0));
     assertEquals(1, union.getTag());
     assertEquals(new Text("hello"), union.getObject());
@@ -1520,7 +1521,7 @@ public class TestOrcFile {
     assertEquals(null, union.getObject());
     assertEquals(null, row.getFieldValue(2));
     row = (OrcStruct) rows.next(row);
-    assertEquals(new TimestampWritableV2(Timestamp.valueOf("1970-01-01 00:00:00")),
+    assertEquals(new TimestampWritable(Timestamp.valueOf("1970-01-01 00:00:00")),
         row.getFieldValue(0));
     assertEquals(new IntWritable(200000), union.getObject());
     assertEquals(new HiveDecimalWritable(HiveDecimal.create("10000000000000000000")),
@@ -1528,7 +1529,7 @@ public class TestOrcFile {
     rand = new Random(42);
     for(int i=1970; i < 2038; ++i) {
       row = (OrcStruct) rows.next(row);
-      assertEquals(new TimestampWritableV2(Timestamp.valueOf(i + "-05-05 12:34:56." + i)),
+      assertEquals(new TimestampWritable(Timestamp.valueOf(i + "-05-05 12:34:56." + i)),
           row.getFieldValue(0));
       if ((i & 1) == 0) {
         assertEquals(0, union.getTag());
@@ -1555,7 +1556,7 @@ public class TestOrcFile {
     assertEquals(reader.getNumberOfRows(), rows.getRowNumber());
     rows.seekToRow(1);
     row = (OrcStruct) rows.next(row);
-    assertEquals(new TimestampWritableV2(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
+    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
         row.getFieldValue(0));
     assertEquals(1, union.getTag());
     assertEquals(new Text("hello"), union.getObject());

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
index 092da69..81d2e2d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
@@ -420,12 +420,12 @@ public class TestOrcSerDeStats {
     long rowCount = writer.getNumberOfRows();
     long rawDataSize = writer.getRawDataSize();
     assertEquals(2, rowCount);
-    assertEquals(1668, rawDataSize);
+    assertEquals(1740, rawDataSize);
     Reader reader = OrcFile.createReader(testFilePath,
         OrcFile.readerOptions(conf).filesystem(fs));
 
     assertEquals(2, reader.getNumberOfRows());
-    assertEquals(1668, reader.getRawDataSize());
+    assertEquals(1740, reader.getRawDataSize());
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1")));
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("byte1")));
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("short1")));
@@ -438,9 +438,9 @@ public class TestOrcSerDeStats {
     assertEquals(455, reader.getRawDataSizeOfColumns(Lists.newArrayList("list")));
     assertEquals(368, reader.getRawDataSizeOfColumns(Lists.newArrayList("map")));
     assertEquals(364, reader.getRawDataSizeOfColumns(Lists.newArrayList("middle")));
-    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
+    assertEquals(80, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
     assertEquals(224, reader.getRawDataSizeOfColumns(Lists.newArrayList("decimal1")));
-    assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
+    assertEquals(88, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
     assertEquals(1195,
         reader.getRawDataSizeOfColumns(Lists.newArrayList("middle", "list", "map", "float1")));
     assertEquals(185,
@@ -514,12 +514,12 @@ public class TestOrcSerDeStats {
     long rowCount = writer.getNumberOfRows();
     long rawDataSize = writer.getRawDataSize();
     assertEquals(2, rowCount);
-    assertEquals(1668, rawDataSize);
+    assertEquals(1740, rawDataSize);
     Reader reader = OrcFile.createReader(testFilePath,
         OrcFile.readerOptions(conf).filesystem(fs));
 
     assertEquals(2, reader.getNumberOfRows());
-    assertEquals(1668, reader.getRawDataSize());
+    assertEquals(1740, reader.getRawDataSize());
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1")));
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("byte1")));
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("short1")));
@@ -532,9 +532,9 @@ public class TestOrcSerDeStats {
     assertEquals(455, reader.getRawDataSizeOfColumns(Lists.newArrayList("list")));
     assertEquals(368, reader.getRawDataSizeOfColumns(Lists.newArrayList("map")));
     assertEquals(364, reader.getRawDataSizeOfColumns(Lists.newArrayList("middle")));
-    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
+    assertEquals(80, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
     assertEquals(224, reader.getRawDataSizeOfColumns(Lists.newArrayList("decimal1")));
-    assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
+    assertEquals(88, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
     assertEquals(1195,
         reader.getRawDataSizeOfColumns(Lists.newArrayList("middle", "list", "map", "float1")));
     assertEquals(185,
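
The expected-size bumps in both hunks are internally consistent: the only column whose estimate changes is ts, and its delta accounts for the entire per-file delta.

    ts column:  80 - 8     = 72 bytes (2 rows, so 40 vs 4 bytes per timestamp)
    total:      1740 - 1668 = 72 bytes
    ts + int1:  88 - 16    = 72 bytes (int1 unchanged at 8)

The 40-bytes-per-value figure presumably reflects the object-based raw-data-size accounting for java.sql.Timestamp; that attribution is an assumption, not verified here.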

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
index 2071d13..c23f00e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hive.ql.io.orc;
 
 import java.io.File;
-import java.time.LocalDateTime;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.Calendar;
 import java.util.Random;
 
 import junit.framework.Assert;
@@ -27,9 +29,7 @@ import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -38,17 +38,18 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.orc.TypeDescription;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -133,7 +134,7 @@ public class TestVectorizedORCReader {
     for (int i = 0; i < 21000; ++i) {
       if ((i % 7) != 0) {
         writer.addRow(new MyRecord(((i % 3) == 0), (byte)(i % 5), i, (long) 200, (short) (300 + i), (double) (400 + i),
-            words[r1.nextInt(words.length)], Timestamp.valueOf(LocalDateTime.now().toString()),
+            words[r1.nextInt(words.length)], new Timestamp(Calendar.getInstance().getTime().getTime()),
             Date.valueOf(dates[i % 3]), HiveDecimal.create(decimalStrings[i % decimalStrings.length])));
       } else {
         writer.addRow(new MyRecord(null, null, i, (long) 200, null, null, null, null, null, null));
@@ -173,21 +174,19 @@ public class TestVectorizedORCReader {
             Long temp = (long) (((BooleanWritable) a).get() ? 1 : 0);
             long b = ((LongColumnVector) cv).vector[rowId];
             Assert.assertEquals(temp.toString(), Long.toString(b));
-          } else if (a instanceof TimestampWritableV2) {
+          } else if (a instanceof TimestampWritable) {
             // Timestamps are stored as long, so convert and compare
-            TimestampWritableV2 t = ((TimestampWritableV2) a);
+            TimestampWritable t = ((TimestampWritable) a);
             TimestampColumnVector tcv = ((TimestampColumnVector) cv);
-            java.sql.Timestamp ts = tcv.asScratchTimestamp(rowId);
-            Assert.assertEquals(
-                t.getTimestamp(), Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
+            Assert.assertEquals(t.getTimestamp(), tcv.asScratchTimestamp(rowId));
 
-          } else if (a instanceof DateWritableV2) {
+          } else if (a instanceof DateWritable) {
             // Dates are stored as long, so convert and compare
 
-            DateWritableV2 adt = (DateWritableV2) a;
+            DateWritable adt = (DateWritable) a;
             long b = ((LongColumnVector) cv).vector[rowId];
-            Assert.assertEquals(adt.get().toEpochMilli(),
-                DateWritableV2.daysToMillis((int) b));
+            Assert.assertEquals(adt.get().getTime(),
+                DateWritable.daysToMillis((int) b));
 
           } else if (a instanceof HiveDecimalWritable) {
             // Decimals are stored as BigInteger, so convert and compare
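
The date branch above round-trips through DateWritable's day/millis helpers. A minimal sketch of that conversion pair (both methods appear in this diff; the local-midnight behavior of daysToMillis is assumed):

    long nowMillis = System.currentTimeMillis();
    int days = DateWritable.millisToDays(nowMillis);  // days since epoch
    long dayMillis = DateWritable.daysToMillis(days); // midnight of that day
    Date sameDay = new Date(dayMillis);               // same calendar day back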

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
index 1d32afe..a230441 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
@@ -23,7 +23,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -63,6 +62,7 @@ import org.apache.parquet.io.api.Binary;
 import org.apache.parquet.schema.MessageType;
 
 import java.io.IOException;
+import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.List;
 
@@ -209,9 +209,7 @@ public class VectorizedColumnReaderTestBase {
   }
 
   protected static NanoTime getNanoTime(int index) {
-    Timestamp ts = new Timestamp();
-    ts.setTimeInMillis(index);
-    return NanoTimeUtils.getNanoTime(ts, false);
+    return NanoTimeUtils.getNanoTime(new Timestamp(index), false);
   }
 
   protected static HiveDecimal getDecimal(
@@ -378,13 +376,8 @@ public class VectorizedColumnReaderTestBase {
           if (c == nElements) {
             break;
           }
-          Timestamp expected = new Timestamp();
-          if (isDictionaryEncoding) {
-            expected.setTimeInMillis(c % UNIQUE_NUM);
-          } else {
-            expected.setTimeInMillis(c);
-          }
-          assertEquals("Not the same time at " + c, expected.toEpochMilli(), vector.getTime(i));
+          Timestamp expected = isDictionaryEncoding ? new Timestamp(c % UNIQUE_NUM) : new Timestamp(c);
+          assertEquals("Not the same time at " + c, expected.getTime(), vector.getTime(i));
           assertEquals("Not the same nano at " + c, expected.getNanos(), vector.getNanos(i));
           assertFalse(vector.isNull[i]);
           c++;
@@ -415,12 +408,8 @@ public class VectorizedColumnReaderTestBase {
             break;
           }
 
-          Timestamp expected = new Timestamp();
-          if (isDictionaryEncoding) {
-            expected.setTimeInMillis(c % UNIQUE_NUM);
-          } else {
-            expected.setTimeInMillis(c);
-          };
+          Timestamp expected = isDictionaryEncoding ? new Timestamp(c % UNIQUE_NUM) : new Timestamp(
+              c);
           String actual = new String(Arrays
               .copyOfRange(vector.vector[i], vector.start[i], vector.start[i] + vector.length[i]));
           assertEquals("Not the same time at " + c, expected.toString(), actual);