Posted to commits@hive.apache.org by jc...@apache.org on 2018/06/22 17:46:44 UTC

[28/35] hive git commit: HIVE-12192: Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
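
The patch below migrates test code from java.sql.Timestamp/Date, whose
epoch values depend on the JVM's default time zone, to the new
org.apache.hadoop.hive.common.type.Timestamp/Date types, which anchor the
wall-clock fields to UTC, and from TimestampWritable/DateWritable to their
V2 counterparts. A minimal JDK-only sketch of the zone dependence being
removed (illustrative, not part of the patch):

    import java.sql.Timestamp;
    import java.time.LocalDateTime;
    import java.time.ZoneOffset;
    import java.util.TimeZone;

    public class UtcSemanticsDemo {
      public static void main(String[] args) {
        // java.sql.Timestamp.valueOf() interprets the string in the JVM's
        // default time zone, so the resulting epoch value is zone-dependent.
        TimeZone.setDefault(TimeZone.getTimeZone("US/Pacific"));
        long pacific = Timestamp.valueOf("2000-03-12 15:00:00").getTime();
        TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
        long utc = Timestamp.valueOf("2000-03-12 15:00:00").getTime();
        System.out.println(pacific - utc);  // 28800000 (8 hours), not 0

        // Anchoring the same wall-clock value to UTC makes it identical on
        // every machine, which is the behavior the new Hive type provides.
        long fixed = LocalDateTime.parse("2000-03-12T15:00:00")
            .toInstant(ZoneOffset.UTC).toEpochMilli();
        System.out.println(fixed);
      }
    }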

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
index dc0da9c..97d4fc6 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
@@ -25,11 +25,8 @@ import static junit.framework.Assert.assertTrue;
 
 import java.io.File;
 import java.io.IOException;
-import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.nio.ByteBuffer;
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -43,16 +40,18 @@ import com.google.common.primitives.Longs;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -542,7 +541,7 @@ public class TestOrcFile {
     while (rows.hasNext()) {
       Object row = rows.next(null);
       Timestamp tlistTimestamp = tslist.get(idx++);
-      if (tlistTimestamp.getNanos() != ((TimestampWritable) row).getNanos()) {
+      if (tlistTimestamp.getNanos() != ((TimestampWritableV2) row).getNanos()) {
         assertTrue(false);
       }
     }
@@ -1312,10 +1311,10 @@ public class TestOrcFile {
     for (int year = minYear; year < maxYear; ++year) {
       for (int ms = 1000; ms < 2000; ++ms) {
         row.setFieldValue(0,
-            new TimestampWritable(Timestamp.valueOf(year + "-05-05 12:34:56."
+            new TimestampWritableV2(Timestamp.valueOf(year + "-05-05 12:34:56."
                 + ms)));
         row.setFieldValue(1,
-            new DateWritable(new Date(year - 1900, 11, 25)));
+            new DateWritableV2(Date.of(year - 1900, 11, 25)));
         writer.addRow(row);
       }
     }
@@ -1326,10 +1325,10 @@ public class TestOrcFile {
     for (int year = minYear; year < maxYear; ++year) {
       for(int ms = 1000; ms < 2000; ++ms) {
         row = (OrcStruct) rows.next(row);
-        assertEquals(new TimestampWritable
+        assertEquals(new TimestampWritableV2
                 (Timestamp.valueOf(year + "-05-05 12:34:56." + ms)),
             row.getFieldValue(0));
-        assertEquals(new DateWritable(new Date(year - 1900, 11, 25)),
+        assertEquals(new DateWritableV2(Date.of(year - 1900, 11, 25)),
             row.getFieldValue(1));
       }
     }
@@ -1383,12 +1382,12 @@ public class TestOrcFile {
     OrcStruct row = new OrcStruct(3);
     OrcUnion union = new OrcUnion();
     row.setFieldValue(1, union);
-    row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")));
+    row.setFieldValue(0, new TimestampWritableV2(Timestamp.valueOf("2000-03-12 15:00:00")));
     HiveDecimal value = HiveDecimal.create("12345678.6547456");
     row.setFieldValue(2, new HiveDecimalWritable(value));
     union.set((byte) 0, new IntWritable(42));
     writer.addRow(row);
-    row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")));
+    row.setFieldValue(0, new TimestampWritableV2(Timestamp.valueOf("2000-03-20 12:00:00.123456789")));
     union.set((byte) 1, new Text("hello"));
     value = HiveDecimal.create("-5643.234");
     row.setFieldValue(2, new HiveDecimalWritable(value));
@@ -1403,14 +1402,14 @@ public class TestOrcFile {
     union.set((byte) 1, null);
     writer.addRow(row);
     union.set((byte) 0, new IntWritable(200000));
-    row.setFieldValue(0, new TimestampWritable
+    row.setFieldValue(0, new TimestampWritableV2
         (Timestamp.valueOf("1970-01-01 00:00:00")));
     value = HiveDecimal.create("10000000000000000000");
     row.setFieldValue(2, new HiveDecimalWritable(value));
     writer.addRow(row);
     Random rand = new Random(42);
     for(int i=1970; i < 2038; ++i) {
-      row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf(i +
+      row.setFieldValue(0, new TimestampWritableV2(Timestamp.valueOf(i +
           "-05-05 12:34:56." + i)));
       if ((i & 1) == 0) {
         union.set((byte) 0, new IntWritable(i*i));
@@ -1490,7 +1489,7 @@ public class TestOrcFile {
     inspector = reader.getObjectInspector();
     assertEquals("struct<time:timestamp,union:uniontype<int,string>,decimal:decimal(38,18)>",
         inspector.getTypeName());
-    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")),
+    assertEquals(new TimestampWritableV2(Timestamp.valueOf("2000-03-12 15:00:00")),
         row.getFieldValue(0));
     union = (OrcUnion) row.getFieldValue(1);
     assertEquals(0, union.getTag());
@@ -1499,7 +1498,7 @@ public class TestOrcFile {
         row.getFieldValue(2));
     row = (OrcStruct) rows.next(row);
     assertEquals(2, rows.getRowNumber());
-    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
+    assertEquals(new TimestampWritableV2(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
         row.getFieldValue(0));
     assertEquals(1, union.getTag());
     assertEquals(new Text("hello"), union.getObject());
@@ -1521,7 +1520,7 @@ public class TestOrcFile {
     assertEquals(null, union.getObject());
     assertEquals(null, row.getFieldValue(2));
     row = (OrcStruct) rows.next(row);
-    assertEquals(new TimestampWritable(Timestamp.valueOf("1970-01-01 00:00:00")),
+    assertEquals(new TimestampWritableV2(Timestamp.valueOf("1970-01-01 00:00:00")),
         row.getFieldValue(0));
     assertEquals(new IntWritable(200000), union.getObject());
     assertEquals(new HiveDecimalWritable(HiveDecimal.create("10000000000000000000")),
@@ -1529,7 +1528,7 @@ public class TestOrcFile {
     rand = new Random(42);
     for(int i=1970; i < 2038; ++i) {
       row = (OrcStruct) rows.next(row);
-      assertEquals(new TimestampWritable(Timestamp.valueOf(i + "-05-05 12:34:56." + i)),
+      assertEquals(new TimestampWritableV2(Timestamp.valueOf(i + "-05-05 12:34:56." + i)),
           row.getFieldValue(0));
       if ((i & 1) == 0) {
         assertEquals(0, union.getTag());
@@ -1556,7 +1555,7 @@ public class TestOrcFile {
     assertEquals(reader.getNumberOfRows(), rows.getRowNumber());
     rows.seekToRow(1);
     row = (OrcStruct) rows.next(row);
-    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
+    assertEquals(new TimestampWritableV2(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
         row.getFieldValue(0));
     assertEquals(1, union.getTag());
     assertEquals(new Text("hello"), union.getObject());
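
One subtlety in the hunks above: the deprecated java.sql.Date constructor
takes year-1900 and a 0-based month, while Date.of (judging from the
TestGenericUDFDate expectations later in this patch) takes the literal
year and a 1-based month. The round-trip test stays green because it
writes and reads back identical values, but the same arguments now denote
a different calendar date. A sketch, with Date.of semantics assumed from
those later tests (illustrative, not part of the patch):

    import org.apache.hadoop.hive.common.type.Date;

    public class DateCtorSemantics {
      @SuppressWarnings("deprecation")
      public static void main(String[] args) {
        int year = 2000;
        // Deprecated ctor: adds 1900 to the year, month 11 is December,
        // so this prints 2000-12-25.
        System.out.println(new java.sql.Date(year - 1900, 11, 25));
        // Assumed Date.of semantics: literal year, 1-based month, so the
        // same arguments denote 0100-11-25.
        System.out.println(Date.of(year - 1900, 11, 25));
      }
    }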

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
index 81d2e2d..092da69 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
@@ -420,12 +420,12 @@ public class TestOrcSerDeStats {
     long rowCount = writer.getNumberOfRows();
     long rawDataSize = writer.getRawDataSize();
     assertEquals(2, rowCount);
-    assertEquals(1740, rawDataSize);
+    assertEquals(1668, rawDataSize);
     Reader reader = OrcFile.createReader(testFilePath,
         OrcFile.readerOptions(conf).filesystem(fs));
 
     assertEquals(2, reader.getNumberOfRows());
-    assertEquals(1740, reader.getRawDataSize());
+    assertEquals(1668, reader.getRawDataSize());
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1")));
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("byte1")));
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("short1")));
@@ -438,9 +438,9 @@ public class TestOrcSerDeStats {
     assertEquals(455, reader.getRawDataSizeOfColumns(Lists.newArrayList("list")));
     assertEquals(368, reader.getRawDataSizeOfColumns(Lists.newArrayList("map")));
     assertEquals(364, reader.getRawDataSizeOfColumns(Lists.newArrayList("middle")));
-    assertEquals(80, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
+    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
     assertEquals(224, reader.getRawDataSizeOfColumns(Lists.newArrayList("decimal1")));
-    assertEquals(88, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
+    assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
     assertEquals(1195,
         reader.getRawDataSizeOfColumns(Lists.newArrayList("middle", "list", "map", "float1")));
     assertEquals(185,
@@ -514,12 +514,12 @@ public class TestOrcSerDeStats {
     long rowCount = writer.getNumberOfRows();
     long rawDataSize = writer.getRawDataSize();
     assertEquals(2, rowCount);
-    assertEquals(1740, rawDataSize);
+    assertEquals(1668, rawDataSize);
     Reader reader = OrcFile.createReader(testFilePath,
         OrcFile.readerOptions(conf).filesystem(fs));
 
     assertEquals(2, reader.getNumberOfRows());
-    assertEquals(1740, reader.getRawDataSize());
+    assertEquals(1668, reader.getRawDataSize());
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1")));
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("byte1")));
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("short1")));
@@ -532,9 +532,9 @@ public class TestOrcSerDeStats {
     assertEquals(455, reader.getRawDataSizeOfColumns(Lists.newArrayList("list")));
     assertEquals(368, reader.getRawDataSizeOfColumns(Lists.newArrayList("map")));
     assertEquals(364, reader.getRawDataSizeOfColumns(Lists.newArrayList("middle")));
-    assertEquals(80, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
+    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
     assertEquals(224, reader.getRawDataSizeOfColumns(Lists.newArrayList("decimal1")));
-    assertEquals(88, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
+    assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
     assertEquals(1195,
         reader.getRawDataSizeOfColumns(Lists.newArrayList("middle", "list", "map", "float1")));
     assertEquals(185,
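
The raw-data-size expectations above change consistently: the timestamp
column's raw size drops from 40 to 4 bytes per row (80 -> 8 over the
file's two rows), and the file-level totals drop by the same 72 bytes
(1740 -> 1668). A trivial check of that arithmetic (illustrative, not
part of the patch):

    public class RawDataSizeDelta {
      public static void main(String[] args) {
        // The 72-byte drop in the file total is entirely the timestamp
        // column's smaller raw encoding.
        System.out.println((1740 - 1668) == (80 - 8));  // true
      }
    }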

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
index c23f00e..2071d13 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
@@ -19,9 +19,7 @@
 package org.apache.hadoop.hive.ql.io.orc;
 
 import java.io.File;
-import java.sql.Date;
-import java.sql.Timestamp;
-import java.util.Calendar;
+import java.time.LocalDateTime;
 import java.util.Random;
 
 import junit.framework.Assert;
@@ -29,7 +27,9 @@ import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -38,18 +38,17 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.orc.TypeDescription;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -134,7 +133,7 @@ public class TestVectorizedORCReader {
     for (int i = 0; i < 21000; ++i) {
       if ((i % 7) != 0) {
         writer.addRow(new MyRecord(((i % 3) == 0), (byte)(i % 5), i, (long) 200, (short) (300 + i), (double) (400 + i),
-            words[r1.nextInt(words.length)], new Timestamp(Calendar.getInstance().getTime().getTime()),
+            words[r1.nextInt(words.length)], Timestamp.valueOf(LocalDateTime.now().toString()),
             Date.valueOf(dates[i % 3]), HiveDecimal.create(decimalStrings[i % decimalStrings.length])));
       } else {
         writer.addRow(new MyRecord(null, null, i, (long) 200, null, null, null, null, null, null));
@@ -174,19 +173,21 @@ public class TestVectorizedORCReader {
             Long temp = (long) (((BooleanWritable) a).get() ? 1 : 0);
             long b = ((LongColumnVector) cv).vector[rowId];
             Assert.assertEquals(temp.toString(), Long.toString(b));
-          } else if (a instanceof TimestampWritable) {
+          } else if (a instanceof TimestampWritableV2) {
             // Timestamps are stored as long, so convert and compare
-            TimestampWritable t = ((TimestampWritable) a);
+            TimestampWritableV2 t = ((TimestampWritableV2) a);
             TimestampColumnVector tcv = ((TimestampColumnVector) cv);
-            Assert.assertEquals(t.getTimestamp(), tcv.asScratchTimestamp(rowId));
+            java.sql.Timestamp ts = tcv.asScratchTimestamp(rowId);
+            Assert.assertEquals(
+                t.getTimestamp(), Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
 
-          } else if (a instanceof DateWritable) {
+          } else if (a instanceof DateWritableV2) {
             // Dates are stored as long, so convert and compare
 
-            DateWritable adt = (DateWritable) a;
+            DateWritableV2 adt = (DateWritableV2) a;
             long b = ((LongColumnVector) cv).vector[rowId];
-            Assert.assertEquals(adt.get().getTime(),
-                DateWritable.daysToMillis((int) b));
+            Assert.assertEquals(adt.get().toEpochMilli(),
+                DateWritableV2.daysToMillis((int) b));
 
           } else if (a instanceof HiveDecimalWritable) {
             // Decimals are stored as BigInteger, so convert and compare
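
The updated comparison above bridges the vectorized reader's scratch
value, still a java.sql.Timestamp, to the new Hive Timestamp. A small
sketch of that bridge, using only the calls visible in the hunk
(illustrative, not part of the patch):

    import org.apache.hadoop.hive.common.type.Timestamp;

    public class ScratchTimestampBridge {
      // Mirrors the test's conversion of a value returned by
      // TimestampColumnVector.asScratchTimestamp(rowId).
      static Timestamp toHiveTimestamp(java.sql.Timestamp ts) {
        // As used in the hunk above, ofEpochMilli(millis, nanos) pairs the
        // epoch milliseconds with the full nanosecond field.
        return Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
      }
    }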

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
index a230441..1d32afe 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -62,7 +63,6 @@ import org.apache.parquet.io.api.Binary;
 import org.apache.parquet.schema.MessageType;
 
 import java.io.IOException;
-import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.List;
 
@@ -209,7 +209,9 @@ public class VectorizedColumnReaderTestBase {
   }
 
   protected static NanoTime getNanoTime(int index) {
-    return NanoTimeUtils.getNanoTime(new Timestamp(index), false);
+    Timestamp ts = new Timestamp();
+    ts.setTimeInMillis(index);
+    return NanoTimeUtils.getNanoTime(ts, false);
   }
 
   protected static HiveDecimal getDecimal(
@@ -376,8 +378,13 @@ public class VectorizedColumnReaderTestBase {
           if (c == nElements) {
             break;
           }
-          Timestamp expected = isDictionaryEncoding ? new Timestamp(c % UNIQUE_NUM) : new Timestamp(c);
-          assertEquals("Not the same time at " + c, expected.getTime(), vector.getTime(i));
+          Timestamp expected = new Timestamp();
+          if (isDictionaryEncoding) {
+            expected.setTimeInMillis(c % UNIQUE_NUM);
+          } else {
+            expected.setTimeInMillis(c);
+          }
+          assertEquals("Not the same time at " + c, expected.toEpochMilli(), vector.getTime(i));
           assertEquals("Not the same nano at " + c, expected.getNanos(), vector.getNanos(i));
           assertFalse(vector.isNull[i]);
           c++;
@@ -408,8 +415,12 @@ public class VectorizedColumnReaderTestBase {
             break;
           }
 
-          Timestamp expected = isDictionaryEncoding ? new Timestamp(c % UNIQUE_NUM) : new Timestamp(
-              c);
+          Timestamp expected = new Timestamp();
+          if (isDictionaryEncoding) {
+            expected.setTimeInMillis(c % UNIQUE_NUM);
+          } else {
+            expected.setTimeInMillis(c);
+          }
           String actual = new String(Arrays
               .copyOfRange(vector.vector[i], vector.start[i], vector.start[i] + vector.length[i]));
           assertEquals("Not the same time at " + c, expected.toString(), actual);
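
The expected-value construction above is duplicated across both hunks; it
could be factored into a helper. A hypothetical sketch using the same Hive
Timestamp API the patch introduces (names are illustrative, not part of
the patch):

    static Timestamp expectedTimestamp(long c, boolean isDictionaryEncoding,
        long uniqueNum) {
      Timestamp expected = new Timestamp();
      // Dictionary-encoded columns cycle through uniqueNum distinct values.
      expected.setTimeInMillis(isDictionaryEncoding ? c % uniqueNum : c);
      return expected;
    }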

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
index d14f0a9..477825e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
@@ -13,19 +13,18 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.serde;
 
-import java.sql.Timestamp;
 import java.util.Calendar;
-import java.util.Date;
 import java.util.GregorianCalendar;
 import java.util.TimeZone;
 import java.util.concurrent.TimeUnit;
 
-import junit.framework.Assert;
-import junit.framework.TestCase;
-
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
 
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
 
 
 /**
@@ -42,7 +41,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal.set(Calendar.HOUR_OF_DAY, 0);
     cal.setTimeZone(TimeZone.getTimeZone("GMT"));
 
-    Timestamp ts = new Timestamp(cal.getTimeInMillis());
+    Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis());
     NanoTime nt = NanoTimeUtils.getNanoTime(ts, false);
     Assert.assertEquals(nt.getJulianDay(), 2440000);
 
@@ -57,7 +56,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal1.set(Calendar.HOUR_OF_DAY, 0);
     cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
 
-    Timestamp ts1 = new Timestamp(cal1.getTimeInMillis());
+    Timestamp ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis());
     NanoTime nt1 = NanoTimeUtils.getNanoTime(ts1, false);
 
     Timestamp ts1Fetched = NanoTimeUtils.getTimestamp(nt1, false);
@@ -70,7 +69,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal2.set(Calendar.HOUR_OF_DAY, 0);
     cal2.setTimeZone(TimeZone.getTimeZone("UTC"));
 
-    Timestamp ts2 = new Timestamp(cal2.getTimeInMillis());
+    Timestamp ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis());
     NanoTime nt2 = NanoTimeUtils.getNanoTime(ts2, false);
 
     Timestamp ts2Fetched = NanoTimeUtils.getTimestamp(nt2, false);
@@ -86,7 +85,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal1.set(Calendar.HOUR_OF_DAY, 0);
     cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
 
-    ts1 = new Timestamp(cal1.getTimeInMillis());
+    ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis());
     nt1 = NanoTimeUtils.getNanoTime(ts1, false);
 
     ts1Fetched = NanoTimeUtils.getTimestamp(nt1, false);
@@ -99,7 +98,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal2.set(Calendar.HOUR_OF_DAY, 0);
     cal2.setTimeZone(TimeZone.getTimeZone("UTC"));
 
-    ts2 = new Timestamp(cal2.getTimeInMillis());
+    ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis());
     nt2 = NanoTimeUtils.getNanoTime(ts2, false);
 
     ts2Fetched = NanoTimeUtils.getTimestamp(nt2, false);
@@ -117,8 +116,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal.set(Calendar.MINUTE, 1);
     cal.set(Calendar.SECOND, 1);
     cal.setTimeZone(TimeZone.getTimeZone("GMT"));
-    Timestamp ts = new Timestamp(cal.getTimeInMillis());
-    ts.setNanos(1);
+    Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 1);
 
     //(1*60*60 + 1*60 + 1) * 10e9 + 1
     NanoTime nt = NanoTimeUtils.getNanoTime(ts, false);
@@ -133,8 +131,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal.set(Calendar.MINUTE, 59);
     cal.set(Calendar.SECOND, 59);
     cal.setTimeZone(TimeZone.getTimeZone("GMT"));
-    ts = new Timestamp(cal.getTimeInMillis());
-    ts.setNanos(999999999);
+    ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 999999999);
 
     //(23*60*60 + 59*60 + 59)*10e9 + 999999999
     nt = NanoTimeUtils.getNanoTime(ts, false);
@@ -149,8 +146,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal2.set(Calendar.MINUTE, 10);
     cal2.set(Calendar.SECOND, 0);
     cal2.setTimeZone(TimeZone.getTimeZone("GMT"));
-    Timestamp ts2 = new Timestamp(cal2.getTimeInMillis());
-    ts2.setNanos(10);
+    Timestamp ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis(), 10);
 
     Calendar cal1 = Calendar.getInstance();
     cal1.set(Calendar.YEAR,  1968);
@@ -160,8 +156,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal1.set(Calendar.MINUTE, 0);
     cal1.set(Calendar.SECOND, 0);
     cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
-    Timestamp ts1 = new Timestamp(cal1.getTimeInMillis());
-    ts1.setNanos(1);
+    Timestamp ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis(), 1);
 
     NanoTime n2 = NanoTimeUtils.getNanoTime(ts2, false);
     NanoTime n1 = NanoTimeUtils.getNanoTime(ts1, false);
@@ -183,8 +178,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal.set(Calendar.MINUTE, 1);
     cal.set(Calendar.SECOND, 1);
     cal.setTimeZone(TimeZone.getTimeZone("US/Pacific"));
-    Timestamp ts = new Timestamp(cal.getTimeInMillis());
-    ts.setNanos(1);
+    Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 1);
 
     /**
      * 17:00 PDT = 00:00 GMT (daylight-savings)
@@ -212,15 +206,15 @@ public class TestParquetTimestampUtils extends TestCase {
   public void testTimezoneless() {
     Timestamp ts1 = Timestamp.valueOf("2011-01-01 00:30:30.111111111");
     NanoTime nt1 = NanoTimeUtils.getNanoTime(ts1, true);
-    Assert.assertEquals(nt1.getJulianDay(), 2455563);
-    Assert.assertEquals(nt1.getTimeOfDayNanos(), 1830111111111L);
+    Assert.assertEquals(nt1.getJulianDay(), 2455562);
+    Assert.assertEquals(nt1.getTimeOfDayNanos(), 59430111111111L);
     Timestamp ts1Fetched = NanoTimeUtils.getTimestamp(nt1, true);
     Assert.assertEquals(ts1Fetched.toString(), ts1.toString());
 
     Timestamp ts2 = Timestamp.valueOf("2011-02-02 08:30:30.222222222");
     NanoTime nt2 = NanoTimeUtils.getNanoTime(ts2, true);
     Assert.assertEquals(nt2.getJulianDay(), 2455595);
-    Assert.assertEquals(nt2.getTimeOfDayNanos(), 30630222222222L);
+    Assert.assertEquals(nt2.getTimeOfDayNanos(), 1830222222222L);
     Timestamp ts2Fetched = NanoTimeUtils.getTimestamp(nt2, true);
     Assert.assertEquals(ts2Fetched.toString(), ts2.toString());
   }
@@ -251,7 +245,7 @@ public class TestParquetTimestampUtils extends TestCase {
 
     //test some extreme cases.
     verifyTsString("9999-09-09 09:09:09.999999999", local);
-    verifyTsString("0001-01-01 00:00:00.0", local);
+    verifyTsString("0001-01-01 00:00:00", local);
   }
 
   private void verifyTsString(String tsString, boolean local) {
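
The updated constants in testTimezoneless are consistent with these tests
running in a US/Pacific JVM: "2011-01-01 00:30:30" taken as UTC renders
locally (UTC-8 in winter) as 2010-12-31 16:30:30, one Julian day earlier,
with 59430.111111111 seconds of day. A JDK-only check of that arithmetic
(illustrative, not part of the patch):

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class JulianShiftCheck {
      public static void main(String[] args) {
        LocalDateTime local = LocalDateTime.parse("2011-01-01T00:30:30")
            .atZone(ZoneOffset.UTC)
            .withZoneSameInstant(ZoneId.of("US/Pacific"))
            .toLocalDateTime();
        System.out.println(local);  // 2010-12-31T16:30:30
        // Time-of-day nanos for 16:30:30.111111111:
        System.out.println(
            (16 * 3600 + 30 * 60 + 30) * 1_000_000_000L + 111_111_111L);
        // 59430111111111, matching the updated assertion
      }
    }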

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java
index 406cece..97695c2 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java
@@ -19,11 +19,11 @@ package org.apache.hadoop.hive.ql.parse;
 
 import static org.junit.Assert.*;
 
-import java.sql.Date;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.junit.Test;
 
 public class TestSemanticAnalyzer {
@@ -61,7 +61,7 @@ public class TestSemanticAnalyzer {
     BaseSemanticAnalyzer.normalizeColSpec(partSpec, colName, colType, originalColSpec, colValue);
     assertEquals(result, partSpec.get(colName));
     if (colValue instanceof Date) {
-      DateWritable dw = new DateWritable((Date)colValue);
+      DateWritableV2 dw = new DateWritableV2((Date)colValue);
       BaseSemanticAnalyzer.normalizeColSpec(partSpec, colName, colType, originalColSpec, dw);
       assertEquals(result, partSpec.get(colName));
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java
index 9f20ff6..4770ab7 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java
@@ -17,14 +17,13 @@
  */
 package org.apache.hadoop.hive.ql.udf;
 
-import java.sql.Timestamp;
 import java.time.Instant;
 import java.time.ZoneId;
-import java.time.ZoneOffset;
 
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.junit.Test;
 
 import junit.framework.TestCase;
@@ -37,56 +36,56 @@ public class TestUDFDateFormatGranularity extends TestCase {
   public void testTimestampToTimestampWithGranularity() throws Exception {
     // Running example
     // Friday 30th August 1985 02:47:02 AM
-    final TimestampWritable t = new TimestampWritable(new Timestamp(494243222000L));
+    final TimestampWritableV2 t = new TimestampWritableV2(Timestamp.ofEpochMilli(494243222000L));
     UDFDateFloor g;
 
     // Year granularity
     // Tuesday 1st January 1985 12:00:00 AM
     g = new UDFDateFloorYear();
-    TimestampWritable i1 = g.evaluate(t);
-    assertEquals(473414400000L, i1.getTimestamp().getTime());
+    TimestampWritableV2 i1 = g.evaluate(t);
+    assertEquals(473385600000L, i1.getTimestamp().toEpochMilli());
     
     // Quarter granularity
     // Monday 1st July 1985 12:00:00 AM
     g = new UDFDateFloorQuarter();
-    TimestampWritable i2 = g.evaluate(t);
-    assertEquals(489049200000L, i2.getTimestamp().getTime());
+    TimestampWritableV2 i2 = g.evaluate(t);
+    assertEquals(489024000000L, i2.getTimestamp().toEpochMilli());
 
     // Month granularity
     // Thursday 1st August 1985 12:00:00 AM
     g = new UDFDateFloorMonth();
-    TimestampWritable i3 = g.evaluate(t);
-    assertEquals(491727600000L, i3.getTimestamp().getTime());
+    TimestampWritableV2 i3 = g.evaluate(t);
+    assertEquals(491702400000L, i3.getTimestamp().toEpochMilli());
 
     // Week granularity
     // Monday 26th August 1985 12:00:00 AM
     g = new UDFDateFloorWeek();
-    TimestampWritable i4 = g.evaluate(t);
-    assertEquals(493887600000L, i4.getTimestamp().getTime());
+    TimestampWritableV2 i4 = g.evaluate(t);
+    assertEquals(493862400000L, i4.getTimestamp().toEpochMilli());
 
     // Day granularity
     // Friday 30th August 1985 12:00:00 AM
     g = new UDFDateFloorDay();
-    TimestampWritable i5 = g.evaluate(t);
-    assertEquals(494233200000L, i5.getTimestamp().getTime());
+    TimestampWritableV2 i5 = g.evaluate(t);
+    assertEquals(494208000000L, i5.getTimestamp().toEpochMilli());
 
     // Hour granularity
     // Friday 30th August 1985 02:00:00 AM
     g = new UDFDateFloorHour();
-    TimestampWritable i6 = g.evaluate(t);
-    assertEquals(494240400000L, i6.getTimestamp().getTime());
+    TimestampWritableV2 i6 = g.evaluate(t);
+    assertEquals(494240400000L, i6.getTimestamp().toEpochMilli());
 
     // Minute granularity
     // Friday 30th August 1985 02:47:00 AM
     g = new UDFDateFloorMinute();
-    TimestampWritable i7 = g.evaluate(t);
-    assertEquals(494243220000L, i7.getTimestamp().getTime());
+    TimestampWritableV2 i7 = g.evaluate(t);
+    assertEquals(494243220000L, i7.getTimestamp().toEpochMilli());
 
     // Second granularity
     // Friday 30th August 1985 02:47:02 AM
     g = new UDFDateFloorSecond();
-    TimestampWritable i8 = g.evaluate(t);
-    assertEquals(494243222000L, i8.getTimestamp().getTime());
+    TimestampWritableV2 i8 = g.evaluate(t);
+    assertEquals(494243222000L, i8.getTimestamp().toEpochMilli());
   }
 
   @Test
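
The new expected values above differ from the old ones by exactly the
Pacific offset at the floored instant: 8 hours (28,800,000 ms, standard
time) for the January 1 year floor, 7 hours (25,200,000 ms, daylight
time) for the quarter, month, week, and day floors, and 0 for hour and
finer granularities, since the offset is a whole number of hours. The
floors are now computed on the UTC clock instead of the local one. A
quick check of two of those deltas (illustrative, not part of the patch):

    public class FloorDeltaCheck {
      public static void main(String[] args) {
        // Year floor, 1985-01-01: Pacific standard offset.
        System.out.println(473414400000L - 473385600000L);  // 28800000 = 8h
        // Day floor, 1985-08-30: Pacific daylight offset.
        System.out.println(494233200000L - 494208000000L);  // 25200000 = 7h
      }
    }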

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java
index 0db9370..7c2ee15 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java
@@ -19,19 +19,19 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import junit.framework.TestCase;
 
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
-import java.sql.Timestamp;
 
 public class TestGenericUDFAddMonths extends TestCase {
 
@@ -151,41 +151,29 @@ public class TestGenericUDFAddMonths extends TestCase {
   }
 
   public void testWrongDateStr() throws HiveException {
-    boolean caught = false;
-    try {
-      GenericUDFAddMonths udf = new GenericUDFAddMonths();
-      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-      ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
-      ObjectInspector[] arguments = { valueOI0, valueOI1 };
+    GenericUDFAddMonths udf = new GenericUDFAddMonths();
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+    ObjectInspector[] arguments = { valueOI0, valueOI1 };
 
-      udf.initialize(arguments);
-      runAndVerify("2014-02-30", 1, "2014-04-02", udf);
-      runAndVerify("2014-02-32", 1, "2014-04-04", udf);
-      runAndVerify("2014-01", 1, null, udf);
-    } catch (HiveException e) {
-      caught = true;
-    }
-    assertTrue(caught);
+    udf.initialize(arguments);
+    runAndVerify("2014-02-30", 1, "2014-04-02", udf);
+    runAndVerify("2014-02-32", 1, "2014-04-04", udf);
+    runAndVerify("2014-01", 1, null, udf);
   }
 
   public void testWrongTsStr() throws HiveException {
-    boolean caught = false;
-    try {
-      GenericUDFAddMonths udf = new GenericUDFAddMonths();
-      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-      ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
-      ObjectInspector[] arguments = { valueOI0, valueOI1 };
+    GenericUDFAddMonths udf = new GenericUDFAddMonths();
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+    ObjectInspector[] arguments = { valueOI0, valueOI1 };
 
-      udf.initialize(arguments);
+    udf.initialize(arguments);
 
-      runAndVerify("2014-02-30 10:30:00", 1, "2014-04-02", udf);
-      runAndVerify("2014-02-32 10:30:00", 1, "2014-04-04", udf);
-      runAndVerify("2014/01/31 10:30:00", 1, null, udf);
-      runAndVerify("2014-01-31T10:30:00", 1, "2014-02-28", udf);
-    } catch (HiveException e) {
-      caught = true;
-    }
-    assertTrue(caught);
+    runAndVerify("2014-02-30 10:30:00", 1, "2014-04-02", udf);
+    runAndVerify("2014-02-32 10:30:00", 1, "2014-04-04", udf);
+    runAndVerify("2014/01/31 10:30:00", 1, null, udf);
+    runAndVerify("2014-01-31T10:30:00", 1, "2014-02-28", udf);
   }
 
   public void testAddMonthsShort() throws HiveException {
@@ -250,7 +238,7 @@ public class TestGenericUDFAddMonths extends TestCase {
 
   private void runAndVerify(Timestamp ts, int months, Text dateFormat, String expResult, GenericUDF udf)
       throws HiveException {
-    DeferredObject valueObj0 = new DeferredJavaObject(new TimestampWritable(ts));
+    DeferredObject valueObj0 = new DeferredJavaObject(new TimestampWritableV2(ts));
     DeferredObject valueObj1 = new DeferredJavaObject(new IntWritable(months));
     DeferredObject valueObj2 = new DeferredJavaObject(dateFormat);
     DeferredObject[] args = {valueObj0, valueObj1, valueObj2};
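
The try/catch removal above reflects the new behavior: add_months() no
longer throws on malformed inputs. Overflowed day-of-month values are
parsed leniently and roll forward, which is where the rewritten expected
strings come from; unparseable inputs yield null. A JDK-only sketch of
the rollover (illustrative, not part of the patch):

    import java.time.LocalDate;

    public class LenientRollover {
      public static void main(String[] args) {
        // February 2014 has 28 days, so a lenient "2014-02-30" rolls
        // forward to 2014-03-02; one month later is 2014-04-02.
        LocalDate rolled = LocalDate.of(2014, 2, 1).plusDays(30 - 1);
        System.out.println(rolled);               // 2014-03-02
        System.out.println(rolled.plusMonths(1)); // 2014-04-02
      }
    }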

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java
index 0acb46d..dcb4d9c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java
@@ -18,22 +18,22 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-import java.sql.Timestamp;
-
-import junit.framework.TestCase;
+import java.time.LocalDateTime;
 
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDate;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
+import junit.framework.TestCase;
+
 public class TestGenericUDFDate extends TestCase {
   public void testStringToDate() throws HiveException {
     GenericUDFDate udf = new GenericUDFDate();
@@ -43,13 +43,13 @@ public class TestGenericUDFDate extends TestCase {
     udf.initialize(arguments);
     DeferredObject valueObj = new DeferredJavaObject(new Text("2009-07-30"));
     DeferredObject[] args = {valueObj};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
     assertEquals("to_date() test for STRING failed ", "2009-07-30", output.toString());
 
     // Try with null args
     DeferredObject[] nullArgs = { new DeferredJavaObject(null) };
-    output = (DateWritable) udf.evaluate(nullArgs);
+    output = (DateWritableV2) udf.evaluate(nullArgs);
     assertNull("to_date() with null STRING", output);
   }
 
@@ -59,16 +59,16 @@ public class TestGenericUDFDate extends TestCase {
     ObjectInspector[] arguments = {valueOI};
 
     udf.initialize(arguments);
-    DeferredObject valueObj = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
-        30, 4, 17, 52, 0)));
+    DeferredObject valueObj = new DeferredJavaObject(new TimestampWritableV2(
+        Timestamp.valueOf(LocalDateTime.of(109, 06, 30, 4, 17, 52, 0).toString())));
     DeferredObject[] args = {valueObj};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
-    assertEquals("to_date() test for TIMESTAMP failed ", "2009-07-30", output.toString());
+    assertEquals("to_date() test for TIMESTAMP failed ", "0109-06-30", output.toString());
 
     // Try with null args
     DeferredObject[] nullArgs = { new DeferredJavaObject(null) };
-    output = (DateWritable) udf.evaluate(nullArgs);
+    output = (DateWritableV2) udf.evaluate(nullArgs);
     assertNull("to_date() with null TIMESTAMP", output);
   }
 
@@ -78,15 +78,15 @@ public class TestGenericUDFDate extends TestCase {
     ObjectInspector[] arguments = {valueOI};
 
     udf.initialize(arguments);
-    DeferredObject valueObj = new DeferredJavaObject(new DateWritable(new Date(109, 06, 30)));
+    DeferredObject valueObj = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 30)));
     DeferredObject[] args = {valueObj};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
-    assertEquals("to_date() test for DATEWRITABLE failed ", "2009-07-30", output.toString());
+    assertEquals("to_date() test for DATEWRITABLE failed ", "0109-06-30", output.toString());
 
     // Try with null args
     DeferredObject[] nullArgs = { new DeferredJavaObject(null) };
-    output = (DateWritable) udf.evaluate(nullArgs);
+    output = (DateWritableV2) udf.evaluate(nullArgs);
     assertNull("to_date() with null DATE", output);
   }
 
@@ -97,7 +97,7 @@ public class TestGenericUDFDate extends TestCase {
 
     udf.initialize(arguments);
     DeferredObject[] args = { new DeferredJavaObject(null) };
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
     // Try with null VOID
     assertNull("to_date() with null DATE ", output);
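
The expected strings above change from "2009-07-30" to "0109-06-30"
because the two constructions differ in both year and month conventions.
A side-by-side sketch (illustrative, not part of the patch):

    import java.time.LocalDateTime;

    public class CtorConventions {
      @SuppressWarnings("deprecation")
      public static void main(String[] args) {
        // Deprecated java.sql.Timestamp ctor: year is offset by 1900 and
        // months are 0-based, so (109, 6, 30, ...) meant 2009-07-30.
        System.out.println(new java.sql.Timestamp(109, 6, 30, 4, 17, 52, 0));
        // LocalDateTime takes the literal year and 1-based months, so the
        // same arguments now denote 0109-06-30.
        System.out.println(LocalDateTime.of(109, 6, 30, 4, 17, 52, 0));
      }
    }

The same convention shift underlies the reworked timestamp constructions
in the date_add(), date_sub(), and datediff() tests below.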

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java
index 9caf3b7..d74a4ef 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java
@@ -18,21 +18,21 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-import java.sql.Timestamp;
-
-import junit.framework.TestCase;
+import java.time.LocalDateTime;
 
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
+import junit.framework.TestCase;
+
 public class TestGenericUDFDateAdd extends TestCase {
   public void testStringToDate() throws HiveException {
     GenericUDFDateAdd udf = new GenericUDFDateAdd();
@@ -44,7 +44,7 @@ public class TestGenericUDFDateAdd extends TestCase {
     DeferredObject valueObj1 = new DeferredJavaObject(new Text("2009-07-20 04:17:52"));
     DeferredObject valueObj2 = new DeferredJavaObject(new Integer("2"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
     assertEquals("date_add() test for STRING failed ", "2009-07-22", output.toString());
 
@@ -66,13 +66,13 @@ public class TestGenericUDFDateAdd extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
-        20, 4, 17, 52, 0)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritableV2(
+        Timestamp.valueOf(LocalDateTime.of(109, 06, 20, 4, 17, 52, 0).toString())));
     DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
-    assertEquals("date_add() test for TIMESTAMP failed ", "2009-07-23", output.toString());
+    assertEquals("date_add() test for TIMESTAMP failed ", "0109-06-23", output.toString());
 
     // Test with null args
     args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
@@ -93,12 +93,12 @@ public class TestGenericUDFDateAdd extends TestCase {
 
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Integer("4"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
-    assertEquals("date_add() test for DATEWRITABLE failed ", "2009-07-24", output.toString());
+    assertEquals("date_add() test for DATEWRITABLE failed ", "0109-06-24", output.toString());
 
     // Test with null args
     args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
@@ -118,12 +118,12 @@ public class TestGenericUDFDateAdd extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Byte("4"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
-    assertEquals("date_add() test for BYTE failed ", "2009-07-24", output.toString());
+    assertEquals("date_add() test for BYTE failed ", "0109-06-24", output.toString());
   }
 
   public void testShortDataTypeAsDays() throws HiveException {
@@ -133,11 +133,11 @@ public class TestGenericUDFDateAdd extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Short("4"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
-    assertEquals("date_add() test for SHORT failed ", "2009-07-24", output.toString());
+    assertEquals("date_add() test for SHORT failed ", "0109-06-24", output.toString());
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java
index 3f4ea3f..ea183d4 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java
@@ -18,22 +18,22 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-import java.sql.Timestamp;
-
-import junit.framework.TestCase;
+import java.time.LocalDateTime;
 
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateDiff;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 
+import junit.framework.TestCase;
+
 public class TestGenericUDFDateDiff extends TestCase {
   public void testStringToDate() throws HiveException {
     GenericUDFDateDiff udf = new GenericUDFDateDiff();
@@ -67,10 +67,10 @@ public class TestGenericUDFDateDiff extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
-        20, 0, 0, 0, 0)));
-    DeferredObject valueObj2 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
-        17, 0, 0, 0, 0)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritableV2(
+        Timestamp.valueOf(LocalDateTime.of(109, 06, 20, 0, 0, 0, 0).toString())));
+    DeferredObject valueObj2 = new DeferredJavaObject(new TimestampWritableV2(
+        Timestamp.valueOf(LocalDateTime.of(109, 06, 17, 0, 0, 0, 0).toString())));
     DeferredObject[] args = {valueObj1, valueObj2};
     IntWritable output = (IntWritable) udf.evaluate(args);
 
@@ -95,8 +95,8 @@ public class TestGenericUDFDateDiff extends TestCase {
 
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
-    DeferredObject valueObj2 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 10)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
+    DeferredObject valueObj2 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 10)));
     DeferredObject[] args = {valueObj1, valueObj2};
     IntWritable output = (IntWritable) udf.evaluate(args);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
index d29d964..6a3cdda 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
@@ -17,21 +17,19 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-import java.sql.Timestamp;
-
 import junit.framework.TestCase;
 
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.Text;
-import org.junit.Assume;
 
 public class TestGenericUDFDateFormat extends TestCase {
 
@@ -67,24 +65,18 @@ public class TestGenericUDFDateFormat extends TestCase {
   }
 
   public void testWrongDateStr() throws HiveException {
-    boolean caught = false;
-    try {
-      GenericUDFDateFormat udf = new GenericUDFDateFormat();
-      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-      Text fmtText = new Text("EEEE");
-      ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
-              .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
-      ObjectInspector[] arguments = { valueOI0, valueOI1 };
-
-      udf.initialize(arguments);
-      runAndVerifyStr("2016-02-30 10:30:45", fmtText, "Tuesday", udf);
-      runAndVerifyStr("2014-01-32", fmtText, "Saturday", udf);
-      runAndVerifyStr("01/14/2014", fmtText, null, udf);
-      runAndVerifyStr(null, fmtText, null, udf);
-    } catch (HiveException e) {
-      caught = true;
-    }
-    assertTrue(caught);
+    GenericUDFDateFormat udf = new GenericUDFDateFormat();
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    Text fmtText = new Text("EEEE");
+    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
+        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
+    ObjectInspector[] arguments = {valueOI0, valueOI1};
+
+    udf.initialize(arguments);
+    runAndVerifyStr("2016-02-30 10:30:45", fmtText, "Tuesday", udf);
+    runAndVerifyStr("2014-01-32", fmtText, "Saturday", udf);
+    runAndVerifyStr("01/14/2014", fmtText, null, udf);
+    runAndVerifyStr(null, fmtText, null, udf);
   }
 
   public void testDateFormatDate() throws HiveException {
@@ -165,7 +157,7 @@ public class TestGenericUDFDateFormat extends TestCase {
 
   private void runAndVerifyDate(String str, Text fmtText, String expResult, GenericUDF udf)
       throws HiveException {
-    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new DateWritable(
+    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new DateWritableV2(
         Date.valueOf(str)) : null);
     DeferredObject valueObj1 = new DeferredJavaObject(fmtText);
     DeferredObject[] args = { valueObj0, valueObj1 };
@@ -175,7 +167,7 @@ public class TestGenericUDFDateFormat extends TestCase {
 
   private void runAndVerifyTs(String str, Text fmtText, String expResult, GenericUDF udf)
       throws HiveException {
-    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new TimestampWritable(
+    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new TimestampWritableV2(
         Timestamp.valueOf(str)) : null);
     DeferredObject valueObj1 = new DeferredJavaObject(fmtText);
     DeferredObject[] args = { valueObj0, valueObj1 };

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java
index cb00cfd..c71c2b7 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java
@@ -18,21 +18,21 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-import java.sql.Timestamp;
-
-import junit.framework.TestCase;
+import java.time.LocalDateTime;
 
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateSub;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
+import junit.framework.TestCase;
+
 public class TestGenericUDFDateSub extends TestCase {
   public void testStringToDate() throws HiveException {
     GenericUDFDateSub udf = new GenericUDFDateSub();
@@ -44,7 +44,7 @@ public class TestGenericUDFDateSub extends TestCase {
     DeferredObject valueObj1 = new DeferredJavaObject(new Text("2009-07-20 04:17:52"));
     DeferredObject valueObj2 = new DeferredJavaObject(new Integer("2"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
     assertEquals("date_sub() test for STRING failed ", "2009-07-18", output.toString());
 
@@ -66,13 +66,13 @@ public class TestGenericUDFDateSub extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
-        20, 4, 17, 52, 0)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritableV2(
+        Timestamp.valueOf(LocalDateTime.of(109, 06, 20, 4, 17, 52, 0).toString())));
     DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
-    assertEquals("date_sub() test for TIMESTAMP failed ", "2009-07-17", output.toString());
+    assertEquals("date_sub() test for TIMESTAMP failed ", "0109-06-17", output.toString());
 
     // Test with null args
     args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
@@ -93,12 +93,12 @@ public class TestGenericUDFDateSub extends TestCase {
 
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Integer("4"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
-    assertEquals("date_sub() test for DATEWRITABLE failed ", "2009-07-16", output.toString());
+    assertEquals("date_sub() test for DATEWRITABLE failed ", "0109-06-16", output.toString());
 
     // Test with null args
     args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
@@ -118,12 +118,12 @@ public class TestGenericUDFDateSub extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Byte("4"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
-    assertEquals("date_add() test for BYTE failed ", "2009-07-16", output.toString());
+    assertEquals("date_add() test for BYTE failed ", "0109-06-16", output.toString());
   }
 
   public void testShortDataTypeAsDays() throws HiveException {
@@ -133,11 +133,11 @@ public class TestGenericUDFDateSub extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Short("4"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritable output = (DateWritable) udf.evaluate(args);
+    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
 
-    assertEquals("date_add() test for SHORT failed ", "2009-07-16", output.toString());
+    assertEquals("date_add() test for SHORT failed ", "0109-06-16", output.toString());
   }
 }
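
[Note: the shifted expectations above ("2009-07-xx" becoming "0109-06-xx")
follow from constructor semantics, not from a behavior change in date_sub:
the deprecated java.sql constructors offset the year by 1900 and use 0-based
months, while the new Hive Date.of and java.time take the literal proleptic
year and 1-based months. A JDK-only sketch (illustrative class name):]

    import java.time.LocalDate;

    public class LegacyDateCtorDemo {
      public static void main(String[] args) {
        // Deprecated java.sql.Date ctor: year offset by 1900, 0-based month,
        // so (109, 6, 20) used to mean 2009-07-20.
        @SuppressWarnings("deprecation")
        java.sql.Date legacy = new java.sql.Date(109, 6, 20);
        System.out.println(legacy);                   // 2009-07-20

        // Literal-year, 1-based-month semantics: the same arguments now
        // denote the year 109, hence the "0109-06-xx" expected strings.
        System.out.println(LocalDate.of(109, 6, 20)); // 0109-06-20
      }
    }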

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java
index bc8572e..bb9918c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java
@@ -18,17 +18,12 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
 import junit.framework.TestCase;

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java
index 4677aa7..9787454 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java
@@ -18,12 +18,13 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 import junit.framework.TestCase;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -32,8 +33,6 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
-import java.sql.Date;
-
 public class TestGenericUDFGreatest extends TestCase {
 
   public void testOneArg() throws HiveException {
@@ -210,7 +209,7 @@ public class TestGenericUDFGreatest extends TestCase {
     } else if (o instanceof Double) {
       return o != null ? new DoubleWritable((Double) o) : null;
     } else if (o instanceof Date) {
-      return o != null ? new DateWritable((Date) o) : null;
+      return o != null ? new DateWritableV2((Date) o) : null;
     } else if (o instanceof Byte) {
       return o != null ? new ByteWritable((Byte) o): null;
     } else if (o instanceof Short) {
@@ -231,8 +230,8 @@ public class TestGenericUDFGreatest extends TestCase {
       return ((IntWritable) o).get();
     } else if (o instanceof DoubleWritable) {
       return ((DoubleWritable) o).get();
-    } else if (o instanceof DateWritable) {
-      return ((DateWritable) o).get();
+    } else if (o instanceof DateWritableV2) {
+      return ((DateWritableV2) o).get();
     } else if (o instanceof ByteWritable) {
       return ((ByteWritable) o).get();
     } else if (o instanceof ShortWritable) {
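
[Note: the getWritable/getExpected helpers above now round-trip through
DateWritableV2, whose get() returns a Hive Date rather than java.sql.Date.
A minimal round-trip sketch, assuming the hive-common and hive-serde classes
from this patch are on the classpath (class name illustrative):]

    import org.apache.hadoop.hive.common.type.Date;
    import org.apache.hadoop.hive.serde2.io.DateWritableV2;

    public class DateWritableV2RoundTrip {
      public static void main(String[] args) {
        // Wrap a Hive Date in the V2 writable and read it back; get()
        // returns the Hive Date, so comparisons stay in the new type.
        Date d = Date.valueOf("2015-03-20");
        DateWritableV2 w = new DateWritableV2(d);
        System.out.println(w.get()); // 2015-03-20
      }
    }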

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
index 7d7c84d..972ab35 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
@@ -17,18 +17,17 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Timestamp;
-
-import junit.framework.TestCase;
-
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
+import junit.framework.TestCase;
+
 public class TestGenericUDFLastDay extends TestCase {
 
   public void testLastDay() throws HiveException {
@@ -65,41 +64,29 @@ public class TestGenericUDFLastDay extends TestCase {
   }
 
   public void testWrongDateStr() throws HiveException {
-    boolean caught = false;
-    try {
-      GenericUDFLastDay udf = new GenericUDFLastDay();
-      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-      ObjectInspector[] arguments = { valueOI0 };
-
-      udf.initialize(arguments);
-
-      runAndVerify("2016-02-30", "2016-03-31", udf);
-      runAndVerify("2014-01-32", "2014-02-28", udf);
-      runAndVerify("01/14/2014", null, udf);
-      runAndVerify(null, null, udf);
-    } catch (HiveException e) {
-      caught = true;
-    }
-    assertTrue(caught);
+    GenericUDFLastDay udf = new GenericUDFLastDay();
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector[] arguments = {valueOI0};
+
+    udf.initialize(arguments);
+
+    runAndVerify("2016-02-30", "2016-03-31", udf);
+    runAndVerify("2014-01-32", "2014-02-28", udf);
+    runAndVerify("01/14/2014", null, udf);
+    runAndVerify(null, null, udf);
   }
 
   public void testWrongTsStr() throws HiveException {
-    boolean caught = false;
-    try {
-      GenericUDFLastDay udf = new GenericUDFLastDay();
-      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-      ObjectInspector[] arguments = { valueOI0 };
-
-      udf.initialize(arguments);
-
-      runAndVerify("2016-02-30 10:30:45", "2016-03-31", udf);
-      runAndVerify("2014-01-32 10:30:45", "2014-02-28", udf);
-      runAndVerify("01/14/2014 10:30:45", null, udf);
-      runAndVerify("2016-02-28T10:30:45", "2016-02-29", udf);
-    } catch (HiveException e) {
-      caught = true;
-    }
-    assertTrue(caught);
+    GenericUDFLastDay udf = new GenericUDFLastDay();
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector[] arguments = { valueOI0 };
+
+    udf.initialize(arguments);
+
+    runAndVerify("2016-02-30 10:30:45", "2016-03-31", udf);
+    runAndVerify("2014-01-32 10:30:45", "2014-02-28", udf);
+    runAndVerify("01/14/2014 10:30:45", null, udf);
+    runAndVerify("2016-02-28T10:30:45", null, udf);
   }
 
   public void testLastDayTs() throws HiveException {
@@ -132,7 +119,7 @@ public class TestGenericUDFLastDay extends TestCase {
   }
 
   private void runAndVerifyTs(String str, String expResult, GenericUDF udf) throws HiveException {
-    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new TimestampWritable(
+    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new TimestampWritableV2(
         Timestamp.valueOf(str)) : null);
     DeferredObject[] args = { valueObj0 };
     Text output = (Text) udf.evaluate(args);
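
[Note: besides the writable swap in runAndVerifyTs, the expectation for
"2016-02-28T10:30:45" changed from "2016-02-29" to null: the string path now
rejects ISO 'T'-separated input. The helper itself just parses with Hive's
Timestamp and wraps it in the V2 writable; a minimal sketch under the same
classpath assumption as above (class name illustrative):]

    import org.apache.hadoop.hive.common.type.Timestamp;
    import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;

    public class TimestampWritableV2RoundTrip {
      public static void main(String[] args) {
        // Mirrors runAndVerifyTs: parse with Hive's Timestamp, then hand
        // the V2 writable to the UDF as a deferred argument.
        Timestamp ts = Timestamp.valueOf("2016-01-14 10:30:45");
        System.out.println(new TimestampWritableV2(ts).getTimestamp());
        // 2016-01-14 10:30:45
      }
    }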

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java
index f966cb0..cccc70e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java
@@ -17,16 +17,15 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-
 import junit.framework.TestCase;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -210,7 +209,7 @@ public class TestGenericUDFLeast extends TestCase {
     } else if (o instanceof Double) {
       return o != null ? new DoubleWritable((Double) o) : null;
     } else if (o instanceof Date) {
-      return o != null ? new DateWritable((Date) o) : null;
+      return o != null ? new DateWritableV2((Date) o) : null;
     } else if (o instanceof Byte) {
       return o != null ? new ByteWritable((Byte) o): null;
     } else if (o instanceof Short) {
@@ -231,8 +230,8 @@ public class TestGenericUDFLeast extends TestCase {
       return ((IntWritable) o).get();
     } else if (o instanceof DoubleWritable) {
       return ((DoubleWritable) o).get();
-    } else if (o instanceof DateWritable) {
-      return ((DateWritable) o).get();
+    } else if (o instanceof DateWritableV2) {
+      return ((DateWritableV2) o).get();
     } else if (o instanceof ByteWritable) {
       return ((ByteWritable) o).get();
     } else if (o instanceof ShortWritable) {

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
index 7eee550..e9f32a1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
@@ -17,14 +17,13 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -60,20 +59,14 @@ public class TestGenericUDFMonthsBetween extends TestCase {
   }
 
   public void testWrongDateStr() throws HiveException {
-    boolean caught = false;
-    try {
-      GenericUDFMonthsBetween udf = new GenericUDFMonthsBetween();
-      ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-      ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-      ObjectInspector[] arguments = { valueOI1, valueOI2 };
-      udf.initialize(arguments);
-
-      runTestStr("2002-03", "2002-02-24", null, udf);
-      runTestStr("2002-03-24", "2002-02", null, udf);
-    } catch (HiveException e) {
-      caught = true;
-    }
-    assertTrue(caught);
+    GenericUDFMonthsBetween udf = new GenericUDFMonthsBetween();
+    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector[] arguments = {valueOI1, valueOI2};
+    udf.initialize(arguments);
+
+    runTestStr("2002-03", "2002-02-24", null, udf);
+    runTestStr("2002-03-24", "2002-02", null, udf);
   }
 
   public void testMonthsBetweenForString(GenericUDFMonthsBetween udf) throws HiveException {
@@ -187,7 +180,7 @@ public class TestGenericUDFMonthsBetween extends TestCase {
     runTestTs("2002-03-24 00:00:00", "2002-02-24 10:30:00", 1.0, udf);
     runTestTs("2002-03-24 10:30:00", "2002-02-24 00:00:00", 1.0, udf);
 
-    runTestTs("2003-04-23 23:59:59", "2003-03-24 00:0:0", 0.99999963, udf);
+    runTestTs("2003-04-23 23:59:59", "2003-03-24 00:00:00", 0.99999963, udf);
   }
 
   public void testMonthsBetweenForDate() throws HiveException {
@@ -253,8 +246,8 @@ public class TestGenericUDFMonthsBetween extends TestCase {
 
   protected void runTestTs(String ts1, String ts2, Double expDiff, GenericUDFMonthsBetween udf)
       throws HiveException {
-    TimestampWritable tsWr1 = ts1 == null ? null : new TimestampWritable(Timestamp.valueOf(ts1));
-    TimestampWritable tsWr2 = ts2 == null ? null : new TimestampWritable(Timestamp.valueOf(ts2));
+    TimestampWritableV2 tsWr1 = ts1 == null ? null : new TimestampWritableV2(Timestamp.valueOf(ts1));
+    TimestampWritableV2 tsWr2 = ts2 == null ? null : new TimestampWritableV2(Timestamp.valueOf(ts2));
     DeferredJavaObject valueObj1 = new DeferredJavaObject(tsWr1);
     DeferredJavaObject valueObj2 = new DeferredJavaObject(tsWr2);
     DeferredObject[] args = new DeferredObject[] { valueObj1, valueObj2 };
@@ -269,8 +262,8 @@ public class TestGenericUDFMonthsBetween extends TestCase {
 
   protected void runTestDt(String dt1, String dt2, Double expDiff, GenericUDFMonthsBetween udf)
       throws HiveException {
-    DateWritable dtWr1 = dt1 == null ? null : new DateWritable(Date.valueOf(dt1));
-    DateWritable dtWr2 = dt2 == null ? null : new DateWritable(Date.valueOf(dt2));
+    DateWritableV2 dtWr1 = dt1 == null ? null : new DateWritableV2(Date.valueOf(dt1));
+    DateWritableV2 dtWr2 = dt2 == null ? null : new DateWritableV2(Date.valueOf(dt2));
     DeferredJavaObject valueObj1 = new DeferredJavaObject(dtWr1);
     DeferredJavaObject valueObj2 = new DeferredJavaObject(dtWr2);
     DeferredObject[] args = new DeferredObject[] { valueObj1, valueObj2 };
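
[Note: the corrected literal above ("00:0:0" to "00:00:00") keeps the
expected value 0.99999963, which falls out of the 31-day-month convention
that months_between uses for the fractional part (stated here as an
assumption; the hunk only shows the assertion). The arithmetic, with an
illustrative class name:]

    public class MonthsBetweenFraction {
      public static void main(String[] args) {
        // 2003-03-24 00:00:00 to 2003-04-23 23:59:59 is one second short of
        // a full 31-day month window: 2678399 / 2678400 of a month.
        long seconds = 30L * 86400 + 86399;            // 30 days 23:59:59
        System.out.printf("%.8f%n", seconds / (31.0 * 86400)); // 0.99999963
      }
    }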

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java
index af7f0b0..c211fdd 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java
@@ -71,27 +71,21 @@ public class TestGenericUDFNextDay extends TestCase {
   }
 
   public void testNotValidValues() throws Exception {
-    boolean caught = false;
-    try {
-      GenericUDFNextDay udf = new GenericUDFNextDay();
-      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-      ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-      ObjectInspector[] arguments = { valueOI0, valueOI1 };
+    GenericUDFNextDay udf = new GenericUDFNextDay();
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector[] arguments = { valueOI0, valueOI1 };
 
-      udf.initialize(arguments);
+    udf.initialize(arguments);
 
-      runAndVerify("01/14/2015", "TU", null, udf);
-      runAndVerify("2015-01-14", "VT", null, udf);
-      runAndVerify("2015-02-30", "WE", "2015-03-04", udf);
-      runAndVerify("2015-02-32", "WE", "2015-03-11", udf);
-      runAndVerify("2015-02-30 10:30:00", "WE", "2015-03-04", udf);
-      runAndVerify("2015-02-32 10:30:00", "WE", "2015-03-11", udf);
-      runAndVerify("2015/01/14 14:04:34", "SAT", null, udf);
-      runAndVerify("2015-01-14T14:04:34", "SAT", "2015-01-17", udf);
-    } catch (HiveException e) {
-      caught = true;
-    }
-    assertTrue(caught);
+    runAndVerify("01/14/2015", "TU", null, udf);
+    runAndVerify("2015-01-14", "VT", null, udf);
+    runAndVerify("2015-02-30", "WE", "2015-03-04", udf);
+    runAndVerify("2015-02-32", "WE", "2015-03-11", udf);
+    runAndVerify("2015-02-30 10:30:00", "WE", "2015-03-04", udf);
+    runAndVerify("2015-02-32 10:30:00", "WE", "2015-03-11", udf);
+    runAndVerify("2015/01/14 14:04:34", "SAT", null, udf);
+    runAndVerify("2015-01-14T14:04:34", "SAT", null, udf);
   }
 
   public void testNextDayErrorArg1() throws HiveException {
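
[Note: the next_day expectations follow the same lenient rolling as in the
date_format and last_day tests, while 'T'-separated input now yields null.
"2015-02-30" resolves to 2015-03-02, a Monday, so the first Wednesday
strictly after it is 2015-03-04. A JDK-only sketch of that first case
(illustrative class name):]

    import java.time.DayOfWeek;
    import java.time.LocalDate;
    import java.time.temporal.TemporalAdjusters;

    public class NextDayLenientDemo {
      public static void main(String[] args) {
        // Lenient "2015-02-30" is Feb 1 plus 29 days, i.e. 2015-03-02.
        LocalDate rolled = LocalDate.of(2015, 2, 1).plusDays(29);
        System.out.println(rolled);                        // 2015-03-02
        System.out.println(
            rolled.with(TemporalAdjusters.next(DayOfWeek.WEDNESDAY)));
        // 2015-03-04
      }
    }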

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java
index 1402467..281b0d5 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java
@@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -109,7 +109,7 @@ public class TestGenericUDFNullif {
 
     ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableDateObjectInspector,
         PrimitiveObjectInspectorFactory.writableByteObjectInspector };
-    DeferredObject[] args = { new DeferredJavaObject(new DateWritable(4)),
+    DeferredObject[] args = { new DeferredJavaObject(new DateWritableV2(4)),
         new DeferredJavaObject(new ByteWritable((byte) 4)) };
 
     udf.initialize(inputOIs);
@@ -123,8 +123,8 @@ public class TestGenericUDFNullif {
         PrimitiveObjectInspectorFactory.writableDateObjectInspector,
         PrimitiveObjectInspectorFactory.writableDateObjectInspector };
     DeferredObject[] args = {
-        new DeferredJavaObject(new DateWritable(4)),
-        new DeferredJavaObject(new DateWritable(4))
+        new DeferredJavaObject(new DateWritableV2(4)),
+        new DeferredJavaObject(new DateWritableV2(4))
         };
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
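
[Note: the DateWritableV2(int) calls above use the writable's integer form;
assuming it keeps DateWritable's days-since-the-Unix-epoch encoding, which
the unchanged NULLIF expectations imply, day 4 is 1970-01-05. A one-line
sketch under the same classpath assumption (class name illustrative):]

    import org.apache.hadoop.hive.serde2.io.DateWritableV2;

    public class EpochDaysDemo {
      public static void main(String[] args) {
        // 1970-01-01 is day 0, so day 4 is 1970-01-05.
        System.out.println(new DateWritableV2(4).get()); // 1970-01-05
      }
    }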

http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
index efc9514..504aa7a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
@@ -18,26 +18,25 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Date;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -282,8 +281,8 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
   public void testDateMinusIntervalYearMonth() throws Exception {
     GenericUDFOPMinus udf = new GenericUDFOPMinus();
 
-    DateWritable left =
-        new DateWritable(Date.valueOf("2004-02-15"));
+    DateWritableV2 left =
+        new DateWritableV2(Date.valueOf("2004-02-15"));
     HiveIntervalYearMonthWritable right =
         new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
     ObjectInspector[] inputOIs = {
@@ -297,7 +296,7 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
-    DateWritable res = (DateWritable) udf.evaluate(args);
+    DateWritableV2 res = (DateWritableV2) udf.evaluate(args);
     Assert.assertEquals(Date.valueOf("2001-06-15"), res.get());
   }
 
@@ -305,8 +304,8 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
   public void testTimestampMinusIntervalYearMonth() throws Exception {
     GenericUDFOPMinus udf = new GenericUDFOPMinus();
 
-    TimestampWritable left =
-        new TimestampWritable(Timestamp.valueOf("2004-01-15 01:02:03.123456789"));
+    TimestampWritableV2 left =
+        new TimestampWritableV2(Timestamp.valueOf("2004-01-15 01:02:03.123456789"));
     HiveIntervalYearMonthWritable right =
         new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
     ObjectInspector[] inputOIs = {
@@ -320,7 +319,7 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
-    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
+    TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
     Assert.assertEquals(Timestamp.valueOf("2001-11-15 01:02:03.123456789"), res.getTimestamp());
   }
 
@@ -351,8 +350,8 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
   public void testTimestampMinusIntervalDayTime() throws Exception {
     GenericUDFOPMinus udf = new GenericUDFOPMinus();
 
-    TimestampWritable left =
-        new TimestampWritable(Timestamp.valueOf("2001-01-02 2:3:4.567"));
+    TimestampWritableV2 left =
+        new TimestampWritableV2(Timestamp.valueOf("2001-01-02 2:3:4.567"));
     HiveIntervalDayTimeWritable right =
         new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
     ObjectInspector[] inputOIs = {
@@ -366,7 +365,7 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
-    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
+    TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
     Assert.assertEquals(Timestamp.valueOf("2001-01-01 00:00:00"), res.getTimestamp());
   }
 
@@ -374,8 +373,8 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
   public void testDateMinusIntervalDayTime() throws Exception {
     GenericUDFOPMinus udf = new GenericUDFOPMinus();
 
-    DateWritable left =
-        new DateWritable(Date.valueOf("2001-01-01"));
+    DateWritableV2 left =
+        new DateWritableV2(Date.valueOf("2001-01-01"));
     HiveIntervalDayTimeWritable right =
         new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 0:0:0.555"));
     ObjectInspector[] inputOIs = {
@@ -389,7 +388,7 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
-    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
+    TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
     Assert.assertEquals(Timestamp.valueOf("2000-12-30 23:59:59.445"), res.getTimestamp());
   }
 }
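
[Note on the interval tests above: DATE minus INTERVAL DAY TO SECOND widens
to TIMESTAMP, with the date taken at midnight before the interval is
subtracted, which is why testDateMinusIntervalDayTime asserts a timestamp
result. A JDK-only sketch of the arithmetic (illustrative class name):]

    import java.time.Duration;
    import java.time.LocalDateTime;

    public class DateMinusIntervalDemo {
      public static void main(String[] args) {
        // 2001-01-01 at midnight, minus 1 day 0:0:0.555.
        LocalDateTime midnight = LocalDateTime.of(2001, 1, 1, 0, 0);
        System.out.println(midnight.minus(Duration.ofDays(1).plusMillis(555)));
        // 2000-12-30T23:59:59.445
      }
    }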