You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by se...@apache.org on 2018/06/28 22:48:41 UTC

[45/52] [abbrv] hive git commit: HIVE-20007: Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
index d94dbe8..2aeaa33 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
@@ -23,6 +23,7 @@ import java.util.Arrays;
 import java.util.List;
 
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
 import org.apache.hadoop.hive.hbase.struct.HBaseValueFactory;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -161,7 +162,8 @@ public class LazyHBaseRow extends LazyStruct {
         }
         LazyObjectBase lz = fields[fieldID];
         if (lz instanceof LazyTimestamp) {
-          ((LazyTimestamp) lz).getWritableObject().setTime(timestamp);
+          ((LazyTimestamp) lz).getWritableObject().set(
+              Timestamp.ofEpochMilli(timestamp));
         } else {
           ((LazyLong) lz).getWritableObject().set(timestamp);
         }

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
index fabbfba..6e7d7e6 100644
--- a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
@@ -97,26 +97,26 @@ POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-0	val_0	2012-02-22 17:15:54
-119	val_119	2012-02-22 17:15:54
-136	val_136	2012-02-22 17:15:54
-153	val_153	2012-02-22 17:15:54
-17	val_17	2012-02-22 17:15:54
-170	val_170	2012-02-22 17:15:54
-187	val_187	2012-02-22 17:15:54
-221	val_221	2012-02-22 17:15:54
-238	val_238	2012-02-22 17:15:54
-255	val_255	2012-02-22 17:15:54
-272	val_272	2012-02-22 17:15:54
-289	val_289	2012-02-22 17:15:54
-306	val_306	2012-02-22 17:15:54
-323	val_323	2012-02-22 17:15:54
-34	val_34	2012-02-22 17:15:54
-374	val_374	2012-02-22 17:15:54
-459	val_459	2012-02-22 17:15:54
-493	val_493	2012-02-22 17:15:54
-51	val_51	2012-02-22 17:15:54
-85	val_85	2012-02-22 17:15:54
+0	val_0	2012-02-23 01:15:54
+119	val_119	2012-02-23 01:15:54
+136	val_136	2012-02-23 01:15:54
+153	val_153	2012-02-23 01:15:54
+17	val_17	2012-02-23 01:15:54
+170	val_170	2012-02-23 01:15:54
+187	val_187	2012-02-23 01:15:54
+221	val_221	2012-02-23 01:15:54
+238	val_238	2012-02-23 01:15:54
+255	val_255	2012-02-23 01:15:54
+272	val_272	2012-02-23 01:15:54
+289	val_289	2012-02-23 01:15:54
+306	val_306	2012-02-23 01:15:54
+323	val_323	2012-02-23 01:15:54
+34	val_34	2012-02-23 01:15:54
+374	val_374	2012-02-23 01:15:54
+459	val_459	2012-02-23 01:15:54
+493	val_493	2012-02-23 01:15:54
+51	val_51	2012-02-23 01:15:54
+85	val_85	2012-02-23 01:15:54
 PREHOOK: query: DROP TABLE hbase_table
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@hbase_table
@@ -202,8 +202,8 @@ POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table W
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-165	val_165	1973-03-03 01:46:40
-396	val_396	1973-03-03 01:46:40
+165	val_165	1973-03-03 09:46:40
+396	val_396	1973-03-03 09:46:40
 PREHOOK: query: explain
 SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000
 PREHOOK: type: QUERY
@@ -251,10 +251,10 @@ POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table W
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-111	val_111	1976-05-03 12:33:20
-222	val_222	1976-05-03 12:33:20
-296	val_296	1976-05-03 12:33:20
-333	val_333	1976-05-03 12:33:20
+111	val_111	1976-05-03 19:33:20
+222	val_222	1976-05-03 19:33:20
+296	val_296	1976-05-03 19:33:20
+333	val_333	1976-05-03 19:33:20
 PREHOOK: query: explain
 SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000
 PREHOOK: type: QUERY
@@ -302,8 +302,8 @@ POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table W
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-165	val_165	1973-03-03 01:46:40
-396	val_396	1973-03-03 01:46:40
+165	val_165	1973-03-03 09:46:40
+396	val_396	1973-03-03 09:46:40
 PREHOOK: query: explain
 SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000
 PREHOOK: type: QUERY
@@ -351,10 +351,10 @@ POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table W
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-111	val_111	1976-05-03 12:33:20
-222	val_222	1976-05-03 12:33:20
-296	val_296	1976-05-03 12:33:20
-333	val_333	1976-05-03 12:33:20
+111	val_111	1976-05-03 19:33:20
+222	val_222	1976-05-03 19:33:20
+296	val_296	1976-05-03 19:33:20
+333	val_333	1976-05-03 19:33:20
 PREHOOK: query: DROP TABLE hbase_table
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@hbase_table

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
index 6dcee40..d33d343 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
@@ -19,12 +19,12 @@
 
 package org.apache.hive.hcatalog.data;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
 
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java
index 57c48f8..405f1b0 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java
@@ -19,16 +19,16 @@
 
 package org.apache.hive.hcatalog.data;
 
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
index 114c205..af80c02 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
@@ -21,8 +21,6 @@ package org.apache.hive.hcatalog.data;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.nio.charset.CharacterCodingException;
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -34,9 +32,11 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
index cb1c459..2641add 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
@@ -22,7 +22,6 @@ package org.apache.hive.hcatalog.data;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.sql.Date;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.Iterator;
@@ -30,14 +29,16 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.io.VIntWritable;
 import org.apache.hadoop.io.VLongWritable;
 
@@ -121,11 +122,11 @@ public abstract class ReaderWriter {
       hdw.readFields(in);
       return hdw.getHiveDecimal();
     case DataType.DATE:
-      DateWritable dw = new DateWritable();
+      DateWritableV2 dw = new DateWritableV2();
       dw.readFields(in);
       return dw.get();
     case DataType.TIMESTAMP:
-      TimestampWritable tw = new TimestampWritable();
+      TimestampWritableV2 tw = new TimestampWritableV2();
       tw.readFields(in);
       return tw.getTimestamp();
     default:
@@ -214,10 +215,10 @@ public abstract class ReaderWriter {
       new HiveDecimalWritable((HiveDecimal)val).write(out);
       return;
     case DataType.DATE:
-      new DateWritable((Date)val).write(out);
+      new DateWritableV2((Date)val).write(out);
       return;
     case DataType.TIMESTAMP:
-      new TimestampWritable((java.sql.Timestamp)val).write(out);
+      new TimestampWritableV2((Timestamp)val).write(out);
       return;
     default:
       throw new IOException("Unexpected data type " + type +

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java
index 7a643f1..d57d171 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java
@@ -29,17 +29,17 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
@@ -276,7 +276,7 @@ public class TestDefaultHCatRecord extends TestCase {
     rec_hcat13types.add(new HiveChar("hive_char", 10));
     rec_hcat13types.add(new HiveVarchar("hive_varchar", 20));
     rec_hcat13types.add(Date.valueOf("2014-01-06"));
-    rec_hcat13types.add(new Timestamp(System.currentTimeMillis()));
+    rec_hcat13types.add(Timestamp.ofEpochMilli(System.currentTimeMillis()));
     return new DefaultHCatRecord(rec_hcat13types);
   }
   private static HCatRecord getHCat13TypesComplexRecord() {
@@ -290,7 +290,7 @@ public class TestDefaultHCatRecord extends TestCase {
     List<Object> list = new ArrayList<Object>();
     list.add(Date.valueOf("2014-01-05"));
     list.add(new HashMap<HiveDecimal, String>(m));
-    m2.put(new Timestamp(System.currentTimeMillis()), list);
+    m2.put(Timestamp.ofEpochMilli(System.currentTimeMillis()), list);
     rec_hcat13ComplexTypes.add(m2);
     return new DefaultHCatRecord(rec_hcat13ComplexTypes);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
index 8aeb4f4..6770d44 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
@@ -20,8 +20,6 @@ package org.apache.hive.hcatalog.data;
 
 import java.io.UnsupportedEncodingException;
 import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -32,9 +30,11 @@ import java.util.Properties;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
@@ -98,7 +98,7 @@ public class TestJsonSerDe extends TestCase {
     rlist.add(new HiveChar("hive\nchar", 10));
     rlist.add(new HiveVarchar("hive\nvarchar", 20));
     rlist.add(Date.valueOf("2014-01-07"));
-    rlist.add(new Timestamp(System.currentTimeMillis()));
+    rlist.add(Timestamp.ofEpochMilli(System.currentTimeMillis()));
     rlist.add("hive\nbinary".getBytes("UTF-8"));
 
     List<Object> nlist = new ArrayList<Object>(13);

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
index ec620d2..994c505 100644
--- a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
+++ b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
@@ -21,8 +21,6 @@ package org.apache.hive.hcatalog.pig;
 
 import java.io.IOException;
 import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -35,9 +33,11 @@ import java.util.Properties;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -419,7 +419,7 @@ abstract class HCatBaseStorer extends StoreFunc implements StoreMetadata {
         return new HiveVarchar(varcharVal, vti.getLength());
       case TIMESTAMP:
         DateTime dt = (DateTime)pigObj;
-        return new Timestamp(dt.getMillis());//getMillis() returns UTC time regardless of TZ
+        return Timestamp.ofEpochMilli(dt.getMillis());//getMillis() returns UTC time regardless of TZ
       case DATE:
         /**
          * We ignore any TZ setting on Pig value since java.sql.Date doesn't have it (in any
@@ -437,7 +437,7 @@ abstract class HCatBaseStorer extends StoreFunc implements StoreMetadata {
           for local timezone.  Date.valueOf() also uses local timezone (as does Date(int,int,int).
           Also see PigHCatUtil#extractPigObject() for corresponding read op.  This way a DATETIME from Pig,
           when stored into Hive and read back comes back with the same value.*/
-        return new Date(dateTime.getYear() - 1900, dateTime.getMonthOfYear() - 1, dateTime.getDayOfMonth());
+        return Date.of(dateTime.getYear(), dateTime.getMonthOfYear(), dateTime.getDayOfMonth());
       default:
         throw new BackendException("Unexpected HCat type " + type + " for value " + pigObj
           + " of class " + pigObj.getClass().getName(), PigHCatUtil.PIG_EXCEPTION_CODE);

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
index f5c3c75..c3bde2d 100644
--- a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
+++ b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
@@ -54,6 +54,7 @@ import org.apache.pig.ResourceSchema;
 import org.apache.pig.ResourceStatistics;
 import org.apache.pig.impl.util.UDFContext;
 import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
 import org.joda.time.format.DateTimeFormat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
index 163fe6a..afe6e92 100644
--- a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
+++ b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
@@ -20,8 +20,6 @@ package org.apache.hive.hcatalog.pig;
 
 
 import java.io.IOException;
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -31,9 +29,11 @@ import java.util.Map.Entry;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -62,6 +62,7 @@ import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.util.UDFContext;
 import org.apache.pig.impl.util.Utils;
 import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -429,7 +430,7 @@ class PigHCatUtil {
       * e.g. d = new java.sql.Date(System.currentMillis()).toString() so if you do this just after
       * midnight in Palo Alto, you'll get yesterday's date printed out.*/
       Date d = (Date)o;
-      result = new DateTime(d.getYear() + 1900, d.getMonth() + 1, d.getDate(), 0, 0);//uses local TZ
+      result = new DateTime(d.getYear(), d.getMonth(), d.getDay(), 0, 0, DateTimeZone.UTC);
       break;
     case TIMESTAMP:
       /*DATA TRUNCATION!!!
@@ -437,7 +438,7 @@ class PigHCatUtil {
        object in local TZ; This is arbitrary, since Hive value doesn't have any TZ notion, but
        we need to set something for TZ.
        Timestamp is consistently in GMT (unless you call toString() on it) so we use millis*/
-      result = new DateTime(((Timestamp)o).getTime());//uses local TZ
+      result = new DateTime(((Timestamp)o).toEpochMilli(), DateTimeZone.UTC);
       break;
     default:
       result = o;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
index 0d72102..58981f8 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
@@ -29,8 +29,6 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.RandomAccessFile;
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -42,6 +40,8 @@ import java.util.Properties;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
@@ -652,7 +652,7 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
      * All the values are within range of target data type (column)
      */
     private static final Object[][] primitiveRows = new Object[][] {
-        {Boolean.TRUE,Byte.MAX_VALUE,Short.MAX_VALUE, Integer.MAX_VALUE,Long.MAX_VALUE,Float.MAX_VALUE,Double.MAX_VALUE,555.22,"Kyiv","char(10)xx","varchar(20)","blah".getBytes(),Date.valueOf("2014-01-13"),Timestamp.valueOf("2014-01-13 19:26:25.0123")},
+        {Boolean.TRUE,Byte.MAX_VALUE,Short.MAX_VALUE, Integer.MAX_VALUE,Long.MAX_VALUE,Float.MAX_VALUE,Double.MAX_VALUE,555.22,"Kyiv","char(10)xx","varchar(20)","blah".getBytes(), Date.valueOf("2014-01-13"), Timestamp.valueOf("2014-01-13 19:26:25.0123")},
         {Boolean.FALSE,Byte.MIN_VALUE,Short.MIN_VALUE, Integer.MIN_VALUE,Long.MIN_VALUE,Float.MIN_VALUE,Double.MIN_VALUE,-555.22,"Saint Petersburg","char(xx)00","varchar(yy)","doh".getBytes(),Date.valueOf("2014-01-14"), Timestamp.valueOf("2014-01-14 19:26:25.0123")}
     };
     /**
@@ -701,14 +701,22 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
             assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
                 + " Reference data is null; actual "
                 + t.get(colPos), t.get(colPos) == null);
-          } else if (referenceData instanceof java.util.Date) {
+          } else if (referenceData instanceof Date) {
             // Note that here we ignore nanos part of Hive Timestamp since nanos are dropped when
             // reading Hive from Pig by design.
             assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
-                + " Reference data=" + ((java.util.Date)referenceData).getTime()
+                    + " Reference data=" + ((Date)referenceData).toEpochMilli()
+                    + " actual=" + ((DateTime)t.get(colPos)).getMillis()
+                    + "; types=(" + referenceData.getClass() + "," + t.get(colPos).getClass() + ")",
+                ((Date)referenceData).toEpochMilli() == ((DateTime)t.get(colPos)).getMillis());
+          } else if (referenceData instanceof Timestamp) {
+            // Note that here we ignore nanos part of Hive Timestamp since nanos are dropped when
+            // reading Hive from Pig by design.
+            assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
+                + " Reference data=" + ((Timestamp)referenceData).toEpochMilli()
                 + " actual=" + ((DateTime)t.get(colPos)).getMillis()
                 + "; types=(" + referenceData.getClass() + "," + t.get(colPos).getClass() + ")",
-                ((java.util.Date)referenceData).getTime()== ((DateTime)t.get(colPos)).getMillis());
+                ((Timestamp)referenceData).toEpochMilli()== ((DateTime)t.get(colPos)).getMillis());
           } else {
             // Doing String comps here as value objects in Hive in Pig are different so equals()
             // doesn't work.

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
index a5cf3a5..19c30b0 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
@@ -213,23 +213,17 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
    */
   @Test
   public void testWriteTimestamp() throws Exception {
-    DateTime d = new DateTime(1991, 10, 11, 14, 23, 30, 10);// uses default TZ
+    DateTime d = new DateTime(1991, 10, 11, 14, 23, 30, 10, DateTimeZone.UTC);// explicit UTC TZ
     pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(),
-        d.toDateTime(DateTimeZone.getDefault()).toString());
+        d.toDateTime(DateTimeZone.UTC).toString());
     d = d.plusHours(2);
     pigValueRangeTest("junitTypeTest2", "timestamp", "datetime",
         HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null, d.toString(),
-        d.toDateTime(DateTimeZone.getDefault()).toString());
-    d = d.toDateTime(DateTimeZone.UTC);
-    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(),
-        d.toDateTime(DateTimeZone.getDefault()).toString());
+        d.toDateTime(DateTimeZone.UTC).toString());
 
-    d = new DateTime(1991, 10, 11, 23, 24, 25, 26);
+    d = new DateTime(1991, 10, 11, 23, 24, 25, 26, DateTimeZone.UTC);
     pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(),
-        d.toDateTime(DateTimeZone.getDefault()).toString());
-    d = d.toDateTime(DateTimeZone.UTC);
-    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(),
-        d.toDateTime(DateTimeZone.getDefault()).toString());
+        d.toDateTime(DateTimeZone.UTC).toString());
   }
 
   // End: tests that check values from Pig that are out of range for target column

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java
----------------------------------------------------------------------
diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java
index d80b6d4..2ed5e5e 100644
--- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java
+++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java
@@ -16,11 +16,10 @@
 
 package org.apache.hive.benchmark.vectorization;
 
-import java.sql.Timestamp;
 import java.util.Random;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -28,12 +27,14 @@ import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
+
 public class ColumnVectorGenUtil {
 
   private static final long LONG_VECTOR_NULL_VALUE = 1;
@@ -144,7 +145,7 @@ public class ColumnVectorGenUtil {
     final boolean repeating, final int size, final Random rand) {
     Timestamp[] timestamps = new Timestamp[size];
     for (int i = 0; i < size; i++) {
-      timestamps[i] = new Timestamp(rand.nextInt());
+      timestamps[i] = Timestamp.ofEpochMilli(rand.nextInt());
     }
     return generateTimestampColumnVector(nulls, repeating, size, rand, timestamps);
   }
@@ -169,10 +170,10 @@ public class ColumnVectorGenUtil {
         tcv.isNull[i] = false;
         if (!repeating) {
           Timestamp randomTimestamp = RandomTypeUtil.getRandTimestamp(rand);
-          tcv.set(i, randomTimestamp);
+          tcv.set(i, randomTimestamp.toSqlTimestamp());
           timestampValues[i] = randomTimestamp;
         } else {
-          tcv.set(i, repeatingTimestamp);
+          tcv.set(i, repeatingTimestamp.toSqlTimestamp());
           timestampValues[i] = repeatingTimestamp;
         }
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java
index 7a891ef..280119b 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java
@@ -31,12 +31,10 @@ import java.lang.reflect.Field;
 import java.math.BigDecimal;
 import java.net.URL;
 import java.sql.Connection;
-import java.sql.Date;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -76,6 +74,8 @@ import org.apache.hadoop.io.Text;
 
 import org.apache.hive.jdbc.miniHS2.MiniHS2;
 import org.apache.hive.jdbc.miniHS2.MiniHS2.MiniClusterType;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.llap.LlapBaseInputFormat;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java
index 9dfece9..e69c686 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java
@@ -21,8 +21,8 @@ package org.apache.hive.jdbc;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertArrayEquals;
 import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Timestamp;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import java.util.List;
 import org.apache.hadoop.hive.llap.FieldDesc;
 import org.apache.hadoop.hive.llap.Row;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 04cd9f5..0f1208a 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -464,6 +464,7 @@ minillaplocal.query.files=\
   retry_failure_oom.q,\
   runtime_stats_hs2.q,\
   bucketsortoptimize_insert_2.q,\
+  change_allowincompatible_vectorization_false_date.q,\
   check_constraint.q,\
   cbo_gby.q,\
   cbo_join.q,\
@@ -1689,6 +1690,7 @@ druid.query.files=druidmini_test1.q,\
   druidmini_test_insert.q,\
   druidmini_mv.q,\
   druid_timestamptz.q,\
+  druid_timestamptz2.q,\
   druidmini_dynamic_partition.q,\
   druidmini_expressions.q,\
   druidmini_extractTime.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java
----------------------------------------------------------------------
diff --git a/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java b/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java
index 77559e1..f328d6e 100644
--- a/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java
+++ b/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hive.llap.io;
 
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
 import java.io.FilterInputStream;
 import java.io.FilterOutputStream;
 import java.io.IOException;
@@ -31,7 +29,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
 import org.junit.Test;
 import static org.junit.Assert.*;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
index 32f3bed..af853e3 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
@@ -105,7 +105,7 @@ public class GenericColumnVectorProducer implements ColumnVectorProducer {
 
   public static final class SerDeStripeMetadata implements ConsumerStripeMetadata {
     // The writer is local to the process.
-    private final String writerTimezone = TimeZone.getDefault().getID();
+    private final String writerTimezone = "UTC";
     private List<ColumnEncoding> encodings;
     private final int stripeIx;
     private long rowCount = -1;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
index 0d7435c..40248a3 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
@@ -223,7 +223,8 @@ public class OrcEncodedDataConsumer
     TreeReaderFactory.Context context = new TreeReaderFactory.ReaderContext()
             .setSchemaEvolution(evolution).skipCorrupt(skipCorrupt)
             .writerTimeZone(stripeMetadata.getWriterTimezone())
-            .fileFormat(fileMetadata == null ? null : fileMetadata.getFileVersion());
+            .fileFormat(fileMetadata == null ? null : fileMetadata.getFileVersion())
+            .useUTCTimestamp(true);
     this.batchSchemas = includes.getBatchReaderTypes(fileSchema);
     StructTreeReader treeReader = EncodedTreeReaderFactory.createRootTreeReader(
         batchSchemas, stripeMetadata.getEncodings(), batch, codec, context, useDecimal64ColumnVectors);

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
index 0d3ee2b..f4e85bd 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
@@ -21,8 +21,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
-import java.sql.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DTIColumnCompareScalar.txt, which covers comparison

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
index be5f641..b198e08 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
-import java.sql.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
index 32dd6ed..c3982ed 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
@@ -26,8 +26,8 @@ import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateColumnArithmeticIntervalYearMonthColumn.txt, which covers binary arithmetic
@@ -99,65 +99,65 @@ public class <ClassName> extends VectorExpression {
      * conditional checks in the inner loop.
      */
     if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-      scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
       scratchIntervalYearMonth2.set((int) vector2[0]);
       dtm.<OperatorMethod>(
           scratchDate1, scratchIntervalYearMonth2,  outputDate);
-      outputVector[0] = DateWritable.dateToDays(outputDate);
+      outputVector[0] = DateWritableV2.dateToDays(outputDate);
     } else if (inputColVector1.isRepeating) {
-      scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               scratchDate1, scratchIntervalYearMonth2,  outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               scratchDate1, scratchIntervalYearMonth2,  outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       }
     } else if (inputColVector2.isRepeating) {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               scratchDate1, scratchIntervalYearMonth2,  outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               scratchDate1, scratchIntervalYearMonth2,  outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       }
     } else {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               scratchDate1, scratchIntervalYearMonth2,  outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               scratchDate1, scratchIntervalYearMonth2,  outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
index 94c0c5c..06c7368 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateColumnArithmeticIntervalYearMonthScalar.txt, which covers binary arithmetic
@@ -93,10 +93,10 @@ public class <ClassName> extends VectorExpression {
     if (inputColVector1.isRepeating) {
       if (inputColVector1.noNulls || !inputIsNull[0]) {
         outputIsNull[0] = false;
-        scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+        scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
         dtm.<OperatorMethod>(
             scratchDate1, value, outputDate);
-        outputVector[0] = DateWritable.dateToDays(outputDate);
+        outputVector[0] = DateWritableV2.dateToDays(outputDate);
       } else {
         outputIsNull[0] = true;
         outputColVector.noNulls = false;
@@ -115,18 +115,18 @@ public class <ClassName> extends VectorExpression {
            for(int j = 0; j != n; j++) {
             final int i = sel[j];
             outputIsNull[i] = false;
-            scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+            scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
             dtm.<OperatorMethod>(
                 scratchDate1, value, outputDate);
-            outputVector[i] = DateWritable.dateToDays(outputDate);
+            outputVector[i] = DateWritableV2.dateToDays(outputDate);
           }
          } else {
            for(int j = 0; j != n; j++) {
              final int i = sel[j];
-             scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+             scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
              dtm.<OperatorMethod>(
                  scratchDate1, value, outputDate);
-             outputVector[i] = DateWritable.dateToDays(outputDate);
+             outputVector[i] = DateWritableV2.dateToDays(outputDate);
            }
          }
       } else {
@@ -138,10 +138,10 @@ public class <ClassName> extends VectorExpression {
           outputColVector.noNulls = true;
         }
         for(int i = 0; i != n; i++) {
-          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
           dtm.<OperatorMethod>(
               scratchDate1, value, outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       }
     } else /* there are NULLs in the inputColVector */ {
@@ -155,10 +155,10 @@ public class <ClassName> extends VectorExpression {
           int i = sel[j];
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+            scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
             dtm.<OperatorMethod>(
                 scratchDate1, value, outputDate);
-            outputVector[i] = DateWritable.dateToDays(outputDate);
+            outputVector[i] = DateWritableV2.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;
@@ -168,10 +168,10 @@ public class <ClassName> extends VectorExpression {
         for(int i = 0; i != n; i++) {
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+            scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
             dtm.<OperatorMethod>(
                 scratchDate1, value, outputDate);
-            outputVector[i] = DateWritable.dateToDays(outputDate);
+            outputVector[i] = DateWritableV2.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
index 96c525d..53637a6 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
@@ -28,8 +28,8 @@ import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateColumnArithmeticTimestampColumn.txt, a class
@@ -97,12 +97,12 @@ public class <ClassName> extends VectorExpression {
      * conditional checks in the inner loop.
      */
     if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
       dtm.<OperatorMethod>(
           scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(0), outputColVector.getScratch<CamelReturnType>());
       outputColVector.setFromScratch<CamelReturnType>(0);
     } else if (inputColVector1.isRepeating) {
-      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
@@ -122,14 +122,14 @@ public class <ClassName> extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
           dtm.<OperatorMethod>(
               scratchTimestamp1, value2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
          }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
           dtm.<OperatorMethod>(
               scratchTimestamp1, value2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
@@ -139,14 +139,14 @@ public class <ClassName> extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
          dtm.<OperatorMethod>(
               scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
           dtm.<OperatorMethod>(
               scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
index fb22992..e9e9193 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateColumnArithmeticTimestampScalarBase.txt, a base class
@@ -91,7 +91,7 @@ public class <ClassName> extends VectorExpression {
     if (inputColVector1.isRepeating) {
       if (inputColVector1.noNulls || !inputIsNull[0]) {
         outputIsNull[0] = false;
-        scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+        scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
         dtm.<OperatorMethod>(
             scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
         outputColVector.setFromScratch<CamelReturnType>(0);
@@ -112,7 +112,7 @@ public class <ClassName> extends VectorExpression {
            for(int j = 0; j != n; j++) {
             final int i = sel[j];
             outputIsNull[i] = false;
-            scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+            scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
             dtm.<OperatorMethod>(
                scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
             outputColVector.setFromScratch<CamelReturnType>(i);
@@ -120,7 +120,7 @@ public class <ClassName> extends VectorExpression {
          } else {
            for(int j = 0; j != n; j++) {
              final int i = sel[j];
-             scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+             scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
              dtm.<OperatorMethod>(
                scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
              outputColVector.setFromScratch<CamelReturnType>(i);
@@ -135,7 +135,7 @@ public class <ClassName> extends VectorExpression {
           outputColVector.noNulls = true;
         }
         for(int i = 0; i != n; i++) {
-          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
           dtm.<OperatorMethod>(
              scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
@@ -152,7 +152,7 @@ public class <ClassName> extends VectorExpression {
           int i = sel[j];
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+            scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
             dtm.<OperatorMethod>(
                scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
             outputColVector.setFromScratch<CamelReturnType>(i);
@@ -165,7 +165,7 @@ public class <ClassName> extends VectorExpression {
         for(int i = 0; i != n; i++) {
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+            scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
             dtm.<OperatorMethod>(
                scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
             outputColVector.setFromScratch<CamelReturnType>(i);
@@ -182,7 +182,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum) + ", val " + value.toString();
+    return getColumnParamString(0, colNum) + ", val " + TimestampUtils.timestampScalarTypeToString(value);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
index 0c8ec9c..7d2434a 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
@@ -37,8 +37,8 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateTimeScalarArithmeticIntervalYearMonthColumn.txt.
@@ -58,7 +58,7 @@ public class <ClassName> extends VectorExpression {
 
   public <ClassName>(long value, int colNum, int outputColumnNum) {
     super(outputColumnNum);
-    this.value = new Date(DateWritable.daysToMillis((int) value));
+    this.value = new Date(DateWritableV2.daysToMillis((int) value));
     this.colNum = colNum;
   }
 
@@ -110,7 +110,7 @@ public class <ClassName> extends VectorExpression {
         scratchIntervalYearMonth2.set((int) vector2[0]);
         dtm.<OperatorMethod>(
             value, scratchIntervalYearMonth2, outputDate);
-        outputVector[0] = DateWritable.dateToDays(outputDate);
+        outputVector[0] = DateWritableV2.dateToDays(outputDate);
       } else {
         outputIsNull[0] = true;
         outputColVector.noNulls = false;
@@ -131,7 +131,7 @@ public class <ClassName> extends VectorExpression {
             scratchIntervalYearMonth2.set((int) vector2[i]);
             dtm.<OperatorMethod>(
                 value, scratchIntervalYearMonth2, outputDate);
-            outputVector[i] = DateWritable.dateToDays(outputDate);
+            outputVector[i] = DateWritableV2.dateToDays(outputDate);
           }
          } else {
            for(int j = 0; j != n; j++) {
@@ -139,7 +139,7 @@ public class <ClassName> extends VectorExpression {
              scratchIntervalYearMonth2.set((int) vector2[i]);
              dtm.<OperatorMethod>(
                  value, scratchIntervalYearMonth2, outputDate);
-             outputVector[i] = DateWritable.dateToDays(outputDate);
+             outputVector[i] = DateWritableV2.dateToDays(outputDate);
            }
          }
       } else {
@@ -154,7 +154,7 @@ public class <ClassName> extends VectorExpression {
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               value, scratchIntervalYearMonth2, outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       }
     } else /* there are NULLs in the inputColVector */ {
@@ -171,7 +171,7 @@ public class <ClassName> extends VectorExpression {
             scratchIntervalYearMonth2.set((int) vector2[i]);
             dtm.<OperatorMethod>(
                 value, scratchIntervalYearMonth2, outputDate);
-            outputVector[i] = DateWritable.dateToDays(outputDate);
+            outputVector[i] = DateWritableV2.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;
@@ -184,7 +184,7 @@ public class <ClassName> extends VectorExpression {
             scratchIntervalYearMonth2.set((int) vector2[i]);
             dtm.<OperatorMethod>(
                 value, scratchIntervalYearMonth2, outputDate);
-            outputVector[i] = DateWritable.dateToDays(outputDate);
+            outputVector[i] = DateWritableV2.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
index ef8f2a3..678d827 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
@@ -36,8 +36,8 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateTimeScalarArithmeticTimestampColumnBase.txt.
@@ -57,7 +57,7 @@ public class <ClassName> extends VectorExpression {
     super(outputColumnNum);
     // Scalar input #1 is type date (days).  For the math we convert it to a timestamp.
     this.value = new Timestamp(0);
-    this.value.setTime(DateWritable.daysToMillis((int) value));
+    this.value.setTime(DateWritableV2.daysToMillis((int) value));
     this.colNum = colNum;
   }
 
@@ -189,7 +189,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return "val " + value + ", " + getColumnParamString(1, colNum);
+    return "val " + org.apache.hadoop.hive.common.type.Date.ofEpochMilli(value.getTime()) + ", " + getColumnParamString(1, colNum);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt
index 5242bbd..1785abe 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt
@@ -22,7 +22,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.Filter<TypeName>ColumnBetween;
 import org.apache.hadoop.hive.ql.plan.DynamicValue;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt
index 3fa542b..6a05d77 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt
@@ -141,7 +141,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum) + ", val " + value.toString();
+    return getColumnParamString(0, colNum) + ", val " + TimestampUtils.timestampScalarTypeToString(value);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt
index fcda65f..36628a7 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt
@@ -144,7 +144,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return "val " + value.toString() + ", " + getColumnParamString(1, colNum);
+    return "val " + TimestampUtils.timestampScalarTypeToString(value) + ", " + getColumnParamString(1, colNum);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
index 393413f..4473bf0 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
@@ -26,8 +26,8 @@ import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateColumnArithmeticIntervalYearMonthColumn.txt, which covers binary arithmetic
@@ -100,44 +100,44 @@ public class <ClassName> extends VectorExpression {
      */
     if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
       scratchIntervalYearMonth1.set((int) vector1[0]);
-      scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
       dtm.<OperatorMethod>(
           scratchIntervalYearMonth1, scratchDate2, outputDate);
-      outputVector[0] = DateWritable.dateToDays(outputDate);
+      outputVector[0] = DateWritableV2.dateToDays(outputDate);
     } else if (inputColVector1.isRepeating) {
       scratchIntervalYearMonth1.set((int) vector1[0]);
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, scratchDate2, outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, scratchDate2, outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       }
     } else if (inputColVector2.isRepeating) {
-      scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           scratchIntervalYearMonth1.set((int) vector1[i]);
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, scratchDate2, outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
           scratchIntervalYearMonth1.set((int) vector1[i]);
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, scratchDate2, outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       }
     } else {
@@ -145,18 +145,18 @@ public class <ClassName> extends VectorExpression {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           scratchIntervalYearMonth1.set((int) vector1[i]);
-          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, scratchDate2, outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
           scratchIntervalYearMonth1.set((int) vector1[i]);
-          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, scratchDate2, outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
index a65c4b9..f0c1910 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateColumnArithmeticIntervalYearMonthScalar.txt, which covers binary arithmetic
@@ -50,7 +50,7 @@ public class <ClassName> extends VectorExpression {
   public <ClassName>(int colNum, long value, int outputColumnNum) {
     super(outputColumnNum);
     this.colNum = colNum;
-    this.value = new Date(DateWritable.daysToMillis((int) value));
+    this.value = new Date(DateWritableV2.daysToMillis((int) value));
   }
 
   public <ClassName>() {
@@ -96,7 +96,7 @@ public class <ClassName> extends VectorExpression {
         scratchIntervalYearMonth1.set((int) vector1[0]);
         dtm.<OperatorMethod>(
             scratchIntervalYearMonth1, value, outputDate);
-        outputVector[0] = DateWritable.dateToDays(outputDate);
+        outputVector[0] = DateWritableV2.dateToDays(outputDate);
       } else {
         outputIsNull[0] = true;
         outputColVector.noNulls = false;
@@ -118,7 +118,7 @@ public class <ClassName> extends VectorExpression {
             scratchIntervalYearMonth1.set((int) vector1[i]);
             dtm.<OperatorMethod>(
                 scratchIntervalYearMonth1, value, outputDate);
-            outputVector[i] = DateWritable.dateToDays(outputDate);
+            outputVector[i] = DateWritableV2.dateToDays(outputDate);
           }
          } else {
            for(int j = 0; j != n; j++) {
@@ -126,7 +126,7 @@ public class <ClassName> extends VectorExpression {
              scratchIntervalYearMonth1.set((int) vector1[i]);
              dtm.<OperatorMethod>(
                  scratchIntervalYearMonth1, value, outputDate);
-             outputVector[i] = DateWritable.dateToDays(outputDate);
+             outputVector[i] = DateWritableV2.dateToDays(outputDate);
            }
          }
       } else {
@@ -141,7 +141,7 @@ public class <ClassName> extends VectorExpression {
           scratchIntervalYearMonth1.set((int) vector1[i]);
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, value, outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       }
     } else /* there are NULLs in the inputColVector */ {
@@ -158,7 +158,7 @@ public class <ClassName> extends VectorExpression {
             scratchIntervalYearMonth1.set((int) vector1[i]);
             dtm.<OperatorMethod>(
                 scratchIntervalYearMonth1, value, outputDate);
-            outputVector[i] = DateWritable.dateToDays(outputDate);
+            outputVector[i] = DateWritableV2.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;
@@ -170,7 +170,7 @@ public class <ClassName> extends VectorExpression {
             scratchIntervalYearMonth1.set((int) vector1[i]);
             dtm.<OperatorMethod>(
                 scratchIntervalYearMonth1, value, outputDate);
-            outputVector[i] = DateWritable.dateToDays(outputDate);
+            outputVector[i] = DateWritableV2.dateToDays(outputDate);
             outputIsNull[i] = false;
           } else {
             outputIsNull[i] = true;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
index c297116..71e142c 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
@@ -182,7 +182,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum) + ", val " + value.toString();
+    return getColumnParamString(0, colNum) + ", val " + TimestampUtils.timestampScalarTypeToString(value);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
index 34d516e..27f2fcf 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
@@ -37,8 +37,8 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateTimeScalarArithmeticIntervalYearMonthColumn.txt.
@@ -107,10 +107,10 @@ public class <ClassName> extends VectorExpression {
     if (inputColVector2.isRepeating) {
       if (inputColVector2.noNulls || !inputIsNull[0]) {
         outputIsNull[0] = false;
-        scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+        scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
         dtm.<OperatorMethod>(
             value, scratchDate2, outputDate);
-        outputVector[0] = DateWritable.dateToDays(outputDate);
+        outputVector[0] = DateWritableV2.dateToDays(outputDate);
       } else {
         outputIsNull[0] = true;
         outputColVector.noNulls = false;
@@ -129,18 +129,18 @@ public class <ClassName> extends VectorExpression {
            for(int j = 0; j != n; j++) {
             final int i = sel[j];
             outputIsNull[i] = false;
-            scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+            scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
             dtm.<OperatorMethod>(
                 value, scratchDate2, outputDate);
-            outputVector[i] = DateWritable.dateToDays(outputDate);
+            outputVector[i] = DateWritableV2.dateToDays(outputDate);
           }
          } else {
            for(int j = 0; j != n; j++) {
              final int i = sel[j];
-             scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+             scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
              dtm.<OperatorMethod>(
                  value, scratchDate2, outputDate);
-             outputVector[i] = DateWritable.dateToDays(outputDate);
+             outputVector[i] = DateWritableV2.dateToDays(outputDate);
            }
          }
       } else {
@@ -152,10 +152,10 @@ public class <ClassName> extends VectorExpression {
           outputColVector.noNulls = true;
         }
         for(int i = 0; i != n; i++) {
-          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               value, scratchDate2, outputDate);
-          outputVector[i] = DateWritable.dateToDays(outputDate);
+          outputVector[i] = DateWritableV2.dateToDays(outputDate);
         }
       }
     } else /* there are NULLs in the inputColVector */ {
@@ -169,10 +169,10 @@ public class <ClassName> extends VectorExpression {
           int i = sel[j];
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+            scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
             dtm.<OperatorMethod>(
                 value, scratchDate2, outputDate);
-            outputVector[i] = DateWritable.dateToDays(outputDate);
+            outputVector[i] = DateWritableV2.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;
@@ -182,10 +182,10 @@ public class <ClassName> extends VectorExpression {
         for(int i = 0; i != n; i++) {
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+            scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
             dtm.<OperatorMethod>(
                 value, scratchDate2, outputDate);
-            outputVector[i] = DateWritable.dateToDays(outputDate);
+            outputVector[i] = DateWritableV2.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt
index 19263d9..397fca5 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt
@@ -157,7 +157,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum) + ", val " + value;
+    return getColumnParamString(0, colNum) + ", val " + TimestampUtils.timestampScalarTypeToString(value);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
index 45f6408..8e44c92 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
@@ -27,8 +27,8 @@ import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template TimestampColumnArithmeticDateColumn.txt, which covers binary arithmetic
@@ -96,7 +96,7 @@ public class <ClassName> extends VectorExpression {
      * conditional checks in the inner loop.
      */
     if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
       dtm.<OperatorMethod>(
           inputColVector1.asScratch<CamelOperandType1>(0), scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
       outputColVector.setFromScratch<CamelReturnType>(0);
@@ -105,21 +105,21 @@ public class <ClassName> extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               value1, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               value1, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
         }
       }
     } else if (inputColVector2.isRepeating) {
-      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
@@ -138,14 +138,14 @@ public class <ClassName> extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               inputColVector1.asScratch<CamelOperandType1>(i), scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               inputColVector1.asScratch<CamelOperandType1>(i), scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);