Posted to commits@hive.apache.org by mm...@apache.org on 2018/06/25 10:38:09 UTC

[01/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Repository: hive
Updated Branches:
  refs/heads/master 227766166 -> 33088de0e
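
Context for the hunks below: HIVE-12192 had replaced java.sql.Date/java.sql.Timestamp
with Hive's timezone-agnostic org.apache.hadoop.hive.common.type.Date/Timestamp and
introduced the matching DateWritableV2/TimestampWritableV2 writables. This revert moves
the serde2 object inspectors and their tests back to the java.sql types and the V1
writables. A minimal sketch of the construction-API difference the hunks keep
exercising (both call shapes appear verbatim in this diff; the literal values are
illustrative only):

  // V2 style, being reverted: one factory call carries millis plus a full nanos field
  org.apache.hadoop.hive.common.type.Timestamp v2 =
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(1423321282123L, 123456789);

  // java.sql style, being restored: the constructor takes millis; setNanos() then
  // replaces the whole sub-second part implied by the millis argument
  java.sql.Timestamp ts = new java.sql.Timestamp(1423321282123L);
  ts.setNanos(123456789);

The date side is analogous: Date.ofEpochMilli(millis) on the V2 path corresponds to
new java.sql.Date(millis) on the restored path, with the caveat that the java.sql
types interpret their fields in the JVM's default time zone.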


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
index 6a96ddd..5d73806 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
@@ -17,12 +17,13 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import java.sql.Date;
+
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 /**
- * A WritableDateObjectInspector inspects a DateWritableV2 Object.
+ * A WritableDateObjectInspector inspects a DateWritable Object.
  */
 public class WritableDateObjectInspector extends
     AbstractPrimitiveWritableObjectInspector implements
@@ -33,49 +34,35 @@ public class WritableDateObjectInspector extends
   }
 
   @Override
-  public DateWritableV2 getPrimitiveWritableObject(Object o) {
-    return o == null ? null : (DateWritableV2) o;
+  public DateWritable getPrimitiveWritableObject(Object o) {
+    return o == null ? null : (DateWritable) o;
   }
 
   public Date getPrimitiveJavaObject(Object o) {
-    return o == null ? null : ((DateWritableV2) o).get();
+    return o == null ? null : ((DateWritable) o).get();
   }
 
   public Object copyObject(Object o) {
-    return o == null ? null : new DateWritableV2((DateWritableV2) o);
+    return o == null ? null : new DateWritable((DateWritable) o);
   }
 
   public Object set(Object o, Date d) {
     if (d == null) {
       return null;
     }
-    ((DateWritableV2) o).set(d);
+    ((DateWritable) o).set(d);
     return o;
   }
 
-  @Deprecated
-  public Object set(Object o, java.sql.Date d) {
+  public Object set(Object o, DateWritable d) {
     if (d == null) {
       return null;
     }
-    ((DateWritableV2) o).set(Date.ofEpochMilli(d.getTime()));
+    ((DateWritable) o).set(d);
     return o;
   }
 
-  public Object set(Object o, DateWritableV2 d) {
-    if (d == null) {
-      return null;
-    }
-    ((DateWritableV2) o).set(d);
-    return o;
-  }
-
-  @Deprecated
-  public Object create(java.sql.Date value) {
-    return new DateWritableV2(Date.ofEpochMilli(value.getTime()));
-  }
-
   public Object create(Date d) {
-    return new DateWritableV2(d);
+    return new DateWritable(d);
   }
 }
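
With the V2 bridge gone, WritableDateObjectInspector again pairs DateWritable with
java.sql.Date directly, which is why the deprecated set/create overloads that
converted through Date.ofEpochMilli(d.getTime()) can be dropped: there is no second
Date type left to bridge to. A short usage sketch under the reverted API; obtaining
the inspector through PrimitiveObjectInspectorFactory.writableDateObjectInspector is
an assumption about the usual accessor, not something this hunk shows:

  import java.sql.Date;
  import org.apache.hadoop.hive.serde2.io.DateWritable;
  import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
  import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjectInspector;

  // assumed singleton accessor
  WritableDateObjectInspector oi = PrimitiveObjectInspectorFactory.writableDateObjectInspector;

  DateWritable dw = new DateWritable(Date.valueOf("2014-01-01"));
  Date d = oi.getPrimitiveJavaObject(dw);               // plain java.sql.Date, no bridge
  DateWritable copy = (DateWritable) oi.copyObject(dw); // independent of dw afterwards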

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java
index 17888fd..ba407aa 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java
@@ -21,7 +21,7 @@ import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.BaseCharUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.io.Text;
@@ -30,6 +30,10 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.BooleanWritable;
 
+import java.nio.ByteBuffer;
+import java.nio.CharBuffer;
+import java.nio.charset.Charset;
+
 public class WritableHiveCharObjectInspector extends AbstractPrimitiveWritableObjectInspector
     implements SettableHiveCharObjectInspector {
   // no-arg ctor required for Kryo serialization
@@ -48,7 +52,7 @@ public class WritableHiveCharObjectInspector extends AbstractPrimitiveWritableOb
       return null;
     }
 
-    if ((o instanceof Text) || (o instanceof TimestampWritableV2)
+    if ((o instanceof Text) || (o instanceof TimestampWritable)
         || (o instanceof HiveDecimalWritable) || (o instanceof DoubleWritable)
         || (o instanceof FloatWritable) || (o instanceof LongWritable) || (o instanceof IntWritable)
         || (o instanceof BooleanWritable)) {
@@ -71,7 +75,7 @@ public class WritableHiveCharObjectInspector extends AbstractPrimitiveWritableOb
       return null;
     }
 
-    if ((o instanceof Text) || (o instanceof TimestampWritableV2)
+    if ((o instanceof Text) || (o instanceof TimestampWritable)
         || (o instanceof HiveDecimalWritable) || (o instanceof DoubleWritable)
         || (o instanceof FloatWritable) || (o instanceof LongWritable) || (o instanceof IntWritable)
         || (o instanceof BooleanWritable)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
index 456858c..81c0550 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
@@ -18,13 +18,13 @@
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.BaseCharUtils;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.FloatWritable;
@@ -52,7 +52,7 @@ implements SettableHiveVarcharObjectInspector {
       return null;
     }
 
-    if ((o instanceof Text) || (o instanceof TimestampWritableV2)
+    if ((o instanceof Text) || (o instanceof TimestampWritable)
         || (o instanceof HiveDecimalWritable) || (o instanceof DoubleWritable)
         || (o instanceof FloatWritable) || (o instanceof LongWritable) || (o instanceof IntWritable)
         || (o instanceof BooleanWritable)) {
@@ -75,7 +75,7 @@ implements SettableHiveVarcharObjectInspector {
       return null;
     }
 
-    if ((o instanceof Text) || (o instanceof TimestampWritableV2)
+    if ((o instanceof Text) || (o instanceof TimestampWritable)
         || (o instanceof HiveDecimalWritable) || (o instanceof DoubleWritable)
         || (o instanceof FloatWritable) || (o instanceof LongWritable) || (o instanceof IntWritable)
         || (o instanceof BooleanWritable)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
index e0ab191..47b51f5 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 public class WritableTimestampObjectInspector extends
@@ -30,29 +31,20 @@ public class WritableTimestampObjectInspector extends
   }
 
   @Override
-  public TimestampWritableV2 getPrimitiveWritableObject(Object o) {
-    return o == null ? null : (TimestampWritableV2) o;
+  public TimestampWritable getPrimitiveWritableObject(Object o) {
+    return o == null ? null : (TimestampWritable) o;
   }
 
   public Timestamp getPrimitiveJavaObject(Object o) {
-    return o == null ? null : ((TimestampWritableV2) o).getTimestamp();
+    return o == null ? null : ((TimestampWritable) o).getTimestamp();
   }
 
   public Object copyObject(Object o) {
-    return o == null ? null : new TimestampWritableV2((TimestampWritableV2) o);
+    return o == null ? null : new TimestampWritable((TimestampWritable) o);
   }
 
   public Object set(Object o, byte[] bytes, int offset) {
-    ((TimestampWritableV2) o).set(bytes, offset);
-    return o;
-  }
-
-  @Deprecated
-  public Object set(Object o, java.sql.Timestamp t) {
-    if (t == null) {
-      return null;
-    }
-    ((TimestampWritableV2) o).set(Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
+    ((TimestampWritable) o).set(bytes, offset);
     return o;
   }
 
@@ -60,27 +52,23 @@ public class WritableTimestampObjectInspector extends
     if (t == null) {
       return null;
     }
-    ((TimestampWritableV2) o).set(t);
+    ((TimestampWritable) o).set(t);
     return o;
   }
 
-  public Object set(Object o, TimestampWritableV2 t) {
+  public Object set(Object o, TimestampWritable t) {
     if (t == null) {
       return null;
     }
-    ((TimestampWritableV2) o).set(t);
+    ((TimestampWritable) o).set(t);
     return o;
   }
 
   public Object create(byte[] bytes, int offset) {
-    return new TimestampWritableV2(bytes, offset);
-  }
-
-  public Object create(java.sql.Timestamp t) {
-    return new TimestampWritableV2(Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
+    return new TimestampWritable(bytes, offset);
   }
 
   public Object create(Timestamp t) {
-    return new TimestampWritableV2(t);
+    return new TimestampWritable(t);
   }
 }
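
Note that the byte-level entry points set(o, bytes, offset) and create(bytes, offset)
survive the revert unchanged; only the java.sql bridge overloads disappear. A minimal
round-trip sketch through that binary form, mirroring what verifySetTimestamp does in
the TestTimestampWritable changes further down:

  import java.sql.Timestamp;
  import org.apache.hadoop.hive.serde2.io.TimestampWritable;

  TimestampWritable tw = new TimestampWritable(new Timestamp(1423321282123L));
  byte[] bytes = tw.getBytes();                     // serialized form used by (bytes, offset)
  TimestampWritable back = new TimestampWritable(bytes, 0);
  assert tw.getTimestamp().equals(back.getTimestamp());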

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java b/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
index c0f9726..749d8ac 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
@@ -18,18 +18,19 @@
 
 package org.apache.hadoop.hive.serde2;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java b/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
index c6b77ed..22aadbb 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.hive.serde2;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -27,19 +29,16 @@ import java.util.Map.Entry;
 
 import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -47,6 +46,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
@@ -244,7 +244,7 @@ public class VerifyFast {
         case DATE:
           {
             Date value = deserializeRead.currentDateWritable.get();
-            Date expected = ((DateWritableV2) object).get();
+            Date expected = ((DateWritable) object).get();
             if (!value.equals(expected)) {
               TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
             }
@@ -253,7 +253,7 @@ public class VerifyFast {
         case TIMESTAMP:
           {
             Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
-            Timestamp expected = ((TimestampWritableV2) object).getTimestamp();
+            Timestamp expected = ((TimestampWritable) object).getTimestamp();
             if (!value.equals(expected)) {
               TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
             }
@@ -394,13 +394,13 @@ public class VerifyFast {
           break;
         case DATE:
           {
-            Date value = ((DateWritableV2) object).get();
+            Date value = ((DateWritable) object).get();
             serializeWrite.writeDate(value);
           }
           break;
         case TIMESTAMP:
           {
-            Timestamp value = ((TimestampWritableV2) object).getTimestamp();
+            Timestamp value = ((TimestampWritable) object).getTimestamp();
             serializeWrite.writeTimestamp(value);
           }
           break;
@@ -571,9 +571,9 @@ public class VerifyFast {
     case DECIMAL:
       return new HiveDecimalWritable(deserializeRead.currentHiveDecimalWritable);
     case DATE:
-      return new DateWritableV2(deserializeRead.currentDateWritable);
+      return new DateWritable(deserializeRead.currentDateWritable);
     case TIMESTAMP:
-      return new TimestampWritableV2(deserializeRead.currentTimestampWritable);
+      return new TimestampWritable(deserializeRead.currentTimestampWritable);
     case INTERVAL_YEAR_MONTH:
       return new HiveIntervalYearMonthWritable(deserializeRead.currentHiveIntervalYearMonthWritable);
     case INTERVAL_DAY_TIME:
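
The VerifyFast hunks above keep the detach-by-copy pattern: each DATE or TIMESTAMP
field read through DeserializeRead is copy-constructed into a fresh writable, because
the reader reuses its current* fields for every row. A sketch of that step, with
deserializeRead standing in for a real DeserializeRead instance:

  // deserializeRead is hypothetical here; its current* writables are reused across reads
  DateWritable dateCopy = new DateWritable(deserializeRead.currentDateWritable);
  TimestampWritable tsCopy = new TimestampWritable(deserializeRead.currentTimestampWritable);
  // the copies stay valid after the reader moves on and mutates its current* writables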

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
index a486ab1..c270d71 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
@@ -17,18 +17,18 @@
  */
 package org.apache.hadoop.hive.serde2.binarysortable;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
index 6febc36..82d126a 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
@@ -17,21 +17,26 @@
  */
 package org.apache.hadoop.hive.serde2.binarysortable;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.hadoop.hive.common.type.Date;
+import junit.framework.TestCase;
+
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.common.type.HiveBaseChar;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -41,12 +46,12 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarch
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hadoop.io.Writable;
 import org.apache.hive.common.util.DateUtils;
 
-import junit.framework.TestCase;
-
 // Just the primitive types.
 public class MyTestPrimitiveClass {
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
index 262e55a..97eb967 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
@@ -18,35 +18,29 @@
 
 package org.apache.hadoop.hive.serde2.io;
 
-import com.google.code.tempusfugit.concurrency.ConcurrentRule;
-import com.google.code.tempusfugit.concurrency.RepeatingRule;
-import com.google.code.tempusfugit.concurrency.annotations.Concurrent;
-import com.google.code.tempusfugit.concurrency.annotations.Repeating;
-import org.apache.hadoop.hive.common.type.Date;
-import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.Test;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+import org.junit.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.DataOutputStream;
+import static org.junit.Assert.*;
+import java.io.*;
+import java.sql.Date;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
 import java.util.Calendar;
+import java.util.GregorianCalendar;
 import java.util.LinkedList;
 import java.util.TimeZone;
 import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-public class TestDateWritableV2 {
-  private static final Logger LOG = LoggerFactory.getLogger(TestDateWritableV2.class);
+public class TestDateWritable {
+  private static final Logger LOG = LoggerFactory.getLogger(TestDateWritable.class);
 
   @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
   @Rule public RepeatingRule repeatingRule = new RepeatingRule();
@@ -56,9 +50,9 @@ public class TestDateWritableV2 {
   @Repeating(repetition=100)
   public void testConstructor() {
     Date date = Date.valueOf(getRandomDateString());
-    DateWritableV2 dw1 = new DateWritableV2(date);
-    DateWritableV2 dw2 = new DateWritableV2(dw1);
-    DateWritableV2 dw3 = new DateWritableV2(dw1.getDays());
+    DateWritable dw1 = new DateWritable(date);
+    DateWritable dw2 = new DateWritable(dw1);
+    DateWritable dw3 = new DateWritable(dw1.getDays());
 
     assertEquals(dw1, dw1);
     assertEquals(dw1, dw2);
@@ -79,9 +73,9 @@ public class TestDateWritableV2 {
       date2 = Date.valueOf(getRandomDateString());
     }
 
-    DateWritableV2 dw1 = new DateWritableV2(date1);
-    DateWritableV2 dw2 = new DateWritableV2(date2);
-    DateWritableV2 dw3 = new DateWritableV2(date1);
+    DateWritable dw1 = new DateWritable(date1);
+    DateWritable dw2 = new DateWritable(date2);
+    DateWritable dw3 = new DateWritable(date1);
 
     assertTrue("Dates should be equal", dw1.equals(dw1));
     assertTrue("Dates should be equal", dw1.equals(dw3));
@@ -103,14 +97,14 @@ public class TestDateWritableV2 {
     Date date1 = Date.valueOf(getRandomDateString());
     Date date2 = Date.valueOf(getRandomDateString());
     Date date3 = Date.valueOf(getRandomDateString());
-    DateWritableV2 dw1 = new DateWritableV2(date1);
-    DateWritableV2 dw2 = new DateWritableV2(date2);
-    DateWritableV2 dw3 = new DateWritableV2(date3);
-    DateWritableV2 dw4 = new DateWritableV2();
+    DateWritable dw1 = new DateWritable(date1);
+    DateWritable dw2 = new DateWritable(date2);
+    DateWritable dw3 = new DateWritable(date3);
+    DateWritable dw4 = new DateWritable();
 
     // Getters
     assertEquals(date1, dw1.get());
-    assertEquals(date1.toEpochSecond(), dw1.getTimeInSeconds());
+    assertEquals(date1.getTime() / 1000, dw1.getTimeInSeconds());
 
     dw4.set(Date.valueOf("1970-01-02"));
     assertEquals(1, dw4.getDays());
@@ -132,8 +126,8 @@ public class TestDateWritableV2 {
   @Concurrent(count=4)
   @Repeating(repetition=100)
   public void testWritableMethods() throws Throwable {
-    DateWritableV2 dw1 = new DateWritableV2(Date.valueOf(getRandomDateString()));
-    DateWritableV2 dw2 = new DateWritableV2();
+    DateWritable dw1 = new DateWritable(Date.valueOf(getRandomDateString()));
+    DateWritable dw2 = new DateWritable();
     ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
     DataOutput out = new DataOutputStream(byteStream);
 
@@ -157,11 +151,12 @@ public class TestDateWritableV2 {
 
   @BeforeClass
   public static void setupDateStrings() {
+    DateFormat format = new SimpleDateFormat("yyyy-MM-dd");
     Date initialDate = Date.valueOf("2014-01-01");
     Calendar cal = Calendar.getInstance();
-    cal.setTimeInMillis(initialDate.toEpochMilli());
+    cal.setTime(initialDate);
     for (int idx = 0; idx < 365; ++idx) {
-      dateStrings[idx] = Date.ofEpochMilli(cal.getTimeInMillis()).toString();
+      dateStrings[idx] = format.format(cal.getTime());
       cal.add(1, Calendar.DAY_OF_YEAR);
     }
   }
@@ -181,20 +176,21 @@ public class TestDateWritableV2 {
 
     @Override
     public Void call() throws Exception {
-      // Iterate through each day of the year, make sure Date/DateWritableV2 match
+      SimpleDateFormat sdf = new SimpleDateFormat("YYYY-MM-dd HH:mm:ss");
+      // Iterate through each day of the year, make sure Date/DateWritable match
       Date originalDate = Date.valueOf("1900-01-01");
       Calendar cal = Calendar.getInstance();
-      cal.setTimeInMillis(originalDate.toEpochMilli());
+      cal.setTimeInMillis(originalDate.getTime());
       for (int idx = 0; idx < 365*200; ++idx) {
-        originalDate = Date.ofEpochMilli(cal.getTimeInMillis());
+        originalDate = new Date(cal.getTimeInMillis());
         // Make sure originalDate is at midnight in the local time zone,
-        // since DateWritableV2 will generate dates at that time.
+        // since DateWritable will generate dates at that time.
         originalDate = Date.valueOf(originalDate.toString());
-        DateWritableV2 dateWritable = new DateWritableV2(originalDate);
-        Date actual = dateWritable.get();
+        DateWritable dateWritable = new DateWritable(originalDate);
+        Date actual = dateWritable.get(false);
         if (!originalDate.equals(actual)) {
-          String originalStr = originalDate.toString();
-          String actualStr = actual.toString();
+          String originalStr = sdf.format(originalDate);
+          String actualStr = sdf.format(actual);
           if (originalStr.substring(0, 10).equals(actualStr.substring(0, 10))) continue;
           bad.add(new DtMismatch(originalStr, actualStr, tz));
         }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
index 155dc1f..3fe472e 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
@@ -26,6 +26,7 @@ import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.math.BigDecimal;
+import java.sql.Timestamp;
 import java.text.DateFormat;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
@@ -34,16 +35,15 @@ import java.util.List;
 import java.util.Random;
 import java.util.TimeZone;
 
-import org.apache.hadoop.hive.common.type.TimestampUtils;
+import org.apache.hadoop.hive.ql.util.TimestampUtils;
 import org.junit.*;
 import static org.junit.Assert.*;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 
-public class TestTimestampWritableV2 {
+public class TestTimestampWritable {
 
   @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
   @Rule public RepeatingRule repeatingRule = new RepeatingRule();
@@ -52,9 +52,7 @@ public class TestTimestampWritableV2 {
       new ThreadLocal<DateFormat>() {
         @Override
         protected DateFormat initialValue() {
-          SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-          formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
-          return formatter;
+          return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
         }
       };
 
@@ -70,10 +68,10 @@ public class TestTimestampWritableV2 {
   private static long getSeconds(Timestamp ts) {
     // To compute seconds, we first subtract the milliseconds stored in the nanos field of the
     // Timestamp from the result of getTime().
-    long seconds = (ts.toEpochMilli() - ts.getNanos() / 1000000) / 1000;
+    long seconds = (ts.getTime() - ts.getNanos() / 1000000) / 1000;
 
     // It should also be possible to calculate this based on ts.getTime() only.
-    assertEquals(seconds, TimestampUtils.millisToSeconds(ts.toEpochMilli()));
+    assertEquals(seconds, TimestampUtils.millisToSeconds(ts.getTime()));
 
     return seconds;
   }
@@ -98,17 +96,17 @@ public class TestTimestampWritableV2 {
     return timestampStr;
   }
 
-  private static void assertTSWEquals(TimestampWritableV2 expected, TimestampWritableV2 actual) {
+  private static void assertTSWEquals(TimestampWritable expected, TimestampWritable actual) {
     assertEquals(normalizeTimestampStr(expected.toString()),
                  normalizeTimestampStr(actual.toString()));
     assertEquals(expected, actual);
     assertEquals(expected.getTimestamp(), actual.getTimestamp());
   }
 
-  private static TimestampWritableV2 deserializeFromBytes(byte[] tsBytes) throws IOException {
+  private static TimestampWritable deserializeFromBytes(byte[] tsBytes) throws IOException {
     ByteArrayInputStream bais = new ByteArrayInputStream(tsBytes);
     DataInputStream dis = new DataInputStream(bais);
-    TimestampWritableV2 deserTSW = new TimestampWritableV2();
+    TimestampWritable deserTSW = new TimestampWritable();
     deserTSW.readFields(dis);
     return deserTSW;
   }
@@ -165,13 +163,13 @@ public class TestTimestampWritableV2 {
     return result;
   }
 
-  private static TimestampWritableV2 serializeDeserializeAndCheckTimestamp(Timestamp ts)
+  private static TimestampWritable serializeDeserializeAndCheckTimestamp(Timestamp ts)
       throws IOException {
-    TimestampWritableV2 tsw = new TimestampWritableV2(ts);
+    TimestampWritable tsw = new TimestampWritable(ts);
     assertEquals(ts, tsw.getTimestamp());
 
     byte[] tsBytes = serializeToBytes(tsw);
-    TimestampWritableV2 deserTSW = deserializeFromBytes(tsBytes);
+    TimestampWritable deserTSW = deserializeFromBytes(tsBytes);
     assertTSWEquals(tsw, deserTSW);
     assertEquals(ts, deserTSW.getTimestamp());
     assertEquals(tsBytes.length, tsw.getTotalLength());
@@ -179,13 +177,13 @@ public class TestTimestampWritableV2 {
     // Also convert to/from binary-sortable representation.
     int binarySortableOffset = Math.abs(tsw.hashCode()) % 10;
     byte[] binarySortableBytes = padBytes(tsw.getBinarySortable(), binarySortableOffset);
-    TimestampWritableV2 fromBinSort = new TimestampWritableV2();
+    TimestampWritable fromBinSort = new TimestampWritable();
     fromBinSort.setBinarySortable(binarySortableBytes, binarySortableOffset);
     assertTSWEquals(tsw, fromBinSort);
 
-    long timeSeconds = ts.toEpochSecond();
+    long timeSeconds = ts.getTime() / 1000;
     if (0 <= timeSeconds && timeSeconds <= Integer.MAX_VALUE) {
-      assertEquals(Timestamp.ofEpochSecond(timeSeconds),
+      assertEquals(new Timestamp(timeSeconds * 1000),
         fromIntAndVInts((int) timeSeconds, 0).getTimestamp());
 
       int nanos = reverseNanos(ts.getNanos());
@@ -199,32 +197,32 @@ public class TestTimestampWritableV2 {
 
     // Test various set methods and copy constructors.
     {
-      TimestampWritableV2 tsSet1 = new TimestampWritableV2();
+      TimestampWritable tsSet1 = new TimestampWritable();
       // make the offset non-zero to keep things interesting.
       int offset = Math.abs(ts.hashCode() % 32);
       byte[] shiftedBytes = padBytes(tsBytes, offset);
       tsSet1.set(shiftedBytes, offset);
       assertTSWEquals(tsw, tsSet1);
 
-      TimestampWritableV2 tswShiftedBytes = new TimestampWritableV2(shiftedBytes, offset);
+      TimestampWritable tswShiftedBytes = new TimestampWritable(shiftedBytes, offset);
       assertTSWEquals(tsw, tswShiftedBytes);
       assertTSWEquals(tsw, deserializeFromBytes(serializeToBytes(tswShiftedBytes)));
     }
 
     {
-      TimestampWritableV2 tsSet2 = new TimestampWritableV2();
+      TimestampWritable tsSet2 = new TimestampWritable();
       tsSet2.set(ts);
       assertTSWEquals(tsw, tsSet2);
     }
 
     {
-      TimestampWritableV2 tsSet3 = new TimestampWritableV2();
+      TimestampWritable tsSet3 = new TimestampWritable();
       tsSet3.set(tsw);
       assertTSWEquals(tsw, tsSet3);
     }
 
     {
-      TimestampWritableV2 tsSet4 = new TimestampWritableV2();
+      TimestampWritable tsSet4 = new TimestampWritable();
       tsSet4.set(deserTSW);
       assertTSWEquals(tsw, tsSet4);
     }
@@ -253,7 +251,7 @@ public class TestTimestampWritableV2 {
     assertEquals(serializeDeserializeAndCheckTimestamp(ts).getNanos(), nanos);
   }
 
-  private static TimestampWritableV2 fromIntAndVInts(int i, long... vints) throws IOException {
+  private static TimestampWritable fromIntAndVInts(int i, long... vints) throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     DataOutputStream dos = new DataOutputStream(baos);
     dos.writeInt(i);
@@ -263,7 +261,7 @@ public class TestTimestampWritableV2 {
       }
     }
     byte[] bytes = baos.toByteArray();
-    TimestampWritableV2 tsw = deserializeFromBytes(bytes);
+    TimestampWritable tsw = deserializeFromBytes(bytes);
     assertEquals(toList(bytes), toList(serializeToBytes(tsw)));
     return tsw;
   }
@@ -290,7 +288,7 @@ public class TestTimestampWritableV2 {
     Random rand = new Random(294722773L);
     for (int i = 0; i < 10000; ++i) {
       long millis = ((long) rand.nextInt(Integer.MAX_VALUE)) * 1000;
-      checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(millis), randomNanos(rand));
+      checkTimestampWithAndWithoutNanos(new Timestamp(millis), randomNanos(rand));
     }
   }
 
@@ -308,7 +306,7 @@ public class TestTimestampWritableV2 {
     Random rand = new Random(789149717L);
     for (int i = 0; i < 10000; ++i) {
       long millis = randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand);
-      checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(millis), randomNanos(rand));
+      checkTimestampWithAndWithoutNanos(new Timestamp(millis), randomNanos(rand));
     }
   }
 
@@ -317,7 +315,7 @@ public class TestTimestampWritableV2 {
   public void testTimestampsInFullRange() throws IOException {
     Random rand = new Random(2904974913L);
     for (int i = 0; i < 10000; ++i) {
-      checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(rand.nextLong()), randomNanos(rand));
+      checkTimestampWithAndWithoutNanos(new Timestamp(rand.nextLong()), randomNanos(rand));
     }
   }
 
@@ -328,9 +326,10 @@ public class TestTimestampWritableV2 {
     for (int nanosPrecision = 0; nanosPrecision <= 4; ++nanosPrecision) {
       for (int i = 0; i < 10000; ++i) {
         long millis = randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand);
+        Timestamp ts = new Timestamp(millis);
         int nanos = randomNanos(rand, nanosPrecision);
-        Timestamp ts = Timestamp.ofEpochMilli(millis, nanos);
-        TimestampWritableV2 tsw = new TimestampWritableV2(ts);
+        ts.setNanos(nanos);
+        TimestampWritable tsw = new TimestampWritable(ts);
         double asDouble = tsw.getDouble();
         int recoveredNanos =
           (int) (Math.round((asDouble - Math.floor(asDouble)) * Math.pow(10, nanosPrecision)) *
@@ -357,7 +356,7 @@ public class TestTimestampWritableV2 {
   public void testDecimalToTimestampRandomly() {
     Random rand = new Random(294729777L);
     for (int i = 0; i < 10000; ++i) {
-      Timestamp ts = Timestamp.ofEpochMilli(
+      Timestamp ts = new Timestamp(
           randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand));
       ts.setNanos(randomNanos(rand, 9));  // full precision
       assertEquals(ts, TimestampUtils.decimalToTimestamp(timestampToDecimal(ts)));
@@ -368,8 +367,8 @@ public class TestTimestampWritableV2 {
   @Concurrent(count=4)
   @Repeating(repetition=100)
   public void testDecimalToTimestampCornerCases() {
-    Timestamp ts = Timestamp.ofEpochMilli(parseToMillis("1969-03-04 05:44:33"));
-    assertEquals(0, ts.toEpochMilli() % 1000);
+    Timestamp ts = new Timestamp(parseToMillis("1969-03-04 05:44:33"));
+    assertEquals(0, ts.getTime() % 1000);
     for (int nanos : new int[] { 100000, 900000, 999100000, 999900000 }) {
       ts.setNanos(nanos);
       HiveDecimal d = timestampToDecimal(ts);
@@ -450,7 +449,7 @@ public class TestTimestampWritableV2 {
     assertEquals(-100, TimestampUtils .millisToSeconds(-100000));
     assertEquals(1, TimestampUtils .millisToSeconds(1500));
     assertEquals(19, TimestampUtils .millisToSeconds(19999));
-    assertEquals(20, TimestampUtils.millisToSeconds(20000));
+    assertEquals(20, TimestampUtils .millisToSeconds(20000));
   }
 
   private static int compareEqualLengthByteArrays(byte[] a, byte[] b) {
@@ -472,14 +471,15 @@ public class TestTimestampWritableV2 {
   @Repeating(repetition=100)
   public void testBinarySortable() {
     Random rand = new Random(5972977L);
-    List<TimestampWritableV2> tswList = new ArrayList<TimestampWritableV2>();
+    List<TimestampWritable> tswList = new ArrayList<TimestampWritable>();
     for (int i = 0; i < 50; ++i) {
-      Timestamp ts = Timestamp.ofEpochMilli(rand.nextLong(), randomNanos(rand));
-      tswList.add(new TimestampWritableV2(ts));
+      Timestamp ts = new Timestamp(rand.nextLong());
+      ts.setNanos(randomNanos(rand));
+      tswList.add(new TimestampWritable(ts));
     }
-    for (TimestampWritableV2 tsw1 : tswList) {
+    for (TimestampWritable tsw1 : tswList) {
       byte[] bs1 = tsw1.getBinarySortable();
-      for (TimestampWritableV2 tsw2 : tswList) {
+      for (TimestampWritable tsw2 : tswList) {
         byte[] bs2 = tsw2.getBinarySortable();
         int binaryComparisonResult =
           normalizeComparisonResult(compareEqualLengthByteArrays(bs1, bs2));
@@ -509,11 +509,11 @@ public class TestTimestampWritableV2 {
   }
 
   private static void verifySetTimestamp(long time) {
-    Timestamp t1 = Timestamp.ofEpochMilli(time);
-    TimestampWritableV2 writable = new TimestampWritableV2(t1);
+    Timestamp t1 = new Timestamp(time);
+    TimestampWritable writable = new TimestampWritable(t1);
     byte[] bytes = writable.getBytes();
-    Timestamp t2 = new Timestamp();
-    TimestampWritableV2.setTimestamp(t2, bytes, 0);
+    Timestamp t2 = new Timestamp(0);
+    TimestampWritable.setTimestamp(t2, bytes, 0);
     assertEquals(t1, t2);
   }
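
A note on the getSeconds() helper restored above: java.sql.Timestamp.getTime() already
folds the integral milliseconds of the nanos field back into its result, so the helper
subtracts them before dividing, and the same answer must come out of
TimestampUtils.millisToSeconds(ts.getTime()). Worked numbers for a timestamp the other
hunks use:

  // java.sql.Timestamp at 2015-02-07 15:01:22.123456789 UTC:
  //   getTime()  == 1423321282123L   (the .123 integral millis come from the nanos field)
  //   getNanos() == 123456789
  long seconds = (1423321282123L - 123456789 / 1000000) / 1000;  // == 1423321282L

The millisToSeconds assertions at the end of the file pin the behaviour at second
boundaries: 19999 ms maps to 19 s while 20000 ms maps to 20 s.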
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java b/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java
index 79bf5fb..fb5dec1 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java
@@ -17,10 +17,11 @@
  */
 package org.apache.hadoop.hive.serde2.lazy;
 
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
index 398dc5c..6dcc6f8 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
@@ -17,23 +17,24 @@
  */
 package org.apache.hadoop.hive.serde2.lazybinary;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass;
+import org.apache.hadoop.hive.serde2.binarysortable.TestBinarySortableSerDe;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
 
 /**

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
index 49df56c..732bd42 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
@@ -17,16 +17,17 @@
  */
 package org.apache.hadoop.hive.serde2.lazybinary;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.Random;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java
index 2c488b0..17b844c 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector;
 
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -25,14 +26,13 @@ import junit.framework.TestCase;
 
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -95,7 +95,7 @@ public class TestStandardObjectInspectors extends TestCase {
       doTestStandardPrimitiveObjectInspector(DoubleWritable.class, Double.class);
       doTestStandardPrimitiveObjectInspector(Text.class, String.class);
       doTestStandardPrimitiveObjectInspector(BytesWritable.class, byte[].class);
-      doTestStandardPrimitiveObjectInspector(TimestampWritableV2.class, Timestamp.class);
+      doTestStandardPrimitiveObjectInspector(TimestampWritable.class, Timestamp.class);
     } catch (Throwable e) {
       e.printStackTrace();
       throw e;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java
index 3c2797e..a3fd7b5 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/primitive/TestPrimitiveObjectInspectorUtils.java
@@ -18,15 +18,15 @@
 
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.TimeZone;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
@@ -75,6 +75,7 @@ public class TestPrimitiveObjectInspectorUtils extends TestCase {
 
   @Test
   public void testgetTimestampWithMillisecondsInt() {
+    DateFormat localDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
     DateFormat gmtDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
     gmtDateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
 
@@ -84,68 +85,69 @@ public class TestPrimitiveObjectInspectorUtils extends TestCase {
 
     PrimitiveObjectInspector booleanOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.BOOLEAN);
-    assertEquals("1970-01-01 00:00:00.001", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(true, booleanOI).toSqlTimestamp()));
-    assertEquals("1970-01-01 00:00:00.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(false, booleanOI).toSqlTimestamp()));
+    assertEquals("1970-01-01 00:00:00.001", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(true, booleanOI)));
+    assertEquals("1970-01-01 00:00:00.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(false, booleanOI)));
 
     PrimitiveObjectInspector byteOI = PrimitiveObjectInspectorFactory
       .getPrimitiveJavaObjectInspector(PrimitiveCategory.BYTE);
-    assertEquals("1970-01-01 00:00:00.001", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((byte)1, byteOI).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:59.999", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((byte)-1, byteOI).toSqlTimestamp()));
+    assertEquals("1970-01-01 00:00:00.001", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((byte)1, byteOI)));
+    assertEquals("1969-12-31 23:59:59.999", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((byte)-1, byteOI)));
 
     PrimitiveObjectInspector shortOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.SHORT);
-    assertEquals("1970-01-01 00:00:00.001", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((short)1, shortOI).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:59.999", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((short)-1, shortOI).toSqlTimestamp()));
+    assertEquals("1970-01-01 00:00:00.001", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((short)1, shortOI)));
+    assertEquals("1969-12-31 23:59:59.999", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((short)-1, shortOI)));
 
     PrimitiveObjectInspector intOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.INT);
-    assertEquals("1970-01-17 11:22:01.282", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((int)1423321282, intOI).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:59.999", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((int)-1, intOI).toSqlTimestamp()));
+    assertEquals("1970-01-17 11:22:01.282", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((int)1423321282, intOI)));
+    assertEquals("1969-12-31 23:59:59.999", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((int)-1, intOI)));
 
     PrimitiveObjectInspector longOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.LONG);
-    assertEquals("1970-01-17 11:22:01.282", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(1423321282L, longOI).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:59.999", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(-1L, longOI).toSqlTimestamp()));
+    assertEquals("1970-01-17 11:22:01.282", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(1423321282L, longOI)));
+    assertEquals("1969-12-31 23:59:59.999", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(-1L, longOI)));
 
       // Float loses some precisions
     PrimitiveObjectInspector floatOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.FLOAT);
-    assertEquals("2015-02-07 15:02:24.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(1423321282.123f, floatOI).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:58.876", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(-1.123f, floatOI).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:02:24.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(1423321282.123f, floatOI)));
+    assertEquals("1969-12-31 23:59:58.876", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(-1.123f, floatOI)));
 
     PrimitiveObjectInspector doubleOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.DOUBLE);
-    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((double)1423321282.123, doubleOI).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:58.877", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((double)-1.123, doubleOI).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((double)1423321282.123, doubleOI)));
+    assertEquals("1969-12-31 23:59:58.877", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((double)-1.123, doubleOI)));
 
     PrimitiveObjectInspector decimalOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.DECIMAL);
-    assertEquals("2015-02-07 15:01:22.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(HiveDecimal.create(1423321282L), decimalOI).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:59.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(HiveDecimal.create(-1), decimalOI).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(HiveDecimal.create(1423321282L), decimalOI)));
+    assertEquals("1969-12-31 23:59:59.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(HiveDecimal.create(-1), decimalOI)));
 
     PrimitiveObjectInspector stringOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.STRING);
-    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp("2015-02-07 15:01:22.123", stringOI).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.123", localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp("2015-02-07 15:01:22.123", stringOI)));
 
     PrimitiveObjectInspector charOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.CHAR);
-    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new HiveChar("2015-02-07 15:01:22.123", 30), charOI).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.123", localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new HiveChar("2015-02-07 15:01:22.123", 30), charOI)));
 
     PrimitiveObjectInspector varcharOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.VARCHAR);
-    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new HiveVarchar("2015-02-07 15:01:22.123",30), varcharOI).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.123", localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new HiveVarchar("2015-02-07 15:01:22.123",30), varcharOI)));
 
     PrimitiveObjectInspector dateOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.DATE);
-    assertEquals("2015-02-07 00:00:00.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(Date.ofEpochMilli(1423321282123L), dateOI).toSqlTimestamp()));
+    assertEquals("2015-02-07 00:00:00.000", localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new Date(1423321282123L), dateOI)));
 
     PrimitiveObjectInspector timestampOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.TIMESTAMP);
-    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(Timestamp.ofEpochMilli(1423321282123L), timestampOI).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new Timestamp(1423321282123L), timestampOI)));
   }
 
   @Test
   public void testgetTimestampWithSecondsInt() {
+    DateFormat localDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
     DateFormat gmtDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
     gmtDateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
 
@@ -155,72 +157,70 @@ public class TestPrimitiveObjectInspectorUtils extends TestCase {
 
     PrimitiveObjectInspector booleanOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.BOOLEAN);
-    assertEquals("1970-01-01 00:00:01.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(true, booleanOI, true).toSqlTimestamp()));
-    assertEquals("1970-01-01 00:00:00.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(false, booleanOI, true).toSqlTimestamp()));
+    assertEquals("1970-01-01 00:00:01.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(true, booleanOI, true)));
+    assertEquals("1970-01-01 00:00:00.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(false, booleanOI, true)));
 
     PrimitiveObjectInspector byteOI = PrimitiveObjectInspectorFactory
       .getPrimitiveJavaObjectInspector(PrimitiveCategory.BYTE);
-    assertEquals("1970-01-01 00:00:01.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((byte)1, byteOI, true).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:59.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((byte)-1, byteOI, true).toSqlTimestamp()));
+    assertEquals("1970-01-01 00:00:01.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((byte)1, byteOI, true)));
+    assertEquals("1969-12-31 23:59:59.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((byte)-1, byteOI, true)));
 
     PrimitiveObjectInspector shortOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.SHORT);
-    assertEquals("1970-01-01 00:00:01.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((short)1, shortOI, true).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:59.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((short)-1, shortOI, true).toSqlTimestamp()));
+    assertEquals("1970-01-01 00:00:01.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((short)1, shortOI, true)));
+    assertEquals("1969-12-31 23:59:59.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((short)-1, shortOI, true)));
 
     PrimitiveObjectInspector intOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.INT);
-    assertEquals("2015-02-07 15:01:22.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((int)1423321282, intOI, true).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:59.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((int)-1, intOI, true).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((int)1423321282, intOI, true)));
+    assertEquals("1969-12-31 23:59:59.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((int)-1, intOI, true)));
 
     PrimitiveObjectInspector longOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.LONG);
-    assertEquals("2015-02-07 15:01:22.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(1423321282L, longOI, true).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:59.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(-1L, longOI, true).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(1423321282L, longOI, true)));
+    assertEquals("1969-12-31 23:59:59.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(-1L, longOI, true)));
 
      // Float loses some precision
     PrimitiveObjectInspector floatOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.FLOAT);
-    assertEquals("2015-02-07 15:02:24.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(1423321282.123f, floatOI, true).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:58.876", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(-1.123f, floatOI, true).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:02:24.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(1423321282.123f, floatOI, true)));
+    assertEquals("1969-12-31 23:59:58.876", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(-1.123f, floatOI, true)));
 
     PrimitiveObjectInspector doubleOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.DOUBLE);
-    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((double)1423321282.123, doubleOI, true).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:58.877", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((double)-1.123, doubleOI, true).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((double)1423321282.123, doubleOI, true)));
+    assertEquals("1969-12-31 23:59:58.877", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp((double)-1.123, doubleOI, true)));
 
     PrimitiveObjectInspector decimalOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.DECIMAL);
-    assertEquals("2015-02-07 15:01:22.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(HiveDecimal.create(1423321282L), decimalOI, true).toSqlTimestamp()));
-    assertEquals("1969-12-31 23:59:59.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(HiveDecimal.create(-1), decimalOI, true).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(HiveDecimal.create(1423321282L), decimalOI, true)));
+    assertEquals("1969-12-31 23:59:59.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(HiveDecimal.create(-1), decimalOI, true)));
 
     PrimitiveObjectInspector stringOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.STRING);
-    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp("2015-02-07 15:01:22.123", stringOI, true).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.123", localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp("2015-02-07 15:01:22.123", stringOI, true)));
 
     PrimitiveObjectInspector charOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.CHAR);
-    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new HiveChar("2015-02-07 15:01:22.123", 30), charOI, true).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.123", localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new HiveChar("2015-02-07 15:01:22.123", 30), charOI, true)));
 
     PrimitiveObjectInspector varcharOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.VARCHAR);
-    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new HiveVarchar("2015-02-07 15:01:22.123",30), varcharOI, true).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.123", localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new HiveVarchar("2015-02-07 15:01:22.123",30), varcharOI, true)));
 
     PrimitiveObjectInspector dateOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.DATE);
-    assertEquals("2015-02-07 00:00:00.000", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(Date.ofEpochMilli(1423321282123L), dateOI, true).toSqlTimestamp()));
+    assertEquals("2015-02-07 00:00:00.000", localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new Date(1423321282123L), dateOI, true)));
 
     PrimitiveObjectInspector timestampOI = PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(PrimitiveCategory.TIMESTAMP);
-    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(Timestamp.ofEpochMilli(1423321282123L), timestampOI, true).toSqlTimestamp()));
+    assertEquals("2015-02-07 15:01:22.123", gmtDateFormat.format(PrimitiveObjectInspectorUtils.getTimestamp(new Timestamp(1423321282123L), timestampOI, true)));
   }
 
   @Test
   public void testGetTimestampFromString() {
-    DateFormat udfDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
-    udfDateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
-    assertEquals("2015-02-07 00:00:00.000", udfDateFormat.format(
-        PrimitiveObjectInspectorUtils.getTimestampFromString("2015-02-07").toSqlTimestamp()));
+    DateFormat localDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
+    assertEquals("2015-02-07 00:00:00.000", localDateFormat.format(PrimitiveObjectInspectorUtils.getTimestampFromString("2015-02-07")));
   }
 
   @Test

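For context on the gmtDateFormat/localDateFormat swap above: java.text.SimpleDateFormat formats in the JVM's default time zone unless setTimeZone is called, and java.sql.Timestamp.valueOf parses local wall-clock time, which is why the string/char/varchar/date assertions switch to the local formatter after the revert. A minimal standalone sketch of the difference (the class name is illustrative, not part of the patch):

    import java.sql.Timestamp;
    import java.text.SimpleDateFormat;
    import java.util.TimeZone;

    public class ZoneFormatDemo {
      public static void main(String[] args) {
        SimpleDateFormat gmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
        gmt.setTimeZone(TimeZone.getTimeZone("GMT"));
        SimpleDateFormat local = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
        // No setTimeZone call: formats in the JVM's default zone.

        // Timestamp.valueOf parses local wall-clock time, so only the local
        // formatter is guaranteed to print the original string back unchanged.
        Timestamp ts = Timestamp.valueOf("2015-02-07 15:01:22.123");
        System.out.println(local.format(ts)); // 2015-02-07 15:01:22.123
        System.out.println(gmt.format(ts));   // shifted unless the JVM zone is GMT
      }
    }
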
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
index f45b71f..51e081b 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
@@ -1284,18 +1284,13 @@ class MetaStoreDirectSql {
       if (colType == FilterType.Date && valType == FilterType.String) {
         // Filter.g cannot parse a quoted date; try to parse date here too.
         try {
-          nodeValue = MetaStoreUtils.PARTITION_DATE_FORMAT.get().parse((String)nodeValue);
+          nodeValue = new java.sql.Date(
+              org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.PARTITION_DATE_FORMAT.get().parse((String)nodeValue).getTime());
           valType = FilterType.Date;
         } catch (ParseException pe) { // do nothing, handled below - types will mismatch
         }
       }
 
-      // We format it so we are sure we are getting the right value
-      if (valType == FilterType.Date) {
-        // Format
-        nodeValue = MetaStoreUtils.PARTITION_DATE_FORMAT.get().format(nodeValue);
-      }
-
       if (colType != valType) {
         // It's not clear how filtering for e.g. "stringCol > 5" should work (which side is
         // to be coerced?). Let the expression evaluation sort this one out, not metastore.

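The replacement line above parses the quoted string with PARTITION_DATE_FORMAT and wraps the resulting epoch millis in a java.sql.Date before the type check. A standalone sketch of that conversion, with an illustrative date value and class name:

    import java.text.ParseException;
    import java.text.SimpleDateFormat;

    public class PartitionDateCoercion {
      public static void main(String[] args) throws ParseException {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
        fmt.setLenient(false);
        // Same shape as the patched MetaStoreDirectSql line: parse the quoted
        // string, then wrap the epoch millis in java.sql.Date for comparison.
        java.util.Date parsed = fmt.parse("2015-02-07");
        java.sql.Date sqlDate = new java.sql.Date(parsed.getTime());
        System.out.println(sqlDate); // 2015-02-07
      }
    }
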
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/parser/Filter.g
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/parser/Filter.g b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/parser/Filter.g
index 4559383..81111a0 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/parser/Filter.g
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/parser/Filter.g
@@ -38,7 +38,6 @@ package org.apache.hadoop.hive.metastore.parser;
 import java.sql.Date;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
-import java.util.TimeZone;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 }
@@ -53,7 +52,6 @@ import java.util.regex.Pattern;
     protected SimpleDateFormat initialValue() {
       SimpleDateFormat val = new SimpleDateFormat("yyyy-MM-dd");
       val.setLenient(false); // Without this, 2020-20-20 becomes 2021-08-20.
-      val.setTimeZone(TimeZone.getTimeZone("UTC"));
       return val;
     };
   };

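Both this hunk and the MetaStoreUtils one below keep the setLenient(false) guard; without it, as the inline comment notes, SimpleDateFormat silently rolls invalid fields over instead of rejecting them. A minimal standalone demonstration (class name is illustrative):

    import java.text.ParseException;
    import java.text.SimpleDateFormat;

    public class LenientDateDemo {
      public static void main(String[] args) throws ParseException {
        SimpleDateFormat lenient = new SimpleDateFormat("yyyy-MM-dd");
        // Lenient (the default) rolls out-of-range fields forward:
        // month 20 of 2020 becomes August of the following year.
        System.out.println(lenient.format(lenient.parse("2020-20-20"))); // 2021-08-20

        SimpleDateFormat strict = new SimpleDateFormat("yyyy-MM-dd");
        strict.setLenient(false); // same guard as the parser rule above
        try {
          strict.parse("2020-20-20");
        } catch (ParseException e) {
          System.out.println("rejected as expected");
        }
      }
    }
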
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
index 32850fd..16a8c75 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
@@ -96,7 +96,6 @@ import java.util.Map.Entry;
 import java.util.SortedMap;
 import java.util.SortedSet;
 import java.util.StringJoiner;
-import java.util.TimeZone;
 import java.util.TreeMap;
 import java.util.TreeSet;
 import java.util.concurrent.Callable;
@@ -115,7 +114,6 @@ public class MetaStoreUtils {
     protected DateFormat initialValue() {
       DateFormat val = new SimpleDateFormat("yyyy-MM-dd");
       val.setLenient(false); // Without this, 2020-20-20 becomes 2021-08-20.
-      val.setTimeZone(TimeZone.getTimeZone("UTC"));
       return val;
     }
   };

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
----------------------------------------------------------------------
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
index f11a319..3b5f3ba 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
@@ -81,7 +81,7 @@ public class TimestampColumnVector extends ColumnVector {
 
     scratchWritable = null;     // Allocated by caller.
 
-    isUTC = true;
+    isUTC = false;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/storage-api/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java
----------------------------------------------------------------------
diff --git a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java
index 3894e09..6325d5d 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java
@@ -38,7 +38,6 @@ import org.apache.hadoop.io.WritableUtils;
  *    YYYY-MM-DD
  *
  */
-@Deprecated
 public class DateWritable implements WritableComparable<DateWritable> {
 
   private static final long MILLIS_PER_DAY = TimeUnit.DAYS.toMillis(1);

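As the retained header comment says, DateWritable stores a date as days since the UNIX epoch, and the MILLIS_PER_DAY constant above is the conversion factor behind helpers such as daysToMillis (referenced by GenVectorCode further down). A hedged sketch of that arithmetic, assuming plain UTC math; the real class also has to adjust for the local time zone when bridging to java.sql.Date:

    import java.util.concurrent.TimeUnit;

    public class EpochDayDemo {
      private static final long MILLIS_PER_DAY = TimeUnit.DAYS.toMillis(1);

      // Illustrative equivalents of the days<->millis conversions, not
      // DateWritable's actual implementation.
      static long daysToMillis(int days) {
        return days * MILLIS_PER_DAY;
      }

      static int millisToDays(long millis) {
        return (int) (millis / MILLIS_PER_DAY);
      }

      public static void main(String[] args) {
        int days = millisToDays(1287558000000L); // 2010-10-20 07:00:00 UTC
        System.out.println(days);                // 14902 days since 1970-01-01
        System.out.println(daysToMillis(days));  // midnight of that day, in millis
      }
    }
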
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
----------------------------------------------------------------------
diff --git a/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java b/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
index fbd0c06..3d9f262 100644
--- a/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
+++ b/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
@@ -110,7 +110,7 @@ public class TestStructColumnVector {
     batch.cols[0] = x;
     batch.cols[1] = y;
     batch.reset();
-    Timestamp ts = new Timestamp(946684800000L);
+    Timestamp ts = Timestamp.valueOf("2000-01-01 00:00:00");
     for(int r=0; r < 10; ++r) {
       batch.size += 1;
       x1.vector[r] = 3 * r;
@@ -120,16 +120,16 @@ public class TestStructColumnVector {
       y.setRef(r, buffer, 0, buffer.length);
     }
     final String EXPECTED = ("Column vector types: 0:STRUCT<LONG, TIMESTAMP>, 1:BYTES\n" +
-        "[[0, 2000-01-01 00:00:01], \"value 0\"]\n" +
-        "[[3, 2000-01-01 00:00:02], \"value 1\"]\n" +
-        "[[6, 2000-01-01 00:00:03], \"value 2\"]\n" +
-        "[[9, 2000-01-01 00:00:04], \"value 3\"]\n" +
-        "[[12, 2000-01-01 00:00:05], \"value 4\"]\n" +
-        "[[15, 2000-01-01 00:00:06], \"value 5\"]\n" +
-        "[[18, 2000-01-01 00:00:07], \"value 6\"]\n" +
-        "[[21, 2000-01-01 00:00:08], \"value 7\"]\n" +
-        "[[24, 2000-01-01 00:00:09], \"value 8\"]\n" +
-        "[[27, 2000-01-01 00:00:10], \"value 9\"]");
+        "[[0, 2000-01-01 00:00:01.0], \"value 0\"]\n" +
+        "[[3, 2000-01-01 00:00:02.0], \"value 1\"]\n" +
+        "[[6, 2000-01-01 00:00:03.0], \"value 2\"]\n" +
+        "[[9, 2000-01-01 00:00:04.0], \"value 3\"]\n" +
+        "[[12, 2000-01-01 00:00:05.0], \"value 4\"]\n" +
+        "[[15, 2000-01-01 00:00:06.0], \"value 5\"]\n" +
+        "[[18, 2000-01-01 00:00:07.0], \"value 6\"]\n" +
+        "[[21, 2000-01-01 00:00:08.0], \"value 7\"]\n" +
+        "[[24, 2000-01-01 00:00:09.0], \"value 8\"]\n" +
+        "[[27, 2000-01-01 00:00:10.0], \"value 9\"]");
     assertEquals(EXPECTED, batch.toString());
   }
 

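One note on the Timestamp change above: new Timestamp(946684800000L) fixes an instant in UTC epoch millis (2000-01-01 00:00:00 UTC), while Timestamp.valueOf parses local wall-clock time, so the two expressions denote the same instant only in a GMT-default JVM. A small illustrative comparison (class name is hypothetical):

    import java.sql.Timestamp;

    public class TimestampCtorDemo {
      public static void main(String[] args) {
        // Epoch millis are zone-independent; this instant is
        // 2000-01-01 00:00:00 UTC regardless of the JVM zone.
        Timestamp fromMillis = new Timestamp(946684800000L);

        // valueOf parses the string as local wall-clock time, so the
        // underlying instant shifts with the JVM's default zone.
        Timestamp fromString = Timestamp.valueOf("2000-01-01 00:00:00");

        System.out.println(fromMillis.getTime() == fromString.getTime());
        // true only when the JVM default zone is GMT/UTC
      }
    }
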
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
----------------------------------------------------------------------
diff --git a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
index 8b36371..6d7ed3e 100644
--- a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
+++ b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
@@ -1589,14 +1589,14 @@ public class GenVectorCode extends Task {
       vectorType = "long";
       getPrimitiveMethod = "getDate";
       getValueMethod = "";
-      conversionMethod = "DateWritableV2.dateToDays";
+      conversionMethod = "DateWritable.dateToDays";
       // Special case - Date requires its own specific BetweenDynamicValue class, but derives from FilterLongColumnBetween
       typeName = "Long";
     } else if (operandType.equals("timestamp")) {
       defaultValue = "new Timestamp(0)";
       vectorType = "Timestamp";
       getPrimitiveMethod = "getTimestamp";
-      getValueMethod = ".toSqlTimestamp()";
+      getValueMethod = "";
       conversionMethod = "";
     } else {
       throw new IllegalArgumentException("Type " + operandType + " not supported");
@@ -3159,7 +3159,8 @@ public class GenVectorCode extends Task {
   private String getDTIScalarColumnDisplayBody(String type) {
     if (type.equals("date")) {
       return
-          "Date dt = Date.ofEpochMilli(DateWritableV2.daysToMillis((int) value));\n" +
+          "Date dt = new Date(0);" +
+          "    dt.setTime(DateWritable.daysToMillis((int) value));\n" +
           "    return  \"date \" + dt.toString() + \", \" + getColumnParamString(0, colNum);";
     } else {
       return
@@ -3170,7 +3171,8 @@ public class GenVectorCode extends Task {
   private String getDTIColumnScalarDisplayBody(String type) {
     if (type.equals("date")) {
       return
-          "Date dt = Date.ofEpochMilli(DateWritableV2.daysToMillis((int) value));\n" +
+          "Date dt = new Date(0);" +
+          "    dt.setTime(DateWritable.daysToMillis((int) value));\n" +
           "    return getColumnParamString(0, colNum) + \", date \" + dt.toString();";
     } else {
       return
@@ -3840,9 +3842,9 @@ public class GenVectorCode extends Task {
     } else if (primitiveType.equals("interval_day_time")) {
       return "HiveIntervalDayTimeWritable";
     } else if (primitiveType.equals("date")) {
-      return "DateWritableV2";
+      return "HiveDateWritable";
     } else if (primitiveType.equals("timestamp")) {
-      return "TimestampWritable";
+      return "HiveTimestampWritable";
     }
     throw new Exception("Unimplemented primitive output writable: " + primitiveType);
   }


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out b/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
index a2e0fdd..84f9573 100644
--- a/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
@@ -262,7 +262,7 @@ STAGE PLANS:
             TableScan Vectorization:
                 native: true
             Select Operator
-              expressions: fl_time (type: timestamp), to_unix_timestamp(fl_time) (type: bigint), year(fl_time) (type: int), month(fl_time) (type: int), day(fl_time) (type: int), day(fl_time) (type: int), dayofweek(fl_time) (type: int), weekofyear(fl_time) (type: int), CAST( fl_time AS DATE) (type: date), to_date(fl_time) (type: date), date_add(fl_time, 2) (type: date), date_sub(fl_time, 2) (type: date), datediff(fl_time, '2000-01-01') (type: int), datediff(fl_time, DATE'2000-01-01') (type: int), datediff(fl_time, TIMESTAMP'2000-01-01 00:00:00') (type: int), datediff(fl_time, TIMESTAMP'2000-01-01 11:13:09') (type: int), datediff(fl_time, '2007-03-14') (type: int), datediff(fl_time, DATE'2007-03-14') (type: int), datediff(fl_time, TIMESTAMP'2007-03-14 00:00:00') (type: int), datediff(fl_time, TIMESTAMP'2007-03-14 08:21:59') (type: int)
+              expressions: fl_time (type: timestamp), to_unix_timestamp(fl_time) (type: bigint), year(fl_time) (type: int), month(fl_time) (type: int), day(fl_time) (type: int), dayofmonth(fl_time) (type: int), dayofweek(fl_time) (type: int), weekofyear(fl_time) (type: int), CAST( fl_time AS DATE) (type: date), to_date(fl_time) (type: date), date_add(fl_time, 2) (type: date), date_sub(fl_time, 2) (type: date), datediff(fl_time, '2000-01-01') (type: int), datediff(fl_time, DATE'2000-01-01') (type: int), datediff(fl_time, TIMESTAMP'2000-01-01 00:00:00.0') (type: int), datediff(fl_time, TIMESTAMP'2000-01-01 11:13:09.0') (type: int), datediff(fl_time, '2007-03-14') (type: int), datediff(fl_time, DATE'2007-03-14') (type: int), datediff(fl_time, TIMESTAMP'2007-03-14 00:00:00.0') (type: int), datediff(fl_time, TIMESTAMP'2007-03-14 08:21:59.0') (type: int)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
               Select Vectorization:
                   className: VectorSelectOperator
@@ -348,143 +348,143 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_udf_flight_orc
 #### A masked pattern was here ####
 fl_time	_c1	_c2	_c3	_c4	_c5	_c6	_c7	_c8	_c9	_c10	_c11	_c12	_c13	_c14	_c15	_c16	_c17	_c18	_c19
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   fl_date,
   to_unix_timestamp(fl_date),
@@ -550,7 +550,7 @@ STAGE PLANS:
             TableScan Vectorization:
                 native: true
             Select Operator
-              expressions: fl_date (type: date), to_unix_timestamp(fl_date) (type: bigint), year(fl_date) (type: int), month(fl_date) (type: int), day(fl_date) (type: int), day(fl_date) (type: int), dayofweek(fl_date) (type: int), weekofyear(fl_date) (type: int), fl_date (type: date), to_date(fl_date) (type: date), date_add(fl_date, 2) (type: date), date_sub(fl_date, 2) (type: date), datediff(fl_date, '2000-01-01') (type: int), datediff(fl_date, DATE'2000-01-01') (type: int), datediff(fl_date, TIMESTAMP'2000-01-01 00:00:00') (type: int), datediff(fl_date, TIMESTAMP'2000-01-01 11:13:09') (type: int), datediff(fl_date, '2007-03-14') (type: int), datediff(fl_date, DATE'2007-03-14') (type: int), datediff(fl_date, TIMESTAMP'2007-03-14 00:00:00') (type: int), datediff(fl_date, TIMESTAMP'2007-03-14 08:21:59') (type: int)
+              expressions: fl_date (type: date), to_unix_timestamp(fl_date) (type: bigint), year(fl_date) (type: int), month(fl_date) (type: int), day(fl_date) (type: int), dayofmonth(fl_date) (type: int), dayofweek(fl_date) (type: int), weekofyear(fl_date) (type: int), fl_date (type: date), to_date(fl_date) (type: date), date_add(fl_date, 2) (type: date), date_sub(fl_date, 2) (type: date), datediff(fl_date, '2000-01-01') (type: int), datediff(fl_date, DATE'2000-01-01') (type: int), datediff(fl_date, TIMESTAMP'2000-01-01 00:00:00.0') (type: int), datediff(fl_date, TIMESTAMP'2000-01-01 11:13:09.0') (type: int), datediff(fl_date, '2007-03-14') (type: int), datediff(fl_date, DATE'2007-03-14') (type: int), datediff(fl_date, TIMESTAMP'2007-03-14 00:00:00.0') (type: int), datediff(fl_date, TIMESTAMP'2007-03-14 08:21:59.0') (type: int)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
               Select Vectorization:
                   className: VectorSelectOperator
@@ -636,143 +636,143 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_udf_flight_orc
 #### A masked pattern was here ####
 fl_date	_c1	_c2	_c3	_c4	_c5	_c6	_c7	_c8	_c9	_c10	_c11	_c12	_c13	_c14	_c15	_c16	_c17	_c18	_c19
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   fl_time,
   fl_date,
@@ -842,7 +842,7 @@ STAGE PLANS:
             TableScan Vectorization:
                 native: true
             Select Operator
-              expressions: fl_time (type: timestamp), fl_date (type: date), (year(fl_time) = year(fl_date)) (type: boolean), (month(fl_time) = month(fl_date)) (type: boolean), (day(fl_time) = day(fl_date)) (type: boolean), (day(fl_time) = day(fl_date)) (type: boolean), (dayofweek(fl_time) = dayofweek(fl_date)) (type: boolean), (weekofyear(fl_time) = weekofyear(fl_date)) (type: boolean), (CAST( fl_time AS DATE) = fl_date) (type: boolean), (to_date(fl_time) = to_date(fl_date)) (type: boolean), (date_add(fl_time, 2) = date_add(fl_date, 2)) (type: boolean), (date_sub(fl_time, 2) = date_sub(fl_date, 2)) (type: boolean), (datediff(fl_time, '2000-01-01') = datediff(fl_date, '2000-01-01')) (type: boolean), (datediff(fl_time, DATE'2000-01-01') = datediff(fl_date, DATE'2000-01-01')) (type: boolean), (datediff(fl_time, TIMESTAMP'2000-01-01 00:00:00') = datediff(fl_date, TIMESTAMP'2000-01-01 00:00:00')) (type: boolean), (datediff(fl_time, TIMESTAMP'2000-01-01 11:13:09') = datediff(fl_date, TIMESTAMP'2000-01-01 11:13:09')) (type: boolean), (datediff(fl_time, '2007-03-14') = datediff(fl_date, '2007-03-14')) (type: boolean), (datediff(fl_time, DATE'2007-03-14') = datediff(fl_date, DATE'2007-03-14')) (type: boolean), (datediff(fl_time, TIMESTAMP'2007-03-14 00:00:00') = datediff(fl_date, TIMESTAMP'2007-03-14 00:00:00')) (type: boolean), (datediff(fl_time, TIMESTAMP'2007-03-14 08:21:59') = datediff(fl_date, TIMESTAMP'2007-03-14 08:21:59')) (type: boolean), (datediff(fl_date, '2000-01-01') = datediff(fl_date, DATE'2000-01-01')) (type: boolean), (datediff(fl_date, '2007-03-14') = datediff(fl_date, DATE'2007-03-14')) (type: boolean)
+              expressions: fl_time (type: timestamp), fl_date (type: date), (year(fl_time) = year(fl_date)) (type: boolean), (month(fl_time) = month(fl_date)) (type: boolean), (day(fl_time) = day(fl_date)) (type: boolean), (dayofmonth(fl_time) = dayofmonth(fl_date)) (type: boolean), (dayofweek(fl_time) = dayofweek(fl_date)) (type: boolean), (weekofyear(fl_time) = weekofyear(fl_date)) (type: boolean), (CAST( fl_time AS DATE) = fl_date) (type: boolean), (to_date(fl_time) = to_date(fl_date)) (type: boolean), (date_add(fl_time, 2) = date_add(fl_date, 2)) (type: boolean), (date_sub(fl_time, 2) = date_sub(fl_date, 2)) (type: boolean), (datediff(fl_time, '2000-01-01') = datediff(fl_date, '2000-01-01')) (type: boolean), (datediff(fl_time, DATE'2000-01-01') = datediff(fl_date, DATE'2000-01-01')) (type: boolean), (datediff(fl_time, TIMESTAMP'2000-01-01 00:00:00.0') = datediff(fl_date, TIMESTAMP'2000-01-01 00:00:00.0')) (type: boolean), (datediff(fl_time, TIMESTAMP'2000-01-01 11:13:09.0') = datediff(fl_date, TIMESTAMP'2000-01-01 11:13:09.0')) (type: boolean), (datediff(fl_time, '2007-03-14') = datediff(fl_date, '2007-03-14')) (type: boolean), (datediff(fl_time, DATE'2007-03-14') = datediff(fl_date, DATE'2007-03-14')) (type: boolean), (datediff(fl_time, TIMESTAMP'2007-03-14 00:00:00.0') = datediff(fl_date, TIMESTAMP'2007-03-14 00:00:00.0')) (type: boolean), (datediff(fl_time, TIMESTAMP'2007-03-14 08:21:59.0') = datediff(fl_date, TIMESTAMP'2007-03-14 08:21:59.0')) (type: boolean), (datediff(fl_date, '2000-01-01') = datediff(fl_date, DATE'2000-01-01')) (type: boolean), (datediff(fl_date, '2007-03-14') = datediff(fl_date, DATE'2007-03-14')) (type: boolean)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21
               Select Vectorization:
                   className: VectorSelectOperator

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
index f196c5a..f845873 100644
--- a/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
@@ -76,7 +76,7 @@ POSTHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test_n2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test_n2
 #### A masked pattern was here ####
-0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652058 23:59:59.999999999
+0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652060 23:59:59.999999999
 PREHOOK: query: SELECT ts FROM test_n2 WHERE ts IN (timestamp '0001-01-01 00:00:00.000000000', timestamp '0002-02-02 00:00:00.000000000')
 PREHOOK: type: QUERY
 PREHOOK: Input: default@test_n2
@@ -201,7 +201,7 @@ POSTHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test_n2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test_n2
 #### A masked pattern was here ####
-0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652058 23:59:59.999999999
+0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652060 23:59:59.999999999
 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
 SELECT ts FROM test_n2 WHERE ts IN (timestamp '0001-01-01 00:00:00.000000000', timestamp '0002-02-02 00:00:00.000000000')
 PREHOOK: type: QUERY
@@ -230,8 +230,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterTimestampColumnInList(col 0:timestamp, values [0001-01-02 16:00:00.0, 0002-02-03 16:00:00.0])
-              predicate: (ts) IN (TIMESTAMP'0001-01-01 00:00:00', TIMESTAMP'0002-02-02 00:00:00') (type: boolean)
+                  predicateExpression: FilterTimestampColumnInList(col 0:timestamp, values [0001-01-01 00:00:00.0, 0002-02-02 00:00:00.0])
+              predicate: (ts) IN (TIMESTAMP'0001-01-01 00:00:00.0', TIMESTAMP'0002-02-02 00:00:00.0') (type: boolean)
               Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: ts (type: timestamp)
@@ -388,7 +388,7 @@ POSTHOOK: query: SELECT AVG(ts), CAST(AVG(ts) AS TIMESTAMP) FROM test_n2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test_n2
 #### A masked pattern was here ####
-9.5633352E10	5000-07-02 12:00:00
+9.56332944E10	5000-07-01 13:00:00
 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
 SELECT variance(ts), var_pop(ts), var_samp(ts), std(ts), stddev(ts), stddev_pop(ts), stddev_samp(ts) FROM test_n2
 PREHOOK: type: QUERY
@@ -495,4 +495,4 @@ POSTHOOK: query: SELECT variance(ts), var_pop(ts), var_samp(ts), std(ts), stddev
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test_n2
 #### A masked pattern was here ####
-2.4891041205457024E22	2.4891041205457024E22	4.978208241091405E22	1.577689488E11	1.577689488E11	1.577689488E11	2.2311898711430646E11
+2.489106846793884E22	2.489106846793884E22	4.978213693587768E22	1.577690352E11	1.577690352E11	1.577690352E11	2.2311910930235822E11


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/orc_merge12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge12.q.out b/ql/src/test/results/clientpositive/orc_merge12.q.out
index 045ba42..f010c94 100644
--- a/ql/src/test/results/clientpositive/orc_merge12.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge12.q.out
@@ -111,7 +111,7 @@ POSTHOOK: query: select sum(hash(*)) from alltypesorc3xcols
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc3xcols
 #### A masked pattern was here ####
--73477136966
+-302946892512
 PREHOOK: query: alter table alltypesorc3xcols concatenate
 PREHOOK: type: ALTER_TABLE_MERGE
 PREHOOK: Input: default@alltypesorc3xcols
@@ -137,7 +137,7 @@ POSTHOOK: query: select sum(hash(*)) from alltypesorc3xcols
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc3xcols
 #### A masked pattern was here ####
--73477136966
+-302946892512
 PREHOOK: query: select * from alltypesorc3xcols limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alltypesorc3xcols
@@ -147,8 +147,8 @@ PREHOOK: Input: default@alltypesorc3xcols
 File Version: 0.12 with ORC_135
 Rows: 24576
 Compression: ZLIB
-Compression size: 131072
-Type: struct<atinyint:tinyint,asmallint:smallint,aint:int,abigint:bigint,afloat:float,adouble:double,astring1:string,astring2:string,atimestamp1:timestamp,atimestamp2:timestamp,aboolean1:boolean,aboolean2:boolean,btinyint:tinyint,bsmallint:smallint,bint:int,bbigint:bigint,bfloat:float,bdouble:double,bstring1:string,bstring2:string,btimestamp1:timestamp,btimestamp2:timestamp,bboolean1:boolean,bboolean2:boolean,ctinyint:tinyint,csmallint:smallint,cint:int,cbigint:bigint,cfloat:float,cdouble:double,cstring1:string,cstring2:string,ctimestamp1:timestamp,ctimestamp2:timestamp,cboolean1:boolean,cboolean2:boolean>
+Compression size: 262144
+Type: struct<_col0:tinyint,_col1:smallint,_col2:int,_col3:bigint,_col4:float,_col5:double,_col6:string,_col7:string,_col8:timestamp,_col9:timestamp,_col10:boolean,_col11:boolean,_col12:tinyint,_col13:smallint,_col14:int,_col15:bigint,_col16:float,_col17:double,_col18:string,_col19:string,_col20:timestamp,_col21:timestamp,_col22:boolean,_col23:boolean,_col24:tinyint,_col25:smallint,_col26:int,_col27:bigint,_col28:float,_col29:double,_col30:string,_col31:string,_col32:timestamp,_col33:timestamp,_col34:boolean,_col35:boolean>
 
 Stripe Statistics:
   Stripe 1:
@@ -161,8 +161,8 @@ Stripe Statistics:
     Column 6: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
     Column 7: count: 12288 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 442368
     Column 8: count: 12288 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 884736
-    Column 9: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-    Column 10: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+    Column 9: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+    Column 10: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
     Column 11: count: 9174 hasNull: true true: 6138
     Column 12: count: 9173 hasNull: true true: 3983
     Column 13: count: 9173 hasNull: true min: -64 max: 62 sum: -39856
@@ -173,8 +173,8 @@ Stripe Statistics:
     Column 18: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
     Column 19: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881
     Column 20: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134
-    Column 21: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-    Column 22: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+    Column 21: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+    Column 22: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
     Column 23: count: 9174 hasNull: true true: 6138
     Column 24: count: 9173 hasNull: true true: 3983
     Column 25: count: 9173 hasNull: true min: -64 max: 62 sum: -39856
@@ -185,8 +185,8 @@ Stripe Statistics:
     Column 30: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
     Column 31: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881
     Column 32: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134
-    Column 33: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-    Column 34: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+    Column 33: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+    Column 34: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
     Column 35: count: 9174 hasNull: true true: 6138
     Column 36: count: 9173 hasNull: true true: 3983
   Stripe 2:
@@ -199,8 +199,8 @@ Stripe Statistics:
     Column 6: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
     Column 7: count: 12288 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 442368
     Column 8: count: 12288 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 884736
-    Column 9: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-    Column 10: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+    Column 9: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+    Column 10: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
     Column 11: count: 9174 hasNull: true true: 6138
     Column 12: count: 9173 hasNull: true true: 3983
     Column 13: count: 9173 hasNull: true min: -64 max: 62 sum: -39856
@@ -211,8 +211,8 @@ Stripe Statistics:
     Column 18: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
     Column 19: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881
     Column 20: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134
-    Column 21: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-    Column 22: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+    Column 21: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+    Column 22: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
     Column 23: count: 9174 hasNull: true true: 6138
     Column 24: count: 9173 hasNull: true true: 3983
     Column 25: count: 9173 hasNull: true min: -64 max: 62 sum: -39856
@@ -223,8 +223,8 @@ Stripe Statistics:
     Column 30: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
     Column 31: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881
     Column 32: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134
-    Column 33: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-    Column 34: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+    Column 33: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+    Column 34: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
     Column 35: count: 9174 hasNull: true true: 6138
     Column 36: count: 9173 hasNull: true true: 3983
 
@@ -238,8 +238,8 @@ File Statistics:
   Column 6: count: 18348 hasNull: true min: -16379.0 max: 9763215.5639 sum: 1.12447306061E8
   Column 7: count: 24576 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 884736
   Column 8: count: 24576 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 1769472
-  Column 9: count: 18346 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-  Column 10: count: 18348 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+  Column 9: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+  Column 10: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
   Column 11: count: 18348 hasNull: true true: 12276
   Column 12: count: 18346 hasNull: true true: 7966
   Column 13: count: 18346 hasNull: true min: -64 max: 62 sum: -79712
@@ -250,8 +250,8 @@ File Statistics:
   Column 18: count: 18348 hasNull: true min: -16379.0 max: 9763215.5639 sum: 1.12447306061E8
   Column 19: count: 18348 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 255762
   Column 20: count: 18346 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 298268
-  Column 21: count: 18346 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-  Column 22: count: 18348 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+  Column 21: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+  Column 22: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
   Column 23: count: 18348 hasNull: true true: 12276
   Column 24: count: 18346 hasNull: true true: 7966
   Column 25: count: 18346 hasNull: true min: -64 max: 62 sum: -79712
@@ -262,136 +262,136 @@ File Statistics:
   Column 30: count: 18348 hasNull: true min: -16379.0 max: 9763215.5639 sum: 1.12447306061E8
   Column 31: count: 18348 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 255762
   Column 32: count: 18346 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 298268
-  Column 33: count: 18346 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-  Column 34: count: 18348 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+  Column 33: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+  Column 34: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
   Column 35: count: 18348 hasNull: true true: 12276
   Column 36: count: 18346 hasNull: true true: 7966
 
 Stripes:
-  Stripe: offset: 3 data: 1498336 rows: 12288 tail: 493 index: 2821
+  Stripe: offset: 3 data: 1500017 rows: 12288 tail: 501 index: 2836
     Stream: column 0 section ROW_INDEX start: 3 length 21
     Stream: column 1 section ROW_INDEX start: 24 length 53
     Stream: column 2 section ROW_INDEX start: 77 length 67
-    Stream: column 3 section ROW_INDEX start: 144 length 79
-    Stream: column 4 section ROW_INDEX start: 223 length 83
-    Stream: column 5 section ROW_INDEX start: 306 length 77
-    Stream: column 6 section ROW_INDEX start: 383 length 77
-    Stream: column 7 section ROW_INDEX start: 460 length 170
-    Stream: column 8 section ROW_INDEX start: 630 length 264
-    Stream: column 9 section ROW_INDEX start: 894 length 63
-    Stream: column 10 section ROW_INDEX start: 957 length 57
-    Stream: column 11 section ROW_INDEX start: 1014 length 47
-    Stream: column 12 section ROW_INDEX start: 1061 length 47
-    Stream: column 13 section ROW_INDEX start: 1108 length 53
-    Stream: column 14 section ROW_INDEX start: 1161 length 67
-    Stream: column 15 section ROW_INDEX start: 1228 length 79
-    Stream: column 16 section ROW_INDEX start: 1307 length 83
-    Stream: column 17 section ROW_INDEX start: 1390 length 77
-    Stream: column 18 section ROW_INDEX start: 1467 length 77
-    Stream: column 19 section ROW_INDEX start: 1544 length 115
-    Stream: column 20 section ROW_INDEX start: 1659 length 93
-    Stream: column 21 section ROW_INDEX start: 1752 length 63
-    Stream: column 22 section ROW_INDEX start: 1815 length 57
-    Stream: column 23 section ROW_INDEX start: 1872 length 47
-    Stream: column 24 section ROW_INDEX start: 1919 length 47
-    Stream: column 25 section ROW_INDEX start: 1966 length 53
-    Stream: column 26 section ROW_INDEX start: 2019 length 67
-    Stream: column 27 section ROW_INDEX start: 2086 length 79
-    Stream: column 28 section ROW_INDEX start: 2165 length 83
-    Stream: column 29 section ROW_INDEX start: 2248 length 77
-    Stream: column 30 section ROW_INDEX start: 2325 length 77
-    Stream: column 31 section ROW_INDEX start: 2402 length 115
-    Stream: column 32 section ROW_INDEX start: 2517 length 93
-    Stream: column 33 section ROW_INDEX start: 2610 length 63
-    Stream: column 34 section ROW_INDEX start: 2673 length 57
-    Stream: column 35 section ROW_INDEX start: 2730 length 47
-    Stream: column 36 section ROW_INDEX start: 2777 length 47
-    Stream: column 1 section PRESENT start: 2824 length 51
-    Stream: column 1 section DATA start: 2875 length 5448
-    Stream: column 2 section PRESENT start: 8323 length 53
-    Stream: column 2 section DATA start: 8376 length 12078
-    Stream: column 3 section PRESENT start: 20454 length 53
-    Stream: column 3 section DATA start: 20507 length 24479
-    Stream: column 4 section PRESENT start: 44986 length 52
-    Stream: column 4 section DATA start: 45038 length 24479
-    Stream: column 5 section PRESENT start: 69517 length 51
-    Stream: column 5 section DATA start: 69568 length 9927
-    Stream: column 6 section PRESENT start: 79495 length 53
-    Stream: column 6 section DATA start: 79548 length 19755
-    Stream: column 7 section DATA start: 99303 length 259558
-    Stream: column 7 section LENGTH start: 358861 length 12
-    Stream: column 8 section DATA start: 358873 length 518777
-    Stream: column 8 section LENGTH start: 877650 length 12
-    Stream: column 9 section PRESENT start: 877662 length 52
-    Stream: column 9 section DATA start: 877714 length 7769
-    Stream: column 9 section SECONDARY start: 885483 length 9448
-    Stream: column 10 section PRESENT start: 894931 length 58
-    Stream: column 10 section DATA start: 894989 length 7778
-    Stream: column 10 section SECONDARY start: 902767 length 9469
-    Stream: column 11 section PRESENT start: 912236 length 51
-    Stream: column 11 section DATA start: 912287 length 782
-    Stream: column 12 section PRESENT start: 913069 length 54
-    Stream: column 12 section DATA start: 913123 length 783
-    Stream: column 13 section PRESENT start: 913906 length 51
-    Stream: column 13 section DATA start: 913957 length 5448
-    Stream: column 14 section PRESENT start: 919405 length 53
-    Stream: column 14 section DATA start: 919458 length 12078
-    Stream: column 15 section PRESENT start: 931536 length 53
-    Stream: column 15 section DATA start: 931589 length 24479
-    Stream: column 16 section PRESENT start: 956068 length 52
-    Stream: column 16 section DATA start: 956120 length 24479
-    Stream: column 17 section PRESENT start: 980599 length 51
-    Stream: column 17 section DATA start: 980650 length 9927
-    Stream: column 18 section PRESENT start: 990577 length 53
-    Stream: column 18 section DATA start: 990630 length 19755
-    Stream: column 19 section PRESENT start: 1010385 length 51
-    Stream: column 19 section DATA start: 1010436 length 10942
-    Stream: column 19 section LENGTH start: 1021378 length 3722
-    Stream: column 19 section DICTIONARY_DATA start: 1025100 length 65435
-    Stream: column 20 section PRESENT start: 1090535 length 54
-    Stream: column 20 section DATA start: 1090589 length 10939
-    Stream: column 20 section LENGTH start: 1101528 length 3739
-    Stream: column 20 section DICTIONARY_DATA start: 1105267 length 66022
-    Stream: column 21 section PRESENT start: 1171289 length 52
-    Stream: column 21 section DATA start: 1171341 length 7769
-    Stream: column 21 section SECONDARY start: 1179110 length 9448
-    Stream: column 22 section PRESENT start: 1188558 length 58
-    Stream: column 22 section DATA start: 1188616 length 7778
-    Stream: column 22 section SECONDARY start: 1196394 length 9469
-    Stream: column 23 section PRESENT start: 1205863 length 51
-    Stream: column 23 section DATA start: 1205914 length 782
-    Stream: column 24 section PRESENT start: 1206696 length 54
-    Stream: column 24 section DATA start: 1206750 length 783
-    Stream: column 25 section PRESENT start: 1207533 length 51
-    Stream: column 25 section DATA start: 1207584 length 5448
-    Stream: column 26 section PRESENT start: 1213032 length 53
-    Stream: column 26 section DATA start: 1213085 length 12078
-    Stream: column 27 section PRESENT start: 1225163 length 53
-    Stream: column 27 section DATA start: 1225216 length 24479
-    Stream: column 28 section PRESENT start: 1249695 length 52
-    Stream: column 28 section DATA start: 1249747 length 24479
-    Stream: column 29 section PRESENT start: 1274226 length 51
-    Stream: column 29 section DATA start: 1274277 length 9927
-    Stream: column 30 section PRESENT start: 1284204 length 53
-    Stream: column 30 section DATA start: 1284257 length 19755
-    Stream: column 31 section PRESENT start: 1304012 length 51
-    Stream: column 31 section DATA start: 1304063 length 10942
-    Stream: column 31 section LENGTH start: 1315005 length 3722
-    Stream: column 31 section DICTIONARY_DATA start: 1318727 length 65435
-    Stream: column 32 section PRESENT start: 1384162 length 54
-    Stream: column 32 section DATA start: 1384216 length 10939
-    Stream: column 32 section LENGTH start: 1395155 length 3739
-    Stream: column 32 section DICTIONARY_DATA start: 1398894 length 66022
-    Stream: column 33 section PRESENT start: 1464916 length 52
-    Stream: column 33 section DATA start: 1464968 length 7769
-    Stream: column 33 section SECONDARY start: 1472737 length 9448
-    Stream: column 34 section PRESENT start: 1482185 length 58
-    Stream: column 34 section DATA start: 1482243 length 7778
-    Stream: column 34 section SECONDARY start: 1490021 length 9469
-    Stream: column 35 section PRESENT start: 1499490 length 51
-    Stream: column 35 section DATA start: 1499541 length 782
-    Stream: column 36 section PRESENT start: 1500323 length 54
-    Stream: column 36 section DATA start: 1500377 length 783
+    Stream: column 3 section ROW_INDEX start: 144 length 81
+    Stream: column 4 section ROW_INDEX start: 225 length 83
+    Stream: column 5 section ROW_INDEX start: 308 length 77
+    Stream: column 6 section ROW_INDEX start: 385 length 77
+    Stream: column 7 section ROW_INDEX start: 462 length 176
+    Stream: column 8 section ROW_INDEX start: 638 length 267
+    Stream: column 9 section ROW_INDEX start: 905 length 63
+    Stream: column 10 section ROW_INDEX start: 968 length 57
+    Stream: column 11 section ROW_INDEX start: 1025 length 47
+    Stream: column 12 section ROW_INDEX start: 1072 length 47
+    Stream: column 13 section ROW_INDEX start: 1119 length 53
+    Stream: column 14 section ROW_INDEX start: 1172 length 67
+    Stream: column 15 section ROW_INDEX start: 1239 length 81
+    Stream: column 16 section ROW_INDEX start: 1320 length 83
+    Stream: column 17 section ROW_INDEX start: 1403 length 77
+    Stream: column 18 section ROW_INDEX start: 1480 length 77
+    Stream: column 19 section ROW_INDEX start: 1557 length 115
+    Stream: column 20 section ROW_INDEX start: 1672 length 93
+    Stream: column 21 section ROW_INDEX start: 1765 length 63
+    Stream: column 22 section ROW_INDEX start: 1828 length 57
+    Stream: column 23 section ROW_INDEX start: 1885 length 47
+    Stream: column 24 section ROW_INDEX start: 1932 length 47
+    Stream: column 25 section ROW_INDEX start: 1979 length 53
+    Stream: column 26 section ROW_INDEX start: 2032 length 67
+    Stream: column 27 section ROW_INDEX start: 2099 length 81
+    Stream: column 28 section ROW_INDEX start: 2180 length 83
+    Stream: column 29 section ROW_INDEX start: 2263 length 77
+    Stream: column 30 section ROW_INDEX start: 2340 length 77
+    Stream: column 31 section ROW_INDEX start: 2417 length 115
+    Stream: column 32 section ROW_INDEX start: 2532 length 93
+    Stream: column 33 section ROW_INDEX start: 2625 length 63
+    Stream: column 34 section ROW_INDEX start: 2688 length 57
+    Stream: column 35 section ROW_INDEX start: 2745 length 47
+    Stream: column 36 section ROW_INDEX start: 2792 length 47
+    Stream: column 1 section PRESENT start: 2839 length 51
+    Stream: column 1 section DATA start: 2890 length 5448
+    Stream: column 2 section PRESENT start: 8338 length 53
+    Stream: column 2 section DATA start: 8391 length 12144
+    Stream: column 3 section PRESENT start: 20535 length 53
+    Stream: column 3 section DATA start: 20588 length 24618
+    Stream: column 4 section PRESENT start: 45206 length 52
+    Stream: column 4 section DATA start: 45258 length 24681
+    Stream: column 5 section PRESENT start: 69939 length 51
+    Stream: column 5 section DATA start: 69990 length 9927
+    Stream: column 6 section PRESENT start: 79917 length 53
+    Stream: column 6 section DATA start: 79970 length 19755
+    Stream: column 7 section DATA start: 99725 length 258570
+    Stream: column 7 section LENGTH start: 358295 length 108
+    Stream: column 8 section DATA start: 358403 length 517341
+    Stream: column 8 section LENGTH start: 875744 length 108
+    Stream: column 9 section PRESENT start: 875852 length 52
+    Stream: column 9 section DATA start: 875904 length 8045
+    Stream: column 9 section SECONDARY start: 883949 length 9555
+    Stream: column 10 section PRESENT start: 893504 length 58
+    Stream: column 10 section DATA start: 893562 length 8082
+    Stream: column 10 section SECONDARY start: 901644 length 9590
+    Stream: column 11 section PRESENT start: 911234 length 51
+    Stream: column 11 section DATA start: 911285 length 782
+    Stream: column 12 section PRESENT start: 912067 length 54
+    Stream: column 12 section DATA start: 912121 length 783
+    Stream: column 13 section PRESENT start: 912904 length 51
+    Stream: column 13 section DATA start: 912955 length 5448
+    Stream: column 14 section PRESENT start: 918403 length 53
+    Stream: column 14 section DATA start: 918456 length 12144
+    Stream: column 15 section PRESENT start: 930600 length 53
+    Stream: column 15 section DATA start: 930653 length 24618
+    Stream: column 16 section PRESENT start: 955271 length 52
+    Stream: column 16 section DATA start: 955323 length 24681
+    Stream: column 17 section PRESENT start: 980004 length 51
+    Stream: column 17 section DATA start: 980055 length 9927
+    Stream: column 18 section PRESENT start: 989982 length 53
+    Stream: column 18 section DATA start: 990035 length 19755
+    Stream: column 19 section PRESENT start: 1009790 length 51
+    Stream: column 19 section DATA start: 1009841 length 11009
+    Stream: column 19 section LENGTH start: 1020850 length 3722
+    Stream: column 19 section DICTIONARY_DATA start: 1024572 length 65435
+    Stream: column 20 section PRESENT start: 1090007 length 54
+    Stream: column 20 section DATA start: 1090061 length 11006
+    Stream: column 20 section LENGTH start: 1101067 length 3739
+    Stream: column 20 section DICTIONARY_DATA start: 1104806 length 66022
+    Stream: column 21 section PRESENT start: 1170828 length 52
+    Stream: column 21 section DATA start: 1170880 length 8045
+    Stream: column 21 section SECONDARY start: 1178925 length 9555
+    Stream: column 22 section PRESENT start: 1188480 length 58
+    Stream: column 22 section DATA start: 1188538 length 8082
+    Stream: column 22 section SECONDARY start: 1196620 length 9590
+    Stream: column 23 section PRESENT start: 1206210 length 51
+    Stream: column 23 section DATA start: 1206261 length 782
+    Stream: column 24 section PRESENT start: 1207043 length 54
+    Stream: column 24 section DATA start: 1207097 length 783
+    Stream: column 25 section PRESENT start: 1207880 length 51
+    Stream: column 25 section DATA start: 1207931 length 5448
+    Stream: column 26 section PRESENT start: 1213379 length 53
+    Stream: column 26 section DATA start: 1213432 length 12144
+    Stream: column 27 section PRESENT start: 1225576 length 53
+    Stream: column 27 section DATA start: 1225629 length 24618
+    Stream: column 28 section PRESENT start: 1250247 length 52
+    Stream: column 28 section DATA start: 1250299 length 24681
+    Stream: column 29 section PRESENT start: 1274980 length 51
+    Stream: column 29 section DATA start: 1275031 length 9927
+    Stream: column 30 section PRESENT start: 1284958 length 53
+    Stream: column 30 section DATA start: 1285011 length 19755
+    Stream: column 31 section PRESENT start: 1304766 length 51
+    Stream: column 31 section DATA start: 1304817 length 11009
+    Stream: column 31 section LENGTH start: 1315826 length 3722
+    Stream: column 31 section DICTIONARY_DATA start: 1319548 length 65435
+    Stream: column 32 section PRESENT start: 1384983 length 54
+    Stream: column 32 section DATA start: 1385037 length 11006
+    Stream: column 32 section LENGTH start: 1396043 length 3739
+    Stream: column 32 section DICTIONARY_DATA start: 1399782 length 66022
+    Stream: column 33 section PRESENT start: 1465804 length 52
+    Stream: column 33 section DATA start: 1465856 length 8045
+    Stream: column 33 section SECONDARY start: 1473901 length 9555
+    Stream: column 34 section PRESENT start: 1483456 length 58
+    Stream: column 34 section DATA start: 1483514 length 8082
+    Stream: column 34 section SECONDARY start: 1491596 length 9590
+    Stream: column 35 section PRESENT start: 1501186 length 51
+    Stream: column 35 section DATA start: 1501237 length 782
+    Stream: column 36 section PRESENT start: 1502019 length 54
+    Stream: column 36 section DATA start: 1502073 length 783
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT
     Encoding column 2: DIRECT_V2
@@ -437,13 +437,13 @@ Stripes:
       Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
     Row group indices for column 2:
       Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
+      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
     Row group indices for column 3:
       Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
+      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
     Row group indices for column 4:
       Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
+      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
     Row group indices for column 5:
       Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
       Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -452,16 +452,16 @@ Stripes:
       Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
     Row group indices for column 7:
       Entry 0: count: 10000 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 360000 positions: 0,0,0,0,0
-      Entry 1: count: 2288 hasNull: false min: 002d8ccb-a094-4d10-b283-999770cf8488 max: ffacef94-41da-4230-807a-509bbf50b057 sum: 82368 positions: 153708,97856,0,76,272
+      Entry 1: count: 2288 hasNull: false min: 002d8ccb-a094-4d10-b283-999770cf8488 max: ffacef94-41da-4230-807a-509bbf50b057 sum: 82368 positions: 153190,97856,0,9766,272
     Row group indices for column 8:
       Entry 0: count: 10000 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 720000 positions: 0,0,0,0,0
-      Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 384237,64640,0,76,272
+      Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 306445,195712,0,9766,272
     Row group indices for column 9:
-      Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
+      Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
     Row group indices for column 10:
-      Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
+      Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
     Row group indices for column 11:
       Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -473,13 +473,13 @@ Stripes:
       Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
     Row group indices for column 14:
       Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
+      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
     Row group indices for column 15:
       Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
+      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
     Row group indices for column 16:
       Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
+      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
     Row group indices for column 17:
       Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
       Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -488,16 +488,16 @@ Stripes:
       Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
     Row group indices for column 19:
       Entry 0: count: 7140 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yxN0212hM17E8J8bJj8D7b sum: 99028 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,8182,0
+      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,14308,0
     Row group indices for column 20:
       Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,9196,262
+      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262
     Row group indices for column 21:
-      Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
+      Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
     Row group indices for column 22:
-      Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
+      Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
     Row group indices for column 23:
       Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -509,13 +509,13 @@ Stripes:
       Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
     Row group indices for column 26:
       Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
+      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
     Row group indices for column 27:
       Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
+      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
     Row group indices for column 28:
       Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
+      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
     Row group indices for column 29:
       Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
       Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -524,146 +524,146 @@ Stripes:
       Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
     Row group indices for column 31:
       Entry 0: count: 7140 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yxN0212hM17E8J8bJj8D7b sum: 99028 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,8182,0
+      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,14308,0
     Row group indices for column 32:
       Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,9196,262
+      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262
     Row group indices for column 33:
-      Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
+      Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
     Row group indices for column 34:
-      Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
+      Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
     Row group indices for column 35:
       Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
     Row group indices for column 36:
       Entry 0: count: 6889 hasNull: true true: 3402 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2284 hasNull: true true: 581 positions: 0,168,8,0,0,520,97,1
-  Stripe: offset: 1501653 data: 1498336 rows: 12288 tail: 493 index: 2821
-    Stream: column 0 section ROW_INDEX start: 1501653 length 21
-    Stream: column 1 section ROW_INDEX start: 1501674 length 53
-    Stream: column 2 section ROW_INDEX start: 1501727 length 67
-    Stream: column 3 section ROW_INDEX start: 1501794 length 79
-    Stream: column 4 section ROW_INDEX start: 1501873 length 83
-    Stream: column 5 section ROW_INDEX start: 1501956 length 77
-    Stream: column 6 section ROW_INDEX start: 1502033 length 77
-    Stream: column 7 section ROW_INDEX start: 1502110 length 170
-    Stream: column 8 section ROW_INDEX start: 1502280 length 264
-    Stream: column 9 section ROW_INDEX start: 1502544 length 63
-    Stream: column 10 section ROW_INDEX start: 1502607 length 57
-    Stream: column 11 section ROW_INDEX start: 1502664 length 47
-    Stream: column 12 section ROW_INDEX start: 1502711 length 47
-    Stream: column 13 section ROW_INDEX start: 1502758 length 53
-    Stream: column 14 section ROW_INDEX start: 1502811 length 67
-    Stream: column 15 section ROW_INDEX start: 1502878 length 79
-    Stream: column 16 section ROW_INDEX start: 1502957 length 83
-    Stream: column 17 section ROW_INDEX start: 1503040 length 77
-    Stream: column 18 section ROW_INDEX start: 1503117 length 77
-    Stream: column 19 section ROW_INDEX start: 1503194 length 115
-    Stream: column 20 section ROW_INDEX start: 1503309 length 93
-    Stream: column 21 section ROW_INDEX start: 1503402 length 63
-    Stream: column 22 section ROW_INDEX start: 1503465 length 57
-    Stream: column 23 section ROW_INDEX start: 1503522 length 47
-    Stream: column 24 section ROW_INDEX start: 1503569 length 47
-    Stream: column 25 section ROW_INDEX start: 1503616 length 53
-    Stream: column 26 section ROW_INDEX start: 1503669 length 67
-    Stream: column 27 section ROW_INDEX start: 1503736 length 79
-    Stream: column 28 section ROW_INDEX start: 1503815 length 83
-    Stream: column 29 section ROW_INDEX start: 1503898 length 77
-    Stream: column 30 section ROW_INDEX start: 1503975 length 77
-    Stream: column 31 section ROW_INDEX start: 1504052 length 115
-    Stream: column 32 section ROW_INDEX start: 1504167 length 93
-    Stream: column 33 section ROW_INDEX start: 1504260 length 63
-    Stream: column 34 section ROW_INDEX start: 1504323 length 57
-    Stream: column 35 section ROW_INDEX start: 1504380 length 47
-    Stream: column 36 section ROW_INDEX start: 1504427 length 47
-    Stream: column 1 section PRESENT start: 1504474 length 51
-    Stream: column 1 section DATA start: 1504525 length 5448
-    Stream: column 2 section PRESENT start: 1509973 length 53
-    Stream: column 2 section DATA start: 1510026 length 12078
-    Stream: column 3 section PRESENT start: 1522104 length 53
-    Stream: column 3 section DATA start: 1522157 length 24479
-    Stream: column 4 section PRESENT start: 1546636 length 52
-    Stream: column 4 section DATA start: 1546688 length 24479
-    Stream: column 5 section PRESENT start: 1571167 length 51
-    Stream: column 5 section DATA start: 1571218 length 9927
-    Stream: column 6 section PRESENT start: 1581145 length 53
-    Stream: column 6 section DATA start: 1581198 length 19755
-    Stream: column 7 section DATA start: 1600953 length 259558
-    Stream: column 7 section LENGTH start: 1860511 length 12
-    Stream: column 8 section DATA start: 1860523 length 518777
-    Stream: column 8 section LENGTH start: 2379300 length 12
-    Stream: column 9 section PRESENT start: 2379312 length 52
-    Stream: column 9 section DATA start: 2379364 length 7769
-    Stream: column 9 section SECONDARY start: 2387133 length 9448
-    Stream: column 10 section PRESENT start: 2396581 length 58
-    Stream: column 10 section DATA start: 2396639 length 7778
-    Stream: column 10 section SECONDARY start: 2404417 length 9469
-    Stream: column 11 section PRESENT start: 2413886 length 51
-    Stream: column 11 section DATA start: 2413937 length 782
-    Stream: column 12 section PRESENT start: 2414719 length 54
-    Stream: column 12 section DATA start: 2414773 length 783
-    Stream: column 13 section PRESENT start: 2415556 length 51
-    Stream: column 13 section DATA start: 2415607 length 5448
-    Stream: column 14 section PRESENT start: 2421055 length 53
-    Stream: column 14 section DATA start: 2421108 length 12078
-    Stream: column 15 section PRESENT start: 2433186 length 53
-    Stream: column 15 section DATA start: 2433239 length 24479
-    Stream: column 16 section PRESENT start: 2457718 length 52
-    Stream: column 16 section DATA start: 2457770 length 24479
-    Stream: column 17 section PRESENT start: 2482249 length 51
-    Stream: column 17 section DATA start: 2482300 length 9927
-    Stream: column 18 section PRESENT start: 2492227 length 53
-    Stream: column 18 section DATA start: 2492280 length 19755
-    Stream: column 19 section PRESENT start: 2512035 length 51
-    Stream: column 19 section DATA start: 2512086 length 10942
-    Stream: column 19 section LENGTH start: 2523028 length 3722
-    Stream: column 19 section DICTIONARY_DATA start: 2526750 length 65435
-    Stream: column 20 section PRESENT start: 2592185 length 54
-    Stream: column 20 section DATA start: 2592239 length 10939
-    Stream: column 20 section LENGTH start: 2603178 length 3739
-    Stream: column 20 section DICTIONARY_DATA start: 2606917 length 66022
-    Stream: column 21 section PRESENT start: 2672939 length 52
-    Stream: column 21 section DATA start: 2672991 length 7769
-    Stream: column 21 section SECONDARY start: 2680760 length 9448
-    Stream: column 22 section PRESENT start: 2690208 length 58
-    Stream: column 22 section DATA start: 2690266 length 7778
-    Stream: column 22 section SECONDARY start: 2698044 length 9469
-    Stream: column 23 section PRESENT start: 2707513 length 51
-    Stream: column 23 section DATA start: 2707564 length 782
-    Stream: column 24 section PRESENT start: 2708346 length 54
-    Stream: column 24 section DATA start: 2708400 length 783
-    Stream: column 25 section PRESENT start: 2709183 length 51
-    Stream: column 25 section DATA start: 2709234 length 5448
-    Stream: column 26 section PRESENT start: 2714682 length 53
-    Stream: column 26 section DATA start: 2714735 length 12078
-    Stream: column 27 section PRESENT start: 2726813 length 53
-    Stream: column 27 section DATA start: 2726866 length 24479
-    Stream: column 28 section PRESENT start: 2751345 length 52
-    Stream: column 28 section DATA start: 2751397 length 24479
-    Stream: column 29 section PRESENT start: 2775876 length 51
-    Stream: column 29 section DATA start: 2775927 length 9927
-    Stream: column 30 section PRESENT start: 2785854 length 53
-    Stream: column 30 section DATA start: 2785907 length 19755
-    Stream: column 31 section PRESENT start: 2805662 length 51
-    Stream: column 31 section DATA start: 2805713 length 10942
-    Stream: column 31 section LENGTH start: 2816655 length 3722
-    Stream: column 31 section DICTIONARY_DATA start: 2820377 length 65435
-    Stream: column 32 section PRESENT start: 2885812 length 54
-    Stream: column 32 section DATA start: 2885866 length 10939
-    Stream: column 32 section LENGTH start: 2896805 length 3739
-    Stream: column 32 section DICTIONARY_DATA start: 2900544 length 66022
-    Stream: column 33 section PRESENT start: 2966566 length 52
-    Stream: column 33 section DATA start: 2966618 length 7769
-    Stream: column 33 section SECONDARY start: 2974387 length 9448
-    Stream: column 34 section PRESENT start: 2983835 length 58
-    Stream: column 34 section DATA start: 2983893 length 7778
-    Stream: column 34 section SECONDARY start: 2991671 length 9469
-    Stream: column 35 section PRESENT start: 3001140 length 51
-    Stream: column 35 section DATA start: 3001191 length 782
-    Stream: column 36 section PRESENT start: 3001973 length 54
-    Stream: column 36 section DATA start: 3002027 length 783
+  Stripe: offset: 1503357 data: 1500017 rows: 12288 tail: 501 index: 2836
+    Stream: column 0 section ROW_INDEX start: 1503357 length 21
+    Stream: column 1 section ROW_INDEX start: 1503378 length 53
+    Stream: column 2 section ROW_INDEX start: 1503431 length 67
+    Stream: column 3 section ROW_INDEX start: 1503498 length 81
+    Stream: column 4 section ROW_INDEX start: 1503579 length 83
+    Stream: column 5 section ROW_INDEX start: 1503662 length 77
+    Stream: column 6 section ROW_INDEX start: 1503739 length 77
+    Stream: column 7 section ROW_INDEX start: 1503816 length 176
+    Stream: column 8 section ROW_INDEX start: 1503992 length 267
+    Stream: column 9 section ROW_INDEX start: 1504259 length 63
+    Stream: column 10 section ROW_INDEX start: 1504322 length 57
+    Stream: column 11 section ROW_INDEX start: 1504379 length 47
+    Stream: column 12 section ROW_INDEX start: 1504426 length 47
+    Stream: column 13 section ROW_INDEX start: 1504473 length 53
+    Stream: column 14 section ROW_INDEX start: 1504526 length 67
+    Stream: column 15 section ROW_INDEX start: 1504593 length 81
+    Stream: column 16 section ROW_INDEX start: 1504674 length 83
+    Stream: column 17 section ROW_INDEX start: 1504757 length 77
+    Stream: column 18 section ROW_INDEX start: 1504834 length 77
+    Stream: column 19 section ROW_INDEX start: 1504911 length 115
+    Stream: column 20 section ROW_INDEX start: 1505026 length 93
+    Stream: column 21 section ROW_INDEX start: 1505119 length 63
+    Stream: column 22 section ROW_INDEX start: 1505182 length 57
+    Stream: column 23 section ROW_INDEX start: 1505239 length 47
+    Stream: column 24 section ROW_INDEX start: 1505286 length 47
+    Stream: column 25 section ROW_INDEX start: 1505333 length 53
+    Stream: column 26 section ROW_INDEX start: 1505386 length 67
+    Stream: column 27 section ROW_INDEX start: 1505453 length 81
+    Stream: column 28 section ROW_INDEX start: 1505534 length 83
+    Stream: column 29 section ROW_INDEX start: 1505617 length 77
+    Stream: column 30 section ROW_INDEX start: 1505694 length 77
+    Stream: column 31 section ROW_INDEX start: 1505771 length 115
+    Stream: column 32 section ROW_INDEX start: 1505886 length 93
+    Stream: column 33 section ROW_INDEX start: 1505979 length 63
+    Stream: column 34 section ROW_INDEX start: 1506042 length 57
+    Stream: column 35 section ROW_INDEX start: 1506099 length 47
+    Stream: column 36 section ROW_INDEX start: 1506146 length 47
+    Stream: column 1 section PRESENT start: 1506193 length 51
+    Stream: column 1 section DATA start: 1506244 length 5448
+    Stream: column 2 section PRESENT start: 1511692 length 53
+    Stream: column 2 section DATA start: 1511745 length 12144
+    Stream: column 3 section PRESENT start: 1523889 length 53
+    Stream: column 3 section DATA start: 1523942 length 24618
+    Stream: column 4 section PRESENT start: 1548560 length 52
+    Stream: column 4 section DATA start: 1548612 length 24681
+    Stream: column 5 section PRESENT start: 1573293 length 51
+    Stream: column 5 section DATA start: 1573344 length 9927
+    Stream: column 6 section PRESENT start: 1583271 length 53
+    Stream: column 6 section DATA start: 1583324 length 19755
+    Stream: column 7 section DATA start: 1603079 length 258570
+    Stream: column 7 section LENGTH start: 1861649 length 108
+    Stream: column 8 section DATA start: 1861757 length 517341
+    Stream: column 8 section LENGTH start: 2379098 length 108
+    Stream: column 9 section PRESENT start: 2379206 length 52
+    Stream: column 9 section DATA start: 2379258 length 8045
+    Stream: column 9 section SECONDARY start: 2387303 length 9555
+    Stream: column 10 section PRESENT start: 2396858 length 58
+    Stream: column 10 section DATA start: 2396916 length 8082
+    Stream: column 10 section SECONDARY start: 2404998 length 9590
+    Stream: column 11 section PRESENT start: 2414588 length 51
+    Stream: column 11 section DATA start: 2414639 length 782
+    Stream: column 12 section PRESENT start: 2415421 length 54
+    Stream: column 12 section DATA start: 2415475 length 783
+    Stream: column 13 section PRESENT start: 2416258 length 51
+    Stream: column 13 section DATA start: 2416309 length 5448
+    Stream: column 14 section PRESENT start: 2421757 length 53
+    Stream: column 14 section DATA start: 2421810 length 12144
+    Stream: column 15 section PRESENT start: 2433954 length 53
+    Stream: column 15 section DATA start: 2434007 length 24618
+    Stream: column 16 section PRESENT start: 2458625 length 52
+    Stream: column 16 section DATA start: 2458677 length 24681
+    Stream: column 17 section PRESENT start: 2483358 length 51
+    Stream: column 17 section DATA start: 2483409 length 9927
+    Stream: column 18 section PRESENT start: 2493336 length 53
+    Stream: column 18 section DATA start: 2493389 length 19755
+    Stream: column 19 section PRESENT start: 2513144 length 51
+    Stream: column 19 section DATA start: 2513195 length 11009
+    Stream: column 19 section LENGTH start: 2524204 length 3722
+    Stream: column 19 section DICTIONARY_DATA start: 2527926 length 65435
+    Stream: column 20 section PRESENT start: 2593361 length 54
+    Stream: column 20 section DATA start: 2593415 length 11006
+    Stream: column 20 section LENGTH start: 2604421 length 3739
+    Stream: column 20 section DICTIONARY_DATA start: 2608160 length 66022
+    Stream: column 21 section PRESENT start: 2674182 length 52
+    Stream: column 21 section DATA start: 2674234 length 8045
+    Stream: column 21 section SECONDARY start: 2682279 length 9555
+    Stream: column 22 section PRESENT start: 2691834 length 58
+    Stream: column 22 section DATA start: 2691892 length 8082
+    Stream: column 22 section SECONDARY start: 2699974 length 9590
+    Stream: column 23 section PRESENT start: 2709564 length 51
+    Stream: column 23 section DATA start: 2709615 length 782
+    Stream: column 24 section PRESENT start: 2710397 length 54
+    Stream: column 24 section DATA start: 2710451 length 783
+    Stream: column 25 section PRESENT start: 2711234 length 51
+    Stream: column 25 section DATA start: 2711285 length 5448
+    Stream: column 26 section PRESENT start: 2716733 length 53
+    Stream: column 26 section DATA start: 2716786 length 12144
+    Stream: column 27 section PRESENT start: 2728930 length 53
+    Stream: column 27 section DATA start: 2728983 length 24618
+    Stream: column 28 section PRESENT start: 2753601 length 52
+    Stream: column 28 section DATA start: 2753653 length 24681
+    Stream: column 29 section PRESENT start: 2778334 length 51
+    Stream: column 29 section DATA start: 2778385 length 9927
+    Stream: column 30 section PRESENT start: 2788312 length 53
+    Stream: column 30 section DATA start: 2788365 length 19755
+    Stream: column 31 section PRESENT start: 2808120 length 51
+    Stream: column 31 section DATA start: 2808171 length 11009
+    Stream: column 31 section LENGTH start: 2819180 length 3722
+    Stream: column 31 section DICTIONARY_DATA start: 2822902 length 65435
+    Stream: column 32 section PRESENT start: 2888337 length 54
+    Stream: column 32 section DATA start: 2888391 length 11006
+    Stream: column 32 section LENGTH start: 2899397 length 3739
+    Stream: column 32 section DICTIONARY_DATA start: 2903136 length 66022
+    Stream: column 33 section PRESENT start: 2969158 length 52
+    Stream: column 33 section DATA start: 2969210 length 8045
+    Stream: column 33 section SECONDARY start: 2977255 length 9555
+    Stream: column 34 section PRESENT start: 2986810 length 58
+    Stream: column 34 section DATA start: 2986868 length 8082
+    Stream: column 34 section SECONDARY start: 2994950 length 9590
+    Stream: column 35 section PRESENT start: 3004540 length 51
+    Stream: column 35 section DATA start: 3004591 length 782
+    Stream: column 36 section PRESENT start: 3005373 length 54
+    Stream: column 36 section DATA start: 3005427 length 783
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT
     Encoding column 2: DIRECT_V2
@@ -709,13 +709,13 @@ Stripes:
       Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
     Row group indices for column 2:
       Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
+      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
     Row group indices for column 3:
       Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
+      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
     Row group indices for column 4:
       Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
+      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
     Row group indices for column 5:
       Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
       Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -724,16 +724,16 @@ Stripes:
       Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
     Row group indices for column 7:
       Entry 0: count: 10000 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 360000 positions: 0,0,0,0,0
-      Entry 1: count: 2288 hasNull: false min: 002d8ccb-a094-4d10-b283-999770cf8488 max: ffacef94-41da-4230-807a-509bbf50b057 sum: 82368 positions: 153708,97856,0,76,272
+      Entry 1: count: 2288 hasNull: false min: 002d8ccb-a094-4d10-b283-999770cf8488 max: ffacef94-41da-4230-807a-509bbf50b057 sum: 82368 positions: 153190,97856,0,9766,272
     Row group indices for column 8:
       Entry 0: count: 10000 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 720000 positions: 0,0,0,0,0
-      Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 384237,64640,0,76,272
+      Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 306445,195712,0,9766,272
     Row group indices for column 9:
-      Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
+      Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
     Row group indices for column 10:
-      Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
+      Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
     Row group indices for column 11:
       Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -745,13 +745,13 @@ Stripes:
       Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
     Row group indices for column 14:
       Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
+      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
     Row group indices for column 15:
       Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
+      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
     Row group indices for column 16:
       Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
+      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
     Row group indices for column 17:
       Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
       Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -760,16 +760,16 @@ Stripes:
       Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
     Row group indices for column 19:
       Entry 0: count: 7140 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yxN0212hM17E8J8bJj8D7b sum: 99028 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,8182,0
+      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,14308,0
     Row group indices for column 20:
       Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,9196,262
+      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262
     Row group indices for column 21:
-      Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
+      Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
     Row group indices for column 22:
-      Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
+      Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
     Row group indices for column 23:
       Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -781,13 +781,13 @@ Stripes:
       Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
     Row group indices for column 26:
       Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
+      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
     Row group indices for column 27:
       Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
+      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
     Row group indices for column 28:
       Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
+      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
     Row group indices for column 29:
       Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
       Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -796,16 +796,16 @@ Stripes:
       Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
     Row group indices for column 31:
       Entry 0: count: 7140 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yxN0212hM17E8J8bJj8D7b sum: 99028 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,8182,0
+      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,14308,0
     Row group indices for column 32:
       Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,9196,262
+      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262
     Row group indices for column 33:
-      Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
+      Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
     Row group indices for column 34:
-      Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
+      Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
     Row group indices for column 35:
       Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -813,7 +813,7 @@ Stripes:
       Entry 0: count: 6889 hasNull: true true: 3402 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2284 hasNull: true true: 581 positions: 0,168,8,0,0,520,97,1
 
-File length: 3004630 bytes
+File length: 3007984 bytes
 Padding length: 0 bytes
 Padding ratio: 0%
 ________________________________________________________________________________________________________________________
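A note on the stats diff above: the two-hour shift in the expected timestamp minima and maxima (e.g. 15:59:30.929 -> 13:59:30.929, and 07:59 -> 05:59 in the "UTC" variant) is consistent with the revert putting these values back on java.sql.Timestamp, whose textual form depends on the JVM default time zone rather than on a fixed UTC rendering. A minimal sketch of that dependency, plain Java and not part of this patch (the epoch value and the Pacific zone are illustrative assumptions about the test host):

    import java.sql.Timestamp;
    import java.util.TimeZone;

    public class TimestampZoneSketch {
      public static void main(String[] args) {
        long epochMillis = -30_000L; // 30 s before 1970-01-01 00:00:00 UTC
        TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
        // Rendered as Pacific wall-clock time: 1969-12-31 15:59:30.0
        System.out.println(new Timestamp(epochMillis));
        TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
        // Same instant, rendered in UTC: 1969-12-31 23:59:30.0
        System.out.println(new Timestamp(epochMillis));
      }
    }

The changed "positions" values in the same entries are stream offsets into the re-encoded file, which is also why the file length just above changes.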

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/orc_merge5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge5.q.out b/ql/src/test/results/clientpositive/orc_merge5.q.out
index 5aa64c1..768132c 100644
--- a/ql/src/test/results/clientpositive/orc_merge5.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge5.q.out
@@ -38,17 +38,17 @@ STAGE PLANS:
           TableScan
             alias: orc_merge5_n5
             filterExpr: (userid <= 13L) (type: boolean)
-            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (userid <= 13L) (type: boolean)
-              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                       output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -57,7 +57,7 @@ STAGE PLANS:
                 Select Operator
                   expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp)
                   outputColumnNames: userid, string1, subtype, decimal1, ts
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
                     mode: hash
@@ -120,7 +120,7 @@ POSTHOOK: query: analyze table orc_merge5b_n0 compute statistics noscan
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5b_n0
 POSTHOOK: Output: default@orc_merge5b_n0
-Found 1 items
+Found 3 items
 #### A masked pattern was here ####
 PREHOOK: query: select * from orc_merge5b_n0
 PREHOOK: type: QUERY
@@ -154,17 +154,17 @@ STAGE PLANS:
           TableScan
             alias: orc_merge5_n5
             filterExpr: (userid <= 13L) (type: boolean)
-            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (userid <= 13L) (type: boolean)
-              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                       output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -173,7 +173,7 @@ STAGE PLANS:
                 Select Operator
                   expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp)
                   outputColumnNames: userid, string1, subtype, decimal1, ts
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
                     mode: hash
@@ -299,7 +299,7 @@ POSTHOOK: query: analyze table orc_merge5b_n0 compute statistics noscan
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5b_n0
 POSTHOOK: Output: default@orc_merge5b_n0
-Found 1 items
+Found 3 items
 #### A masked pattern was here ####
 PREHOOK: query: select * from orc_merge5b_n0
 PREHOOK: type: QUERY


[20/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out b/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out
index 966f6c5..9be5235 100644
--- a/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out
@@ -137,7 +137,7 @@ STAGE PLANS:
                   alias: timestamps
                   Statistics: Num rows: 51 Data size: 16000 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
+                    expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00.0')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00.0')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
                     Statistics: Num rows: 51 Data size: 16000 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
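The literal change in the plan above, TIMESTAMP'1800-12-31 00:00:00' becoming TIMESTAMP'1800-12-31 00:00:00.0', is a formatting side effect of the revert: java.sql.Timestamp.toString() always prints at least one fractional digit, whereas the UTC-based formatting removed by this revert suppressed a zero fraction (as the removed lines show). A minimal illustration, plain Java rather than Hive code:

    import java.sql.Timestamp;

    public class TimestampLiteralSketch {
      public static void main(String[] args) {
        // java.sql.Timestamp keeps at least one fractional digit ...
        System.out.println(Timestamp.valueOf("1800-12-31 00:00:00"));     // 1800-12-31 00:00:00.0
        // ... and prints full sub-second precision when present.
        System.out.println(Timestamp.valueOf("2018-03-08 23:04:59.123")); // 2018-03-08 23:04:59.123
      }
    }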
@@ -269,49 +269,49 @@ NULL	NULL	Unknown	NULL	NULL	NULL	2018-03-08 23:04:59	NULL	NULL	NULL
 1815-05-06 00:12:37.543584705	1815-05-04 22:09:33.543584705	1900s	Old	Old	1815	2018-03-08 23:04:59	12	NULL	1816-05-05
 1883-04-17 04:14:34.647766229	1883-04-16 02:11:30.647766229	1900s	Old	Old	1883	2018-03-08 23:04:59	14	NULL	1884-04-16
 1966-08-16 13:36:50.183618031	1966-08-15 11:33:46.183618031	Early 2010s	Old	Old	1966	1966-08-16 13:36:50.183618031	36	NULL	1967-08-16
-1973-04-17 06:30:38.596784156	1973-04-16 04:27:34.596784156	Early 2010s	Old	Old	1973	1973-04-17 06:30:38.596784156	30	NULL	1973-04-18
+1973-04-17 06:30:38.596784156	1973-04-16 04:27:34.596784156	Early 2010s	Old	Old	1973	1973-04-17 06:30:38.596784156	30	NULL	1974-04-17
 1974-10-04 17:21:03.989	1974-10-03 15:17:59.989	Early 2010s	Old	Old	1974	1974-10-04 17:21:03.989	21	NULL	1974-10-05
 1976-03-03 04:54:33.000895162	1976-03-02 02:51:29.000895162	Early 2010s	Old	Old	1976	1976-03-03 04:54:33.000895162	54	NULL	1976-03-04
-1976-05-06 00:42:30.910786948	1976-05-04 22:39:26.910786948	Early 2010s	Old	Old	1976	1976-05-06 00:42:30.910786948	42	NULL	1976-05-07
-1978-08-05 14:41:05.501	1978-08-04 12:38:01.501	Early 2010s	Old	Old	1978	1978-08-05 14:41:05.501	41	NULL	1979-08-05
-1981-04-25 09:01:12.077192689	1981-04-24 06:58:08.077192689	Early 2010s	Old	Old	1981	1981-04-25 09:01:12.077192689	1	NULL	1981-04-26
+1976-05-06 00:42:30.910786948	1976-05-04 22:39:26.910786948	Early 2010s	Old	Old	1976	1976-05-06 00:42:30.910786948	42	NULL	1977-05-06
+1978-08-05 14:41:05.501	1978-08-04 12:38:01.501	Early 2010s	Old	Old	1978	1978-08-05 14:41:05.501	41	NULL	1978-08-06
+1981-04-25 09:01:12.077192689	1981-04-24 06:58:08.077192689	Early 2010s	Old	Old	1981	1981-04-25 09:01:12.077192689	1	NULL	1982-04-25
 1981-11-15 23:03:10.999338387	1981-11-14 21:00:06.999338387	Early 2010s	Old	Old	1981	1981-11-15 23:03:10.999338387	3	NULL	1981-11-16
-1985-07-20 09:30:11	1985-07-19 07:27:07	Early 2010s	Old	Old	1985	1985-07-20 09:30:11	30	NULL	1985-07-21
+1985-07-20 09:30:11	1985-07-19 07:27:07	Early 2010s	Old	Old	1985	1985-07-20 09:30:11	30	NULL	1986-07-20
 1985-11-18 16:37:54	1985-11-17 14:34:50	Early 2010s	Old	Old	1985	1985-11-18 16:37:54	37	NULL	1985-11-19
 1987-02-21 19:48:29	1987-02-20 17:45:25	Early 2010s	Old	Old	1987	1987-02-21 19:48:29	48	NULL	1987-02-22
-1987-05-28 13:52:07.900916635	1987-05-27 11:49:03.900916635	Early 2010s	Old	Old	1987	1987-05-28 13:52:07.900916635	52	NULL	1987-05-29
-1998-10-16 20:05:29.397591987	1998-10-15 18:02:25.397591987	Early 2010s	Old	Old	1998	1998-10-16 20:05:29.397591987	5	NULL	1998-10-17
+1987-05-28 13:52:07.900916635	1987-05-27 11:49:03.900916635	Early 2010s	Old	Old	1987	1987-05-28 13:52:07.900916635	52	NULL	1988-05-27
+1998-10-16 20:05:29.397591987	1998-10-15 18:02:25.397591987	Early 2010s	Old	Old	1998	1998-10-16 20:05:29.397591987	5	NULL	1999-10-16
 1999-10-03 16:59:10.396903939	1999-10-02 14:56:06.396903939	Early 2010s	Old	Old	1999	1999-10-03 16:59:10.396903939	59	NULL	1999-10-04
 2000-12-18 08:42:30.000595596	2000-12-17 06:39:26.000595596	Early 2010s	Old	Old	2000	2018-03-08 23:04:59	42	NULL	2000-12-19
-2002-05-10 05:29:48.990818073	2002-05-09 03:26:44.990818073	Early 2010s	Early 2000s	Early 2000s	2002	2018-03-08 23:04:59	29	NULL	2003-05-10
-2003-09-23 22:33:17.00003252	2003-09-22 20:30:13.00003252	Early 2010s	Early 2000s	Early 2000s	2003	2018-03-08 23:04:59	33	NULL	2003-09-24
+2002-05-10 05:29:48.990818073	2002-05-09 03:26:44.990818073	Early 2010s	Early 2000s	Early 2000s	2002	2018-03-08 23:04:59	29	NULL	2002-05-11
+2003-09-23 22:33:17.00003252	2003-09-22 20:30:13.00003252	Early 2010s	Early 2000s	Early 2000s	2003	2018-03-08 23:04:59	33	NULL	2004-09-22
 2004-03-07 20:14:13	2004-03-06 18:11:09	Early 2010s	Early 2000s	Early 2000s	2004	2018-03-08 23:04:59	14	NULL	2004-03-08
-2007-02-09 05:17:29.368756876	2007-02-08 03:14:25.368756876	Late 2000s	Late 2000s	Late 2000s	2007	2018-03-08 23:04:59	17	NULL	2007-02-10
+2007-02-09 05:17:29.368756876	2007-02-08 03:14:25.368756876	Late 2000s	Late 2000s	Late 2000s	2007	2018-03-08 23:04:59	17	NULL	2008-02-09
 2009-01-21 10:49:07.108	2009-01-20 08:46:03.108	Late 2000s	Late 2000s	Late 2000s	2009	2018-03-08 23:04:59	49	NULL	2009-01-22
 2010-04-08 02:43:35.861742727	2010-04-07 00:40:31.861742727	Late 2000s	Late 2000s	Late 2000s	2010	2018-03-08 23:04:59	43	NULL	2010-04-09
 2013-04-07 02:44:43.00086821	2013-04-06 00:41:39.00086821	Early 2010s	Early 2010s	NULL	2013	2018-03-08 23:04:59	44	NULL	2013-04-08
 2013-04-10 00:43:46.854731546	2013-04-08 22:40:42.854731546	Early 2010s	Early 2010s	NULL	2013	2018-03-08 23:04:59	43	NULL	2013-04-11
-2021-09-24 03:18:32.413655165	2021-09-23 01:15:28.413655165	Unknown	NULL	NULL	2021	2018-03-08 23:04:59	NULL	NULL	2022-09-24
+2021-09-24 03:18:32.413655165	2021-09-23 01:15:28.413655165	Unknown	NULL	NULL	2021	2018-03-08 23:04:59	NULL	NULL	2021-09-25
 2024-11-11 16:42:41.101	2024-11-10 14:39:37.101	Unknown	NULL	NULL	2024	2018-03-08 23:04:59	42	NULL	2024-11-12
 4143-07-08 10:53:27.252802259	4143-07-07 08:50:23.252802259	Unknown	NULL	NULL	4143	2018-03-08 23:04:59	53	NULL	4143-07-09
 4966-12-04 09:30:55.202	4966-12-03 07:27:51.202	Unknown	NULL	NULL	4966	2018-03-08 23:04:59	30	NULL	4966-12-05
-5339-02-01 14:10:01.085678691	5339-01-31 12:06:57.085678691	Unknown	NULL	NULL	5339	2018-03-08 23:04:59	10	NULL	5339-02-02
+5339-02-01 14:10:01.085678691	5339-01-31 12:06:57.085678691	Unknown	NULL	NULL	5339	2018-03-08 23:04:59	10	NULL	5340-02-01
 5344-10-04 18:40:08.165	5344-10-03 16:37:04.165	Unknown	NULL	NULL	5344	2018-03-08 23:04:59	40	NULL	5344-10-05
 5397-07-13 07:12:32.000896438	5397-07-12 05:09:28.000896438	Unknown	NULL	NULL	5397	2018-03-08 23:04:59	12	12	5397-07-14
-5966-07-09 03:30:50.597	5966-07-08 01:27:46.597	Unknown	NULL	NULL	5966	2018-03-08 23:04:59	30	30	5966-07-10
-6229-06-28 02:54:28.970117179	6229-06-27 00:51:24.970117179	Unknown	NULL	NULL	6229	2018-03-08 23:04:59	54	54	6229-06-29
-6482-04-27 12:07:38.073915413	6482-04-26 10:04:34.073915413	Unknown	NULL	NULL	6482	2018-03-08 23:04:59	7	7	6483-04-27
+5966-07-09 03:30:50.597	5966-07-08 01:27:46.597	Unknown	NULL	NULL	5966	2018-03-08 23:04:59	30	30	5967-07-09
+6229-06-28 02:54:28.970117179	6229-06-27 00:51:24.970117179	Unknown	NULL	NULL	6229	2018-03-08 23:04:59	54	54	6230-06-28
+6482-04-27 12:07:38.073915413	6482-04-26 10:04:34.073915413	Unknown	NULL	NULL	6482	2018-03-08 23:04:59	7	7	6482-04-28
 6631-11-13 16:31:29.702202248	6631-11-12 14:28:25.702202248	Unknown	NULL	NULL	6631	2018-03-08 23:04:59	31	31	6631-11-14
 6705-09-28 18:27:28.000845672	6705-09-27 16:24:24.000845672	Unknown	NULL	NULL	6705	2018-03-08 23:04:59	27	NULL	6705-09-29
 6731-02-12 08:12:48.287783702	6731-02-11 06:09:44.287783702	Unknown	NULL	NULL	6731	2018-03-08 23:04:59	12	NULL	6731-02-13
-7160-12-02 06:00:24.81200852	7160-12-01 03:57:20.81200852	Unknown	NULL	NULL	7160	2018-03-08 23:04:59	0	NULL	7160-12-03
-7409-09-07 23:33:32.459349602	7409-09-06 21:30:28.459349602	Unknown	NULL	NULL	7409	2018-03-08 23:04:59	33	NULL	7410-09-07
+7160-12-02 06:00:24.81200852	7160-12-01 03:57:20.81200852	Unknown	NULL	NULL	7160	2018-03-08 23:04:59	0	NULL	7161-12-02
+7409-09-07 23:33:32.459349602	7409-09-06 21:30:28.459349602	Unknown	NULL	NULL	7409	2018-03-08 23:04:59	33	NULL	7409-09-08
 7503-06-23 23:14:17.486	7503-06-22 21:11:13.486	Unknown	NULL	NULL	7503	2018-03-08 23:04:59	14	NULL	7503-06-24
 8422-07-22 03:21:45.745036084	8422-07-21 01:18:41.745036084	Unknown	NULL	NULL	8422	2018-03-08 23:04:59	21	NULL	8422-07-23
 8521-01-16 20:42:05.668832388	8521-01-15 18:39:01.668832388	Unknown	NULL	NULL	8521	2018-03-08 23:04:59	42	NULL	8521-01-17
 9075-06-13 16:20:09.218517797	9075-06-12 14:17:05.218517797	Unknown	NULL	NULL	9075	2018-03-08 23:04:59	20	NULL	9075-06-14
 9209-11-11 04:08:58.223768453	9209-11-10 02:05:54.223768453	Unknown	NULL	NULL	9209	2018-03-08 23:04:59	8	NULL	9209-11-12
-9403-01-09 18:12:33.547	9403-01-08 16:09:29.547	Unknown	NULL	NULL	9403	2018-03-08 23:04:59	12	NULL	9404-01-09
+9403-01-09 18:12:33.547	9403-01-08 16:09:29.547	Unknown	NULL	NULL	9403	2018-03-08 23:04:59	12	NULL	9403-01-10
 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
 SELECT
    ctimestamp1,
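Most of the changed result rows above differ only in the final date column, computed by if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)). Casting a timestamp to double yields seconds since the epoch, so reading the same wall-clock value under a different time zone shifts that number by the zone offset; the offset is generally not a multiple of 500 seconds, so the modulo test can flip and a row switches between the one-day and the 365-day branch. A rough sketch of the mechanism, plain Java approximating the cast rather than Hive's own code:

    import java.sql.Timestamp;
    import java.util.TimeZone;

    public class TimestampModuloSketch {
      public static void main(String[] args) {
        // The same wall-clock string parsed under two zone interpretations:
        TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
        double pacificSecs = Timestamp.valueOf("1973-04-17 06:30:38.596").getTime() / 1000.0;
        TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
        double utcSecs = Timestamp.valueOf("1973-04-17 06:30:38.596").getTime() / 1000.0;
        // The two values differ by the zone offset, so the branch predicate
        // (x % 500.0) > 100.0 may evaluate differently for the two readings:
        System.out.println((pacificSecs % 500.0) > 100.0);
        System.out.println((utcSecs % 500.0) > 100.0);
      }
    }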
@@ -402,13 +402,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:cdate:date, 1:ctimestamp1:timestamp, 2:stimestamp1:string, 3:ctimestamp2:timestamp, 4:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
+                    expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00.0')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00.0')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [1, 3, 10, 12, 13, 14, 11, 7, 16, 23, 2]
-                        selectExpressions: IfExprStringScalarStringGroupColumn(col 5:boolean, val 1800s or Earliercol 9:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val 1900scol 10:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 9:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprStringScalarStringScalar(col 8:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 8:boolean) -> 9:string) -> 10:string) -> 9:string) -> 10:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 12:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprColumnNull(col 8:boolean, col 9:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 8:boolean, ConstantVectorExpression(val Early 2010s) -> 9:string) -> 11:string) -> 12:string) -> 11:string) -> 12:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprNullNull(null, null) -> 11:string) -> 13:string) -> 11:string) -> 13:string, IfExprLongColumnLongColumn(col 5:boolean, col 6:int, col 7:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 17:21:03.989) -> 5:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 7:int) -> 14:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 5:boolean) -> 11:string, IfExprNullColumn(col 5:boolean, null, col 6)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-24 03:18:32.413655165) -> 5:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 6:int) -> 7:int, IfExprColumnNull(col 17:boolean, col 15:int, null)(children: ColAndCol(col 15:boolean, col 16:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 18:40:08.165) -> 15:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 16:31:29.702202248) -> 16:boolean) -> 17:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 15:int) -> 16:int, IfExprLongColumnLongColumn(col 20:boolean, col 21:date, col 22:date)(children: DoubleColGreaterDoubleScalar(col 19:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 18:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 18:double) -> 19:double) -> 20:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 21:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 22:date) -> 23:date
+                        selectExpressions: IfExprStringScalarStringGroupColumn(col 5:boolean, val 1800s or Earliercol 9:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00.0) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val 1900scol 10:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 9:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprStringScalarStringScalar(col 8:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 8:boolean) -> 9:string) -> 10:string) -> 9:string) -> 10:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 12:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprColumnNull(col 8:boolean, col 9:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 8:boolean, ConstantVectorExpression(val Early 2010s) -> 9:string) -> 11:string) -> 12:string) -> 11:string) -> 12:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprNullNull(null, null) -> 11:string) -> 13:string) -> 11:string) -> 13:string, IfExprLongColumnLongColumn(col 5:boolean, col 6:int, col 7:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 17:21:03.989) -> 5:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 7:int) -> 14:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 5:boolean) -> 11:string, IfExprNullColumn(col 5:boolean, null, col 6)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-24 03:18:32.413655165) -> 5:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 6:int) -> 7:int, IfExprColumnNull(col 17:boolean, col 15:int, null)(children: ColAndCol(col 15:boolean, col 16:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 18:40:08.165) -> 15:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 16:31:29.702202248) -> 16:boolean) -> 17:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 15:int) -> 16:int, IfExprLongColumnLongColumn(col 20:boolean, col 21:date, col 22:date)(children: DoubleColGreaterDoubleScalar(col 19:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 18:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 18:double) -> 19:double) -> 20:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 21:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 22:date) -> 23:date
                     Statistics: Num rows: 51 Data size: 16000 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: timestamp), _col10 (type: string), _col1 (type: timestamp)
@@ -554,49 +554,49 @@ NULL	NULL	Unknown	NULL	NULL	NULL	2018-03-08 23:04:59	NULL	NULL	NULL
 1815-05-06 00:12:37.543584705	1815-05-04 22:09:33.543584705	1900s	Old	Old	1815	2018-03-08 23:04:59	12	NULL	1816-05-05
 1883-04-17 04:14:34.647766229	1883-04-16 02:11:30.647766229	1900s	Old	Old	1883	2018-03-08 23:04:59	14	NULL	1884-04-16
 1966-08-16 13:36:50.183618031	1966-08-15 11:33:46.183618031	Early 2010s	Old	Old	1966	1966-08-16 13:36:50.183618031	36	NULL	1967-08-16
-1973-04-17 06:30:38.596784156	1973-04-16 04:27:34.596784156	Early 2010s	Old	Old	1973	1973-04-17 06:30:38.596784156	30	NULL	1973-04-18
+1973-04-17 06:30:38.596784156	1973-04-16 04:27:34.596784156	Early 2010s	Old	Old	1973	1973-04-17 06:30:38.596784156	30	NULL	1974-04-17
 1974-10-04 17:21:03.989	1974-10-03 15:17:59.989	Early 2010s	Old	Old	1974	1974-10-04 17:21:03.989	21	NULL	1974-10-05
 1976-03-03 04:54:33.000895162	1976-03-02 02:51:29.000895162	Early 2010s	Old	Old	1976	1976-03-03 04:54:33.000895162	54	NULL	1976-03-04
-1976-05-06 00:42:30.910786948	1976-05-04 22:39:26.910786948	Early 2010s	Old	Old	1976	1976-05-06 00:42:30.910786948	42	NULL	1976-05-07
-1978-08-05 14:41:05.501	1978-08-04 12:38:01.501	Early 2010s	Old	Old	1978	1978-08-05 14:41:05.501	41	NULL	1979-08-05
-1981-04-25 09:01:12.077192689	1981-04-24 06:58:08.077192689	Early 2010s	Old	Old	1981	1981-04-25 09:01:12.077192689	1	NULL	1981-04-26
+1976-05-06 00:42:30.910786948	1976-05-04 22:39:26.910786948	Early 2010s	Old	Old	1976	1976-05-06 00:42:30.910786948	42	NULL	1977-05-06
+1978-08-05 14:41:05.501	1978-08-04 12:38:01.501	Early 2010s	Old	Old	1978	1978-08-05 14:41:05.501	41	NULL	1978-08-06
+1981-04-25 09:01:12.077192689	1981-04-24 06:58:08.077192689	Early 2010s	Old	Old	1981	1981-04-25 09:01:12.077192689	1	NULL	1982-04-25
 1981-11-15 23:03:10.999338387	1981-11-14 21:00:06.999338387	Early 2010s	Old	Old	1981	1981-11-15 23:03:10.999338387	3	NULL	1981-11-16
-1985-07-20 09:30:11	1985-07-19 07:27:07	Early 2010s	Old	Old	1985	1985-07-20 09:30:11	30	NULL	1985-07-21
+1985-07-20 09:30:11	1985-07-19 07:27:07	Early 2010s	Old	Old	1985	1985-07-20 09:30:11	30	NULL	1986-07-20
 1985-11-18 16:37:54	1985-11-17 14:34:50	Early 2010s	Old	Old	1985	1985-11-18 16:37:54	37	NULL	1985-11-19
 1987-02-21 19:48:29	1987-02-20 17:45:25	Early 2010s	Old	Old	1987	1987-02-21 19:48:29	48	NULL	1987-02-22
-1987-05-28 13:52:07.900916635	1987-05-27 11:49:03.900916635	Early 2010s	Old	Old	1987	1987-05-28 13:52:07.900916635	52	NULL	1987-05-29
-1998-10-16 20:05:29.397591987	1998-10-15 18:02:25.397591987	Early 2010s	Old	Old	1998	1998-10-16 20:05:29.397591987	5	NULL	1998-10-17
+1987-05-28 13:52:07.900916635	1987-05-27 11:49:03.900916635	Early 2010s	Old	Old	1987	1987-05-28 13:52:07.900916635	52	NULL	1988-05-27
+1998-10-16 20:05:29.397591987	1998-10-15 18:02:25.397591987	Early 2010s	Old	Old	1998	1998-10-16 20:05:29.397591987	5	NULL	1999-10-16
 1999-10-03 16:59:10.396903939	1999-10-02 14:56:06.396903939	Early 2010s	Old	Old	1999	1999-10-03 16:59:10.396903939	59	NULL	1999-10-04
 2000-12-18 08:42:30.000595596	2000-12-17 06:39:26.000595596	Early 2010s	Old	Old	2000	2018-03-08 23:04:59	42	NULL	2000-12-19
-2002-05-10 05:29:48.990818073	2002-05-09 03:26:44.990818073	Early 2010s	Early 2000s	Early 2000s	2002	2018-03-08 23:04:59	29	NULL	2003-05-10
-2003-09-23 22:33:17.00003252	2003-09-22 20:30:13.00003252	Early 2010s	Early 2000s	Early 2000s	2003	2018-03-08 23:04:59	33	NULL	2003-09-24
+2002-05-10 05:29:48.990818073	2002-05-09 03:26:44.990818073	Early 2010s	Early 2000s	Early 2000s	2002	2018-03-08 23:04:59	29	NULL	2002-05-11
+2003-09-23 22:33:17.00003252	2003-09-22 20:30:13.00003252	Early 2010s	Early 2000s	Early 2000s	2003	2018-03-08 23:04:59	33	NULL	2004-09-22
 2004-03-07 20:14:13	2004-03-06 18:11:09	Early 2010s	Early 2000s	Early 2000s	2004	2018-03-08 23:04:59	14	NULL	2004-03-08
-2007-02-09 05:17:29.368756876	2007-02-08 03:14:25.368756876	Late 2000s	Late 2000s	Late 2000s	2007	2018-03-08 23:04:59	17	NULL	2007-02-10
+2007-02-09 05:17:29.368756876	2007-02-08 03:14:25.368756876	Late 2000s	Late 2000s	Late 2000s	2007	2018-03-08 23:04:59	17	NULL	2008-02-09
 2009-01-21 10:49:07.108	2009-01-20 08:46:03.108	Late 2000s	Late 2000s	Late 2000s	2009	2018-03-08 23:04:59	49	NULL	2009-01-22
 2010-04-08 02:43:35.861742727	2010-04-07 00:40:31.861742727	Late 2000s	Late 2000s	Late 2000s	2010	2018-03-08 23:04:59	43	NULL	2010-04-09
 2013-04-07 02:44:43.00086821	2013-04-06 00:41:39.00086821	Early 2010s	Early 2010s	NULL	2013	2018-03-08 23:04:59	44	NULL	2013-04-08
 2013-04-10 00:43:46.854731546	2013-04-08 22:40:42.854731546	Early 2010s	Early 2010s	NULL	2013	2018-03-08 23:04:59	43	NULL	2013-04-11
-2021-09-24 03:18:32.413655165	2021-09-23 01:15:28.413655165	Unknown	NULL	NULL	2021	2018-03-08 23:04:59	NULL	NULL	2022-09-24
+2021-09-24 03:18:32.413655165	2021-09-23 01:15:28.413655165	Unknown	NULL	NULL	2021	2018-03-08 23:04:59	NULL	NULL	2021-09-25
 2024-11-11 16:42:41.101	2024-11-10 14:39:37.101	Unknown	NULL	NULL	2024	2018-03-08 23:04:59	42	NULL	2024-11-12
 4143-07-08 10:53:27.252802259	4143-07-07 08:50:23.252802259	Unknown	NULL	NULL	4143	2018-03-08 23:04:59	53	NULL	4143-07-09
 4966-12-04 09:30:55.202	4966-12-03 07:27:51.202	Unknown	NULL	NULL	4966	2018-03-08 23:04:59	30	NULL	4966-12-05
-5339-02-01 14:10:01.085678691	5339-01-31 12:06:57.085678691	Unknown	NULL	NULL	5339	2018-03-08 23:04:59	10	NULL	5339-02-02
+5339-02-01 14:10:01.085678691	5339-01-31 12:06:57.085678691	Unknown	NULL	NULL	5339	2018-03-08 23:04:59	10	NULL	5340-02-01
 5344-10-04 18:40:08.165	5344-10-03 16:37:04.165	Unknown	NULL	NULL	5344	2018-03-08 23:04:59	40	NULL	5344-10-05
 5397-07-13 07:12:32.000896438	5397-07-12 05:09:28.000896438	Unknown	NULL	NULL	5397	2018-03-08 23:04:59	12	12	5397-07-14
-5966-07-09 03:30:50.597	5966-07-08 01:27:46.597	Unknown	NULL	NULL	5966	2018-03-08 23:04:59	30	30	5966-07-10
-6229-06-28 02:54:28.970117179	6229-06-27 00:51:24.970117179	Unknown	NULL	NULL	6229	2018-03-08 23:04:59	54	54	6229-06-29
-6482-04-27 12:07:38.073915413	6482-04-26 10:04:34.073915413	Unknown	NULL	NULL	6482	2018-03-08 23:04:59	7	7	6483-04-27
+5966-07-09 03:30:50.597	5966-07-08 01:27:46.597	Unknown	NULL	NULL	5966	2018-03-08 23:04:59	30	30	5967-07-09
+6229-06-28 02:54:28.970117179	6229-06-27 00:51:24.970117179	Unknown	NULL	NULL	6229	2018-03-08 23:04:59	54	54	6230-06-28
+6482-04-27 12:07:38.073915413	6482-04-26 10:04:34.073915413	Unknown	NULL	NULL	6482	2018-03-08 23:04:59	7	7	6482-04-28
 6631-11-13 16:31:29.702202248	6631-11-12 14:28:25.702202248	Unknown	NULL	NULL	6631	2018-03-08 23:04:59	31	31	6631-11-14
 6705-09-28 18:27:28.000845672	6705-09-27 16:24:24.000845672	Unknown	NULL	NULL	6705	2018-03-08 23:04:59	27	NULL	6705-09-29
 6731-02-12 08:12:48.287783702	6731-02-11 06:09:44.287783702	Unknown	NULL	NULL	6731	2018-03-08 23:04:59	12	NULL	6731-02-13
-7160-12-02 06:00:24.81200852	7160-12-01 03:57:20.81200852	Unknown	NULL	NULL	7160	2018-03-08 23:04:59	0	NULL	7160-12-03
-7409-09-07 23:33:32.459349602	7409-09-06 21:30:28.459349602	Unknown	NULL	NULL	7409	2018-03-08 23:04:59	33	NULL	7410-09-07
+7160-12-02 06:00:24.81200852	7160-12-01 03:57:20.81200852	Unknown	NULL	NULL	7160	2018-03-08 23:04:59	0	NULL	7161-12-02
+7409-09-07 23:33:32.459349602	7409-09-06 21:30:28.459349602	Unknown	NULL	NULL	7409	2018-03-08 23:04:59	33	NULL	7409-09-08
 7503-06-23 23:14:17.486	7503-06-22 21:11:13.486	Unknown	NULL	NULL	7503	2018-03-08 23:04:59	14	NULL	7503-06-24
 8422-07-22 03:21:45.745036084	8422-07-21 01:18:41.745036084	Unknown	NULL	NULL	8422	2018-03-08 23:04:59	21	NULL	8422-07-23
 8521-01-16 20:42:05.668832388	8521-01-15 18:39:01.668832388	Unknown	NULL	NULL	8521	2018-03-08 23:04:59	42	NULL	8521-01-17
 9075-06-13 16:20:09.218517797	9075-06-12 14:17:05.218517797	Unknown	NULL	NULL	9075	2018-03-08 23:04:59	20	NULL	9075-06-14
 9209-11-11 04:08:58.223768453	9209-11-10 02:05:54.223768453	Unknown	NULL	NULL	9209	2018-03-08 23:04:59	8	NULL	9209-11-12
-9403-01-09 18:12:33.547	9403-01-08 16:09:29.547	Unknown	NULL	NULL	9403	2018-03-08 23:04:59	12	NULL	9404-01-09
+9403-01-09 18:12:33.547	9403-01-08 16:09:29.547	Unknown	NULL	NULL	9403	2018-03-08 23:04:59	12	NULL	9403-01-10
 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
 SELECT
    ctimestamp1,
@@ -687,13 +687,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:cdate:date, 1:ctimestamp1:timestamp, 2:stimestamp1:string, 3:ctimestamp2:timestamp, 4:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
+                    expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00.0')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00.0')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [1, 3, 15, 26, 36, 40, 42, 44, 46, 53, 2]
-                        selectExpressions: IfExprColumnCondExpr(col 5:boolean, col 6:stringcol 14:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00) -> 5:boolean, ConstantVectorExpression(val 1800s or Earlier) -> 6:string, IfExprColumnCondExpr(col 7:boolean, col 8:stringcol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00) -> 7:boolean, ConstantVectorExpression(val 1900s) -> 8:string, IfExprColumnCondExpr(col 9:boolean, col 10:stringcol 12:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 9:boolean, ConstantVectorExpression(val Late 2000s) -> 10:string, IfExprStringScalarStringScalar(col 11:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 11:boolean) -> 12:string) -> 13:string) -> 14:string) -> 15:string, IfExprColumnCondExpr(col 11:boolean, col 16:stringcol 25:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 11:boolean, ConstantVectorExpression(val Old) -> 16:string, IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 24:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00) -> 17:boolean, ConstantVectorExpression(val Early 2000s) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 23:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 19:boolean, ConstantVectorExpression(val Late 2000s) -> 20:string, IfExprColumnNull(col 21:boolean, col 22:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 21:boolean, ConstantVectorExpression(val Early 2010s) -> 22:string) -> 23:string) -> 24:string) -> 25:string) -> 26:string, IfExprColumnCondExpr(col 27:boolean, col 28:stringcol 35:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 27:boolean, ConstantVectorExpression(val Old) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 34:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00) -> 29:boolean, ConstantVectorExpression(val Early 2000s) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 33:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 31:boolean, ConstantVectorExpression(val Late 2000s) -> 32:string, IfExprNullNull(null, null) -> 33:string) -> 34:string) -> 35:string) -> 36:string, IfExprCondExprCondExpr(col 37:boolean, col 38:intcol 39:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 17:21:03.989) -> 37:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 38:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 39:int) -> 40:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 41:boolean) -> 42:string, IfExprNullCondExpr(col 41:boolean, null, col 43:int)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-24 03:18:32.413655165) -> 41:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 43:int) -> 44:int, IfExprCondExprNull(col 47:boolean, col 45:int, null)(children: ColAndCol(col 45:boolean, col 46:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 18:40:08.165) -> 45:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 16:31:29.702202248) -> 46:boolean) -> 47:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 45:int) -> 46:int, IfExprCondExprCondExpr(col 50:boolean, col 51:datecol 52:date)(children: DoubleColGreaterDoubleScalar(col 49:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 48:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 48:double) -> 49:double) -> 50:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 51:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 52:date) -> 53:date
+                        selectExpressions: IfExprColumnCondExpr(col 5:boolean, col 6:stringcol 14:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00.0) -> 5:boolean, ConstantVectorExpression(val 1800s or Earlier) -> 6:string, IfExprColumnCondExpr(col 7:boolean, col 8:stringcol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00.0) -> 7:boolean, ConstantVectorExpression(val 1900s) -> 8:string, IfExprColumnCondExpr(col 9:boolean, col 10:stringcol 12:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 9:boolean, ConstantVectorExpression(val Late 2000s) -> 10:string, IfExprStringScalarStringScalar(col 11:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 11:boolean) -> 12:string) -> 13:string) -> 14:string) -> 15:string, IfExprColumnCondExpr(col 11:boolean, col 16:stringcol 25:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 11:boolean, ConstantVectorExpression(val Old) -> 16:string, IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 24:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 17:boolean, ConstantVectorExpression(val Early 2000s) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 23:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 19:boolean, ConstantVectorExpression(val Late 2000s) -> 20:string, IfExprColumnNull(col 21:boolean, col 22:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 21:boolean, ConstantVectorExpression(val Early 2010s) -> 22:string) -> 23:string) -> 24:string) -> 25:string) -> 26:string, IfExprColumnCondExpr(col 27:boolean, col 28:stringcol 35:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 27:boolean, ConstantVectorExpression(val Old) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 34:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 29:boolean, ConstantVectorExpression(val Early 2000s) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 33:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 31:boolean, ConstantVectorExpression(val Late 2000s) -> 32:string, IfExprNullNull(null, null) -> 33:string) -> 34:string) -> 35:string) -> 36:string, IfExprCondExprCondExpr(col 37:boolean, col 38:intcol 39:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 17:21:03.989) -> 37:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 38:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 39:int) -> 40:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 41:boolean) -> 42:string, IfExprNullCondExpr(col 41:boolean, null, col 43:int)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-24 03:18:32.413655165) -> 41:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 43:int) -> 44:int, IfExprCondExprNull(col 47:boolean, col 45:int, null)(children: ColAndCol(col 45:boolean, col 46:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 18:40:08.165) -> 45:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 16:31:29.702202248) -> 46:boolean) -> 47:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 45:int) -> 46:int, IfExprCondExprCondExpr(col 50:boolean, col 51:datecol 52:date)(children: DoubleColGreaterDoubleScalar(col 49:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 48:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 48:double) -> 49:double) -> 50:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 51:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 52:date) -> 53:date
                     Statistics: Num rows: 51 Data size: 16000 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: timestamp), _col10 (type: string), _col1 (type: timestamp)
@@ -839,46 +839,46 @@ NULL	NULL	Unknown	NULL	NULL	NULL	2018-03-08 23:04:59	NULL	NULL	NULL
 1815-05-06 00:12:37.543584705	1815-05-04 22:09:33.543584705	1900s	Old	Old	1815	2018-03-08 23:04:59	12	NULL	1816-05-05
 1883-04-17 04:14:34.647766229	1883-04-16 02:11:30.647766229	1900s	Old	Old	1883	2018-03-08 23:04:59	14	NULL	1884-04-16
 1966-08-16 13:36:50.183618031	1966-08-15 11:33:46.183618031	Early 2010s	Old	Old	1966	1966-08-16 13:36:50.183618031	36	NULL	1967-08-16
-1973-04-17 06:30:38.596784156	1973-04-16 04:27:34.596784156	Early 2010s	Old	Old	1973	1973-04-17 06:30:38.596784156	30	NULL	1973-04-18
+1973-04-17 06:30:38.596784156	1973-04-16 04:27:34.596784156	Early 2010s	Old	Old	1973	1973-04-17 06:30:38.596784156	30	NULL	1974-04-17
 1974-10-04 17:21:03.989	1974-10-03 15:17:59.989	Early 2010s	Old	Old	1974	1974-10-04 17:21:03.989	21	NULL	1974-10-05
 1976-03-03 04:54:33.000895162	1976-03-02 02:51:29.000895162	Early 2010s	Old	Old	1976	1976-03-03 04:54:33.000895162	54	NULL	1976-03-04
-1976-05-06 00:42:30.910786948	1976-05-04 22:39:26.910786948	Early 2010s	Old	Old	1976	1976-05-06 00:42:30.910786948	42	NULL	1976-05-07
-1978-08-05 14:41:05.501	1978-08-04 12:38:01.501	Early 2010s	Old	Old	1978	1978-08-05 14:41:05.501	41	NULL	1979-08-05
-1981-04-25 09:01:12.077192689	1981-04-24 06:58:08.077192689	Early 2010s	Old	Old	1981	1981-04-25 09:01:12.077192689	1	NULL	1981-04-26
+1976-05-06 00:42:30.910786948	1976-05-04 22:39:26.910786948	Early 2010s	Old	Old	1976	1976-05-06 00:42:30.910786948	42	NULL	1977-05-06
+1978-08-05 14:41:05.501	1978-08-04 12:38:01.501	Early 2010s	Old	Old	1978	1978-08-05 14:41:05.501	41	NULL	1978-08-06
+1981-04-25 09:01:12.077192689	1981-04-24 06:58:08.077192689	Early 2010s	Old	Old	1981	1981-04-25 09:01:12.077192689	1	NULL	1982-04-25
 1981-11-15 23:03:10.999338387	1981-11-14 21:00:06.999338387	Early 2010s	Old	Old	1981	1981-11-15 23:03:10.999338387	3	NULL	1981-11-16
-1985-07-20 09:30:11	1985-07-19 07:27:07	Early 2010s	Old	Old	1985	1985-07-20 09:30:11	30	NULL	1985-07-21
+1985-07-20 09:30:11	1985-07-19 07:27:07	Early 2010s	Old	Old	1985	1985-07-20 09:30:11	30	NULL	1986-07-20
 1985-11-18 16:37:54	1985-11-17 14:34:50	Early 2010s	Old	Old	1985	1985-11-18 16:37:54	37	NULL	1985-11-19
 1987-02-21 19:48:29	1987-02-20 17:45:25	Early 2010s	Old	Old	1987	1987-02-21 19:48:29	48	NULL	1987-02-22
-1987-05-28 13:52:07.900916635	1987-05-27 11:49:03.900916635	Early 2010s	Old	Old	1987	1987-05-28 13:52:07.900916635	52	NULL	1987-05-29
-1998-10-16 20:05:29.397591987	1998-10-15 18:02:25.397591987	Early 2010s	Old	Old	1998	1998-10-16 20:05:29.397591987	5	NULL	1998-10-17
+1987-05-28 13:52:07.900916635	1987-05-27 11:49:03.900916635	Early 2010s	Old	Old	1987	1987-05-28 13:52:07.900916635	52	NULL	1988-05-27
+1998-10-16 20:05:29.397591987	1998-10-15 18:02:25.397591987	Early 2010s	Old	Old	1998	1998-10-16 20:05:29.397591987	5	NULL	1999-10-16
 1999-10-03 16:59:10.396903939	1999-10-02 14:56:06.396903939	Early 2010s	Old	Old	1999	1999-10-03 16:59:10.396903939	59	NULL	1999-10-04
 2000-12-18 08:42:30.000595596	2000-12-17 06:39:26.000595596	Early 2010s	Old	Old	2000	2018-03-08 23:04:59	42	NULL	2000-12-19
-2002-05-10 05:29:48.990818073	2002-05-09 03:26:44.990818073	Early 2010s	Early 2000s	Early 2000s	2002	2018-03-08 23:04:59	29	NULL	2003-05-10
-2003-09-23 22:33:17.00003252	2003-09-22 20:30:13.00003252	Early 2010s	Early 2000s	Early 2000s	2003	2018-03-08 23:04:59	33	NULL	2003-09-24
+2002-05-10 05:29:48.990818073	2002-05-09 03:26:44.990818073	Early 2010s	Early 2000s	Early 2000s	2002	2018-03-08 23:04:59	29	NULL	2002-05-11
+2003-09-23 22:33:17.00003252	2003-09-22 20:30:13.00003252	Early 2010s	Early 2000s	Early 2000s	2003	2018-03-08 23:04:59	33	NULL	2004-09-22
 2004-03-07 20:14:13	2004-03-06 18:11:09	Early 2010s	Early 2000s	Early 2000s	2004	2018-03-08 23:04:59	14	NULL	2004-03-08
-2007-02-09 05:17:29.368756876	2007-02-08 03:14:25.368756876	Late 2000s	Late 2000s	Late 2000s	2007	2018-03-08 23:04:59	17	NULL	2007-02-10
+2007-02-09 05:17:29.368756876	2007-02-08 03:14:25.368756876	Late 2000s	Late 2000s	Late 2000s	2007	2018-03-08 23:04:59	17	NULL	2008-02-09
 2009-01-21 10:49:07.108	2009-01-20 08:46:03.108	Late 2000s	Late 2000s	Late 2000s	2009	2018-03-08 23:04:59	49	NULL	2009-01-22
 2010-04-08 02:43:35.861742727	2010-04-07 00:40:31.861742727	Late 2000s	Late 2000s	Late 2000s	2010	2018-03-08 23:04:59	43	NULL	2010-04-09
 2013-04-07 02:44:43.00086821	2013-04-06 00:41:39.00086821	Early 2010s	Early 2010s	NULL	2013	2018-03-08 23:04:59	44	NULL	2013-04-08
 2013-04-10 00:43:46.854731546	2013-04-08 22:40:42.854731546	Early 2010s	Early 2010s	NULL	2013	2018-03-08 23:04:59	43	NULL	2013-04-11
-2021-09-24 03:18:32.413655165	2021-09-23 01:15:28.413655165	Unknown	NULL	NULL	2021	2018-03-08 23:04:59	NULL	NULL	2022-09-24
+2021-09-24 03:18:32.413655165	2021-09-23 01:15:28.413655165	Unknown	NULL	NULL	2021	2018-03-08 23:04:59	NULL	NULL	2021-09-25
 2024-11-11 16:42:41.101	2024-11-10 14:39:37.101	Unknown	NULL	NULL	2024	2018-03-08 23:04:59	42	NULL	2024-11-12
 4143-07-08 10:53:27.252802259	4143-07-07 08:50:23.252802259	Unknown	NULL	NULL	4143	2018-03-08 23:04:59	53	NULL	4143-07-09
 4966-12-04 09:30:55.202	4966-12-03 07:27:51.202	Unknown	NULL	NULL	4966	2018-03-08 23:04:59	30	NULL	4966-12-05
-5339-02-01 14:10:01.085678691	5339-01-31 12:06:57.085678691	Unknown	NULL	NULL	5339	2018-03-08 23:04:59	10	NULL	5339-02-02
+5339-02-01 14:10:01.085678691	5339-01-31 12:06:57.085678691	Unknown	NULL	NULL	5339	2018-03-08 23:04:59	10	NULL	5340-02-01
 5344-10-04 18:40:08.165	5344-10-03 16:37:04.165	Unknown	NULL	NULL	5344	2018-03-08 23:04:59	40	NULL	5344-10-05
 5397-07-13 07:12:32.000896438	5397-07-12 05:09:28.000896438	Unknown	NULL	NULL	5397	2018-03-08 23:04:59	12	12	5397-07-14
-5966-07-09 03:30:50.597	5966-07-08 01:27:46.597	Unknown	NULL	NULL	5966	2018-03-08 23:04:59	30	30	5966-07-10
-6229-06-28 02:54:28.970117179	6229-06-27 00:51:24.970117179	Unknown	NULL	NULL	6229	2018-03-08 23:04:59	54	54	6229-06-29
-6482-04-27 12:07:38.073915413	6482-04-26 10:04:34.073915413	Unknown	NULL	NULL	6482	2018-03-08 23:04:59	7	7	6483-04-27
+5966-07-09 03:30:50.597	5966-07-08 01:27:46.597	Unknown	NULL	NULL	5966	2018-03-08 23:04:59	30	30	5967-07-09
+6229-06-28 02:54:28.970117179	6229-06-27 00:51:24.970117179	Unknown	NULL	NULL	6229	2018-03-08 23:04:59	54	54	6230-06-28
+6482-04-27 12:07:38.073915413	6482-04-26 10:04:34.073915413	Unknown	NULL	NULL	6482	2018-03-08 23:04:59	7	7	6482-04-28
 6631-11-13 16:31:29.702202248	6631-11-12 14:28:25.702202248	Unknown	NULL	NULL	6631	2018-03-08 23:04:59	31	31	6631-11-14
 6705-09-28 18:27:28.000845672	6705-09-27 16:24:24.000845672	Unknown	NULL	NULL	6705	2018-03-08 23:04:59	27	NULL	6705-09-29
 6731-02-12 08:12:48.287783702	6731-02-11 06:09:44.287783702	Unknown	NULL	NULL	6731	2018-03-08 23:04:59	12	NULL	6731-02-13
-7160-12-02 06:00:24.81200852	7160-12-01 03:57:20.81200852	Unknown	NULL	NULL	7160	2018-03-08 23:04:59	0	NULL	7160-12-03
-7409-09-07 23:33:32.459349602	7409-09-06 21:30:28.459349602	Unknown	NULL	NULL	7409	2018-03-08 23:04:59	33	NULL	7410-09-07
+7160-12-02 06:00:24.81200852	7160-12-01 03:57:20.81200852	Unknown	NULL	NULL	7160	2018-03-08 23:04:59	0	NULL	7161-12-02
+7409-09-07 23:33:32.459349602	7409-09-06 21:30:28.459349602	Unknown	NULL	NULL	7409	2018-03-08 23:04:59	33	NULL	7409-09-08
 7503-06-23 23:14:17.486	7503-06-22 21:11:13.486	Unknown	NULL	NULL	7503	2018-03-08 23:04:59	14	NULL	7503-06-24
 8422-07-22 03:21:45.745036084	8422-07-21 01:18:41.745036084	Unknown	NULL	NULL	8422	2018-03-08 23:04:59	21	NULL	8422-07-23
 8521-01-16 20:42:05.668832388	8521-01-15 18:39:01.668832388	Unknown	NULL	NULL	8521	2018-03-08 23:04:59	42	NULL	8521-01-17
 9075-06-13 16:20:09.218517797	9075-06-12 14:17:05.218517797	Unknown	NULL	NULL	9075	2018-03-08 23:04:59	20	NULL	9075-06-14
 9209-11-11 04:08:58.223768453	9209-11-10 02:05:54.223768453	Unknown	NULL	NULL	9209	2018-03-08 23:04:59	8	NULL	9209-11-12
-9403-01-09 18:12:33.547	9403-01-08 16:09:29.547	Unknown	NULL	NULL	9403	2018-03-08 23:04:59	12	NULL	9404-01-09
+9403-01-09 18:12:33.547	9403-01-08 16:09:29.547	Unknown	NULL	NULL	9403	2018-03-08 23:04:59	12	NULL	9403-01-10

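The rows above that flip between a next-day value (e.g. 1973-04-18) and a next-year value (e.g. 1974-04-17) in the last column follow from the branch visible in both plans: (cast(<timestamp col> as double) % 500.0) > 100.0 chooses between VectorUDFDateAddColScalar(col 0:date, val 1) and val 365. Reverting the UTC computation shifts the epoch seconds produced by CastTimestampToDouble by the session-zone offset, which can flip that modulo test. A minimal sketch in Java, assuming the q-file test harness runs in US/Pacific (PST, UTC-8, on 1973-04-17):

  public class Mod500Branch {
    public static void main(String[] args) {
      // 1973-04-17 06:30:38.596784156 read as a UTC wall clock (HIVE-12192)
      // versus as a US/Pacific wall clock (reverted behavior):
      double utc = 103876238.596784156;  // epoch seconds, UTC reading
      double local = utc + 28800;        // same wall clock read as PST
      System.out.println(utc % 500 > 100);   // true  -> date_add(dt, 1)   -> 1973-04-18
      System.out.println(local % 500 > 100); // false -> date_add(dt, 365) -> 1974-04-17
    }
  }
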
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_data_types.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_data_types.q.out b/ql/src/test/results/clientpositive/llap/vector_data_types.q.out
index 6f61adc..a1d18cd 100644
--- a/ql/src/test/results/clientpositive/llap/vector_data_types.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_data_types.q.out
@@ -209,7 +209,7 @@ FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1korc_n1
 #### A masked pattern was here ####
--25838728092
+-17045922556
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION select t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i LIMIT 20
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION select t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i LIMIT 20
@@ -453,4 +453,4 @@ FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1korc_n1
 #### A masked pattern was here ####
--25838728092
+-17045922556

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out
index 8d8cbf5..b0e5787 100644
--- a/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out
@@ -1095,7 +1095,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_1
 #### A masked pattern was here ####
 NULL
-1970-01-01 00:00:17.29
+1969-12-31 16:00:17.29
 PREHOOK: query: drop table decimal_1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@decimal_1

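The change above is a rendering-zone change, not a change in the stored value: the decimal 17.29 cast to timestamp still denotes 17.29 seconds after the epoch, but the reverted code formats it in the session time zone (US/Pacific in Hive's test harness) instead of UTC. A minimal sketch of the two readings using java.time:

  import java.time.Instant;
  import java.time.ZoneId;
  import java.time.format.DateTimeFormatter;

  public class DecimalToTimestamp {
    public static void main(String[] args) {
      // 17.29 seconds after the epoch, as produced by casting 17.29 to timestamp
      Instant i = Instant.ofEpochMilli(17290L);
      DateTimeFormatter f = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SS");
      System.out.println(f.withZone(ZoneId.of("UTC")).format(i));                 // 1970-01-01 00:00:17.29
      System.out.println(f.withZone(ZoneId.of("America/Los_Angeles")).format(i)); // 1969-12-31 16:00:17.29
    }
  }
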
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out
index 8cd753c..8f0cc4d 100644
--- a/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out
@@ -2074,13 +2074,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:t:decimal(18,9)/DECIMAL_64, 1:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: 1355915539.1234567 (type: decimal(30,8))
+                    expressions: 1355944339.1234567 (type: decimal(30,8))
                     outputColumnNames: _col0
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [2]
-                        selectExpressions: ConstantVectorExpression(val 1355915539.1234567) -> 2:decimal(30,8)
+                        selectExpressions: ConstantVectorExpression(val 1355944339.1234567) -> 2:decimal(30,8)
                     Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
                     File Output Operator
                       compressed: false
@@ -2124,7 +2124,7 @@ POSTHOOK: query: select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_2
 #### A masked pattern was here ####
-1355915539.12345670
+1355944339.12345670
 PREHOOK: query: explain vectorization detail
 select cast(true as decimal) as c from decimal_2 order by c
 PREHOOK: type: QUERY

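Here the direction is reversed: casting the timestamp literal to decimal now yields the epoch seconds of '2012-12-19 11:12:19.1234567' read as a session-local wall clock (US/Pacific, UTC-8 in December) rather than as UTC, hence the 28800-second (8-hour) gap between 1355915539.1234567 and 1355944339.1234567. A minimal sketch of the two readings:

  import java.time.LocalDateTime;
  import java.time.ZoneId;
  import java.time.ZoneOffset;

  public class TimestampToDecimal {
    public static void main(String[] args) {
      LocalDateTime wallClock = LocalDateTime.parse("2012-12-19T11:12:19.1234567");
      // HIVE-12192 semantics: the wall clock denotes an instant in UTC
      System.out.println(wallClock.toEpochSecond(ZoneOffset.UTC));                          // 1355915539
      // Reverted semantics: the wall clock is read in the session zone
      System.out.println(wallClock.atZone(ZoneId.of("America/Los_Angeles")).toEpochSecond()); // 1355944339
    }
  }
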
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out
index 1c3f4b6..2414907 100644
--- a/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_cast.q.out
@@ -87,16 +87,16 @@ POSTHOOK: query: SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS D
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
--13326.0	528534767	true	1969-12-31 15:59:46.674	-13326.0000000000	528534767.00000000000000	1.00	-28813
--15813.0	528534767	true	1969-12-31 15:59:55.787	-15813.0000000000	528534767.00000000000000	1.00	-28804
--9566.0	528534767	true	1969-12-31 15:59:44.187	-9566.0000000000	528534767.00000000000000	1.00	-28816
-15007.0	528534767	true	1969-12-31 15:59:50.434	15007.0000000000	528534767.00000000000000	1.00	-28810
-7021.0	528534767	true	1969-12-31 16:00:15.007	7021.0000000000	528534767.00000000000000	1.00	-28785
-4963.0	528534767	true	1969-12-31 16:00:07.021	4963.0000000000	528534767.00000000000000	1.00	-28793
--7824.0	528534767	true	1969-12-31 16:00:04.963	-7824.0000000000	528534767.00000000000000	1.00	-28795
--15431.0	528534767	true	1969-12-31 15:59:52.176	-15431.0000000000	528534767.00000000000000	1.00	-28808
--15549.0	528534767	true	1969-12-31 15:59:44.569	-15549.0000000000	528534767.00000000000000	1.00	-28815
-5780.0	528534767	true	1969-12-31 15:59:44.451	5780.0000000000	528534767.00000000000000	1.00	-28816
+-13326.0	528534767	true	1969-12-31 15:59:46.674	-13326.0000000000	528534767.00000000000000	1.00	-13
+-15813.0	528534767	true	1969-12-31 15:59:55.787	-15813.0000000000	528534767.00000000000000	1.00	-4
+-9566.0	528534767	true	1969-12-31 15:59:44.187	-9566.0000000000	528534767.00000000000000	1.00	-16
+15007.0	528534767	true	1969-12-31 15:59:50.434	15007.0000000000	528534767.00000000000000	1.00	-10
+7021.0	528534767	true	1969-12-31 16:00:15.007	7021.0000000000	528534767.00000000000000	1.00	15
+4963.0	528534767	true	1969-12-31 16:00:07.021	4963.0000000000	528534767.00000000000000	1.00	7
+-7824.0	528534767	true	1969-12-31 16:00:04.963	-7824.0000000000	528534767.00000000000000	1.00	5
+-15431.0	528534767	true	1969-12-31 15:59:52.176	-15431.0000000000	528534767.00000000000000	1.00	-8
+-15549.0	528534767	true	1969-12-31 15:59:44.569	-15549.0000000000	528534767.00000000000000	1.00	-15
+5780.0	528534767	true	1969-12-31 15:59:44.451	5780.0000000000	528534767.00000000000000	1.00	-16
 PREHOOK: query: CREATE TABLE alltypes_small STORED AS TEXTFILE AS SELECT * FROM alltypesorc
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
@@ -210,13 +210,13 @@ POSTHOOK: query: SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS D
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypes_small
 #### A masked pattern was here ####
--13326.0	528534767	true	1969-12-31 15:59:46.674	-13326.0000000000	528534767.00000000000000	1.00	-28813
--15813.0	528534767	true	1969-12-31 15:59:55.787	-15813.0000000000	528534767.00000000000000	1.00	-28804
--9566.0	528534767	true	1969-12-31 15:59:44.187	-9566.0000000000	528534767.00000000000000	1.00	-28816
-15007.0	528534767	true	1969-12-31 15:59:50.434	15007.0000000000	528534767.00000000000000	1.00	-28810
-7021.0	528534767	true	1969-12-31 16:00:15.007	7021.0000000000	528534767.00000000000000	1.00	-28785
-4963.0	528534767	true	1969-12-31 16:00:07.021	4963.0000000000	528534767.00000000000000	1.00	-28793
--7824.0	528534767	true	1969-12-31 16:00:04.963	-7824.0000000000	528534767.00000000000000	1.00	-28795
--15431.0	528534767	true	1969-12-31 15:59:52.176	-15431.0000000000	528534767.00000000000000	1.00	-28808
--15549.0	528534767	true	1969-12-31 15:59:44.569	-15549.0000000000	528534767.00000000000000	1.00	-28815
-5780.0	528534767	true	1969-12-31 15:59:44.451	5780.0000000000	528534767.00000000000000	1.00	-28816
+-13326.0	528534767	true	1969-12-31 15:59:46.674	-13326.0000000000	528534767.00000000000000	1.00	-13
+-15813.0	528534767	true	1969-12-31 15:59:55.787	-15813.0000000000	528534767.00000000000000	1.00	-4
+-9566.0	528534767	true	1969-12-31 15:59:44.187	-9566.0000000000	528534767.00000000000000	1.00	-16
+15007.0	528534767	true	1969-12-31 15:59:50.434	15007.0000000000	528534767.00000000000000	1.00	-10
+7021.0	528534767	true	1969-12-31 16:00:15.007	7021.0000000000	528534767.00000000000000	1.00	15
+4963.0	528534767	true	1969-12-31 16:00:07.021	4963.0000000000	528534767.00000000000000	1.00	7
+-7824.0	528534767	true	1969-12-31 16:00:04.963	-7824.0000000000	528534767.00000000000000	1.00	5
+-15431.0	528534767	true	1969-12-31 15:59:52.176	-15431.0000000000	528534767.00000000000000	1.00	-8
+-15549.0	528534767	true	1969-12-31 15:59:44.569	-15549.0000000000	528534767.00000000000000	1.00	-15
+5780.0	528534767	true	1969-12-31 15:59:44.451	5780.0000000000	528534767.00000000000000	1.00	-16

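The last column above (apparently a numeric cast of ctimestamp1) shifts by exactly 28800 for the same reason: a timestamp cast to a numeric type now yields session-local epoch seconds. For the first row, the displayed wall clock 1969-12-31 15:59:46.674 is 13.326 seconds before the epoch read as US/Pacific, and 28813.326 seconds before it read as UTC; the integer conversion truncates toward zero. A minimal sketch, assuming a truncating cast:

  public class TimestampToInt {
    public static void main(String[] args) {
      double local = -13.326;      // wall clock read as US/Pacific (reverted)
      double utc = local - 28800;  // same wall clock read as UTC (HIVE-12192)
      System.out.println((int) local); // -13
      System.out.println((int) utc);   // -28813
    }
  }
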
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out
index 2e3c914..024ce07 100644
--- a/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out
@@ -165,16 +165,16 @@ LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_test_n1
 #### A masked pattern was here ####
-1836.44199584197700	-1166.02723492725400	0.8372697814834	245972.55810810255804469	5.6189189189	835	1000	NULL	835	true	1000.823076923077	835.6189	1000.823076923077	1970-01-01 00:13:55.618918918
-1856.13222453224620	-1178.52931392929240	0.8372449787014	251275.44324324968747899	4.5783783784	844	1011	NULL	844	true	1011.5538461538462	844.57837	1011.5538461538462	1970-01-01 00:14:04.578378378
-1858.75758835761550	-1180.19625779623100	0.8372417113669	251986.76756757564861519	5.7729729730	845	1012	NULL	845	true	1012.9846153846155	845.77295	1012.9846153846155	1970-01-01 00:14:05.772972973
-1862.69563409566930	-1182.69667359663860	0.8372368276345	253055.63918919969667286	7.5648648649	847	1015	NULL	847	true	1015.1307692307693	847.5649	1015.1307692307693	1970-01-01 00:14:07.564864864
-1883.69854469852330	-1196.03222453224660	0.8372111259286	258794.49324323677116559	7.1216216216	857	1026	NULL	857	true	1026.5769230769233	857.12164	1026.5769230769233	1970-01-01 00:14:17.121621621
-1886.32390852389240	-1197.69916839918480	0.8372079534582	259516.37432431944456816	8.3162162162	858	1028	NULL	858	true	1028.0076923076924	858.3162	1028.0076923076924	1970-01-01 00:14:18.316216216
-1887.63659043657700	-1198.53264033265400	0.8372063705322	259877.69189188782259834	8.9135135135	858	1028	NULL	858	true	1028.723076923077	858.9135	1028.723076923077	1970-01-01 00:14:18.913513513
-1895.51268191268460	-1203.53347193346920	0.8371969190171	262050.87567567649292835	2.4972972973	862	1033	NULL	862	true	1033.0153846153846	862.4973	1033.0153846153846	1970-01-01 00:14:22.497297297
-1909.95218295221550	-1212.70166320163100	0.8371797936946	266058.54729730725574014	9.0675675676	869	1040	NULL	869	true	1040.8846153846155	869.06757	1040.8846153846155	1970-01-01 00:14:29.067567567
-1913.89022869026920	-1215.20207900203840	0.8371751679996	267156.82702703945592392	0.8594594595	870	1043	NULL	870	true	1043.0307692307692	870.85944	1043.0307692307692	1970-01-01 00:14:30.859459459
+1836.44199584197700	-1166.02723492725400	0.8372697814834	245972.55810810255804469	5.6189189189	835	1000	NULL	835	true	1000.823076923077	835.6189	1000.823076923077	1969-12-31 16:13:55.618918918
+1856.13222453224620	-1178.52931392929240	0.8372449787014	251275.44324324968747899	4.5783783784	844	1011	NULL	844	true	1011.5538461538462	844.57837	1011.5538461538462	1969-12-31 16:14:04.578378378
+1858.75758835761550	-1180.19625779623100	0.8372417113669	251986.76756757564861519	5.7729729730	845	1012	NULL	845	true	1012.9846153846155	845.77295	1012.9846153846155	1969-12-31 16:14:05.772972973
+1862.69563409566930	-1182.69667359663860	0.8372368276345	253055.63918919969667286	7.5648648649	847	1015	NULL	847	true	1015.1307692307693	847.5649	1015.1307692307693	1969-12-31 16:14:07.564864864
+1883.69854469852330	-1196.03222453224660	0.8372111259286	258794.49324323677116559	7.1216216216	857	1026	NULL	857	true	1026.5769230769233	857.12164	1026.5769230769233	1969-12-31 16:14:17.121621621
+1886.32390852389240	-1197.69916839918480	0.8372079534582	259516.37432431944456816	8.3162162162	858	1028	NULL	858	true	1028.0076923076924	858.3162	1028.0076923076924	1969-12-31 16:14:18.316216216
+1887.63659043657700	-1198.53264033265400	0.8372063705322	259877.69189188782259834	8.9135135135	858	1028	NULL	858	true	1028.723076923077	858.9135	1028.723076923077	1969-12-31 16:14:18.913513513
+1895.51268191268460	-1203.53347193346920	0.8371969190171	262050.87567567649292835	2.4972972973	862	1033	NULL	862	true	1033.0153846153846	862.4973	1033.0153846153846	1969-12-31 16:14:22.497297297
+1909.95218295221550	-1212.70166320163100	0.8371797936946	266058.54729730725574014	9.0675675676	869	1040	NULL	869	true	1040.8846153846155	869.06757	1040.8846153846155	1969-12-31 16:14:29.067567567
+1913.89022869026920	-1215.20207900203840	0.8371751679996	267156.82702703945592392	0.8594594595	870	1043	NULL	870	true	1043.0307692307692	870.85944	1043.0307692307692	1969-12-31 16:14:30.859459459
 PREHOOK: query: SELECT SUM(HASH(*))
 FROM (SELECT cdecimal1 + cdecimal2 as c1, cdecimal1 - (2*cdecimal2) as c2, ((cdecimal1+2.34)/cdecimal2) as c3, (cdecimal1 * (cdecimal2/3.4)) as c4, cdecimal1 % 10 as c5, CAST(cdecimal1 AS INT) as c6, CAST(cdecimal2 AS SMALLINT) as c7, CAST(cdecimal2 AS TINYINT) as c8, CAST(cdecimal1 AS BIGINT) as c9, CAST (cdecimal1 AS BOOLEAN) as c10, CAST(cdecimal2 AS DOUBLE) as c11, CAST(cdecimal1 AS FLOAT) as c12, CAST(cdecimal2 AS STRING) as c13, CAST(cdecimal1 AS TIMESTAMP) as c14 FROM decimal_test_n1 WHERE cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdecimal2 > 1000 AND cdouble IS NOT NULL
 ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q
@@ -338,16 +338,16 @@ LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_test_small_n0
 #### A masked pattern was here ####
-1836.439	-1166.021	0.83727243660	245971.826152056	5.619	835	1000	NULL	835	true	1000.82	835.619	1000.82	1970-01-01 00:13:55.619
-1856.128	-1178.522	0.83724778805	251274.375364068	4.578	844	1011	NULL	844	true	1011.55	844.578	1011.55	1970-01-01 00:14:04.578
-1858.753	-1180.187	0.83724555273	251985.627412262	5.773	845	1012	NULL	845	true	1012.98	845.773	1012.98	1970-01-01 00:14:05.773
-1862.695	-1182.695	0.83723759518	253055.487729555	7.565	847	1015	NULL	847	true	1015.13	847.565	1015.13	1970-01-01 00:14:07.565
-1883.702	-1196.038	0.83720898517	258795.383063868	7.122	857	1026	NULL	857	true	1026.58	857.122	1026.58	1970-01-01 00:14:17.122
-1886.326	-1197.704	0.83720586376	259516.891214712	8.316	858	1028	NULL	858	true	1028.01	858.316	1028.01	1970-01-01 00:14:18.316
-1887.634	-1198.526	0.83720934754	259877.061889284	8.914	858	1028	NULL	858	true	1028.72	858.914	1028.72	1970-01-01 00:14:18.914
-1895.517	-1203.543	0.83719289075	262051.956361764	2.497	862	1033	NULL	862	true	1033.02	862.497	1033.02	1970-01-01 00:14:22.497
-1909.948	-1212.692	0.83718392130	266057.499543968	9.068	869	1040	NULL	869	true	1040.88	869.068	1040.88	1970-01-01 00:14:29.068
-1913.889	-1215.201	0.83717534491	267156.488691411	0.859	870	1043	NULL	870	true	1043.03	870.859	1043.03	1970-01-01 00:14:30.859
+1836.439	-1166.021	0.83727243660	245971.826152056	5.619	835	1000	NULL	835	true	1000.82	835.619	1000.82	1969-12-31 16:13:55.619
+1856.128	-1178.522	0.83724778805	251274.375364068	4.578	844	1011	NULL	844	true	1011.55	844.578	1011.55	1969-12-31 16:14:04.578
+1858.753	-1180.187	0.83724555273	251985.627412262	5.773	845	1012	NULL	845	true	1012.98	845.773	1012.98	1969-12-31 16:14:05.773
+1862.695	-1182.695	0.83723759518	253055.487729555	7.565	847	1015	NULL	847	true	1015.13	847.565	1015.13	1969-12-31 16:14:07.565
+1883.702	-1196.038	0.83720898517	258795.383063868	7.122	857	1026	NULL	857	true	1026.58	857.122	1026.58	1969-12-31 16:14:17.122
+1886.326	-1197.704	0.83720586376	259516.891214712	8.316	858	1028	NULL	858	true	1028.01	858.316	1028.01	1969-12-31 16:14:18.316
+1887.634	-1198.526	0.83720934754	259877.061889284	8.914	858	1028	NULL	858	true	1028.72	858.914	1028.72	1969-12-31 16:14:18.914
+1895.517	-1203.543	0.83719289075	262051.956361764	2.497	862	1033	NULL	862	true	1033.02	862.497	1033.02	1969-12-31 16:14:22.497
+1909.948	-1212.692	0.83718392130	266057.499543968	9.068	869	1040	NULL	869	true	1040.88	869.068	1040.88	1969-12-31 16:14:29.068
+1913.889	-1215.201	0.83717534491	267156.488691411	0.859	870	1043	NULL	870	true	1043.03	870.859	1043.03	1969-12-31 16:14:30.859
 PREHOOK: query: SELECT SUM(HASH(*))
 FROM (SELECT cdecimal1 + cdecimal2 as c1, cdecimal1 - (2*cdecimal2) as c2, ((cdecimal1+2.34)/cdecimal2) as c3, (cdecimal1 * (cdecimal2/3.4)) as c4, cdecimal1 % 10 as c5, CAST(cdecimal1 AS INT) as c6, CAST(cdecimal2 AS SMALLINT) as c7, CAST(cdecimal2 AS TINYINT) as c8, CAST(cdecimal1 AS BIGINT) as c9, CAST (cdecimal1 AS BOOLEAN) as c10, CAST(cdecimal2 AS DOUBLE) as c11, CAST(cdecimal1 AS FLOAT) as c12, CAST(cdecimal2 AS STRING) as c13, CAST(cdecimal1 AS TIMESTAMP) as c14 FROM decimal_test_small_n0 WHERE cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdecimal2 > 1000 AND cdouble IS NOT NULL
 ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_interval_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_interval_1.q.out b/ql/src/test/results/clientpositive/llap/vector_interval_1.q.out
index 43c5761..815b2a3 100644
--- a/ql/src/test/results/clientpositive/llap/vector_interval_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_interval_1.q.out
@@ -807,13 +807,13 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: ts (type: timestamp), (ts - ts) (type: interval_day_time), (TIMESTAMP'2001-01-01 01:02:03' - ts) (type: interval_day_time), (ts - TIMESTAMP'2001-01-01 01:02:03') (type: interval_day_time)
+                    expressions: ts (type: timestamp), (ts - ts) (type: interval_day_time), (TIMESTAMP'2001-01-01 01:02:03.0' - ts) (type: interval_day_time), (ts - TIMESTAMP'2001-01-01 01:02:03.0') (type: interval_day_time)
                     outputColumnNames: _col0, _col1, _col2, _col3
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [0, 5, 6, 7]
-                        selectExpressions: TimestampColSubtractTimestampColumn(col 0:timestamp, col 0:timestamp) -> 5:interval_day_time, TimestampScalarSubtractTimestampColumn(val 2001-01-01 01:02:03, col 0:timestamp) -> 6:interval_day_time, TimestampColSubtractTimestampScalar(col 0:timestamp, val 2001-01-01 01:02:03) -> 7:interval_day_time
+                        selectExpressions: TimestampColSubtractTimestampColumn(col 0:timestamp, col 0:timestamp) -> 5:interval_day_time, TimestampScalarSubtractTimestampColumn(val 2001-01-01 01:02:03.0, col 0:timestamp) -> 6:interval_day_time, TimestampColSubtractTimestampScalar(col 0:timestamp, val 2001-01-01 01:02:03.0) -> 7:interval_day_time
                     Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: timestamp)
@@ -937,7 +937,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [1, 5, 6, 7]
-                        selectExpressions: DateColSubtractDateColumn(col 1:date, col 1:date) -> 5:interval_day_time, DateScalarSubtractDateColumn(val 2001-01-01, col 1:date) -> 6:interval_day_time, DateColSubtractDateScalar(col 1:date, val 2001-01-01) -> 7:interval_day_time
+                        selectExpressions: DateColSubtractDateColumn(col 1:date, col 1:date) -> 5:interval_day_time, DateScalarSubtractDateColumn(val 2001-01-01 00:00:00.0, col 1:date) -> 6:interval_day_time, DateColSubtractDateScalar(col 1:date, val 2001-01-01 00:00:00.0) -> 7:interval_day_time
                     Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: date)
@@ -1061,13 +1061,13 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: dt (type: date), (ts - dt) (type: interval_day_time), (TIMESTAMP'2001-01-01 01:02:03' - dt) (type: interval_day_time), (ts - DATE'2001-01-01') (type: interval_day_time), (dt - ts) (type: interval_day_time), (dt - TIMESTAMP'2001-01-01 01:02:03') (type: interval_day_time), (DATE'2001-01-01' - ts) (type: interval_day_time)
+                    expressions: dt (type: date), (ts - dt) (type: interval_day_time), (TIMESTAMP'2001-01-01 01:02:03.0' - dt) (type: interval_day_time), (ts - DATE'2001-01-01') (type: interval_day_time), (dt - ts) (type: interval_day_time), (dt - TIMESTAMP'2001-01-01 01:02:03.0') (type: interval_day_time), (DATE'2001-01-01' - ts) (type: interval_day_time)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [1, 5, 6, 7, 8, 9, 10]
-                        selectExpressions: TimestampColSubtractDateColumn(col 0:timestamp, col 1:date) -> 5:interval_day_time, TimestampScalarSubtractDateColumn(val 2001-01-01 01:02:03, col 1:date) -> 6:interval_day_time, TimestampColSubtractDateScalar(col 0:timestamp, val 2001-01-01) -> 7:interval_day_time, DateColSubtractTimestampColumn(col 1:date, col 0:timestamp) -> 8:interval_day_time, DateColSubtractTimestampScalar(col 1:date, val 2001-01-01 01:02:03) -> 9:interval_day_time, DateScalarSubtractTimestampColumn(val 2001-01-01, col 0:timestamp) -> 10:interval_day_time
+                        selectExpressions: TimestampColSubtractDateColumn(col 0:timestamp, col 1:date) -> 5:interval_day_time, TimestampScalarSubtractDateColumn(val 2001-01-01 01:02:03.0, col 1:date) -> 6:interval_day_time, TimestampColSubtractDateScalar(col 0:timestamp, val 2001-01-01 00:00:00.0) -> 7:interval_day_time, DateColSubtractTimestampColumn(col 1:date, col 0:timestamp) -> 8:interval_day_time, DateColSubtractTimestampScalar(col 1:date, val 2001-01-01 01:02:03.0) -> 9:interval_day_time, DateScalarSubtractTimestampColumn(val 2001-01-01 00:00:00.0, col 0:timestamp) -> 10:interval_day_time
                     Statistics: Num rows: 2 Data size: 192 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: date)

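Beyond the zone shift, the interval plans above change only how timestamp literals are rendered: the reverted output prints TIMESTAMP'2001-01-01 01:02:03.0' and renders dates promoted to timestamps as 2001-01-01 00:00:00.0, presumably because the restored java.sql.Timestamp.toString() always keeps at least one fractional digit. A one-line illustration of that rendering:

  import java.sql.Timestamp;

  public class LiteralRendering {
    public static void main(String[] args) {
      // java.sql.Timestamp keeps a trailing ".0" when there are no nanos
      System.out.println(Timestamp.valueOf("2001-01-01 01:02:03")); // 2001-01-01 01:02:03.0
    }
  }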

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out b/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out
index 45ae532..8f5ce87 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_casts.q.out
@@ -183,7 +183,7 @@ STAGE PLANS:
                           className: VectorSelectOperator
                           native: true
                           projectedOutputColumnNums: [13, 14, 15, 16, 17, 18, 10, 20, 19, 21, 0, 1, 2, 3, 22, 23, 10, 24, 25, 27, 28, 29, 30, 31, 32, 33, 34, 4, 5, 35, 36, 37, 38, 39, 5, 41, 43, 45, 47, 48, 49, 51, 54, 55, 8, 56, 57, 26, 58, 59, 60, 61, 62, 63, 64, 65, 6, 67, 68, 69, 70, 66, 73]
-                          selectExpressions: CastLongToBooleanViaLongToLong(col 0:tinyint) -> 13:boolean, CastLongToBooleanViaLongToLong(col 1:smallint) -> 14:boolean, CastLongToBooleanViaLongToLong(col 2:int) -> 15:boolean, CastLongToBooleanViaLongToLong(col 3:bigint) -> 16:boolean, CastDoubleToBooleanViaDoubleToLong(col 4:float) -> 17:boolean, CastDoubleToBooleanViaDoubleToLong(col 5:double) -> 18:boolean, CastLongToBooleanViaLongToLong(col 19:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 19:bigint) -> 20:boolean, CastTimestampToBoolean(col 8:timestamp) -> 19:boolean, CastStringToBoolean(col 6) -> 21:boolean, CastDoubleToLong(col 4:float) -> 22:int, CastDoubleToLong(col 5:double) -> 23:int, CastTimestampToLong(col 8:timestamp) -> 24:int, CastStringToLong(col 6:string) -> 25:int, CastStringToLong(col 26:string)(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 27:int, CastDoubleToLong(col 4:float) -> 28:tinyint, CastDoubleToLong(col 4:float) -> 29:smallint, CastDoubleToLong(col 4:float) -> 30:bigint, CastLongToDouble(col 0:tinyint) -> 31:double, CastLongToDouble(col 1:smallint) -> 32:double, CastLongToDouble(col 2:int) -> 33:double, CastLongToDouble(col 3:bigint) -> 34:double, CastLongToDouble(col 10:boolean) -> 35:double, CastTimestampToDouble(col 8:timestamp) -> 36:double, CastStringToDouble(col 6:string) -> 37:double, CastStringToDouble(col 26:string)(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 38:double, CastLongToFloatViaLongToDouble(col 2:int) -> 39:float, CastMillisecondsLongToTimestamp(col 0:tinyint) -> 41:timestamp, CastMillisecondsLongToTimestamp(col 1:smallint) -> 43:timestamp, CastMillisecondsLongToTimestamp(col 2:int) -> 45:timestamp, CastMillisecondsLongToTimestamp(col 3:bigint) -> 47:timestamp, CastDoubleToTimestamp(col 4:float) -> 48:timestamp, CastDoubleToTimestamp(col 5:double) -> 49:timestamp, CastMillisecondsLongToTimestamp(col 10:boolean) -> 51:timestamp, CastMillisecondsLongToTimestamp(col 52:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 52:bigint) -> 54:timestamp, CastDateToTimestamp(col 52:date)(children: CastTimestampToDate(col 8:timestamp) -> 52:date) -> 55:timestamp, VectorUDFAdaptor(CAST( cstring1 AS TIMESTAMP)) -> 56:timestamp, VectorUDFAdaptor(CAST( substr(cstring1, 1, 1) AS TIMESTAMP))(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 57:timestamp, CastLongToString(col 0:tinyint) -> 26:string, CastLongToString(col 1:smallint) -> 58:string, CastLongToString(col 2:int) -> 59:string, CastLongToString(col 3:bigint) -> 60:string, CastFloatToString(col 4:float) -> 61:string, CastDoubleToString(col 5:double) -> 62:string, CastBooleanToStringViaLongToString(col 10:boolean) -> 63:string, CastLongToString(col 52:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 52:bigint) -> 64:string, CastTimestampToString(col 8:timestamp) -> 65:string, CastStringGroupToString(col 66:char(10))(children: CastStringGroupToChar(col 6:string, maxLength 10) -> 66:char(10)) -> 67:string, CastStringGroupToString(col 66:varchar(10))(children: CastStringGroupToVarChar(col 6:string, maxLength 10) -> 66:varchar(10)) -> 68:string, CastLongToFloatViaLongToDouble(col 52:int)(children: CastDoubleToLong(col 4:float) -> 52:int) -> 69:float, CastLongToDouble(col 52:int)(children: LongColMultiplyLongScalar(col 2:int, val 2) -> 52:int) -> 70:double, CastDoubleToString(col 71:double)(children: FuncSinDoubleToDouble(col 4:float) -> 71:double) -> 66:string, DoubleColAddDoubleColumn(col 71:double, col 72:double)(children: col 71:float, CastLongToDouble(col 10:boolean) -> 72:double) -> 73:double
+                          selectExpressions: CastLongToBooleanViaLongToLong(col 0:tinyint) -> 13:boolean, CastLongToBooleanViaLongToLong(col 1:smallint) -> 14:boolean, CastLongToBooleanViaLongToLong(col 2:int) -> 15:boolean, CastLongToBooleanViaLongToLong(col 3:bigint) -> 16:boolean, CastDoubleToBooleanViaDoubleToLong(col 4:float) -> 17:boolean, CastDoubleToBooleanViaDoubleToLong(col 5:double) -> 18:boolean, CastLongToBooleanViaLongToLong(col 19:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 19:bigint) -> 20:boolean, CastTimestampToBoolean(col 8:timestamp) -> 19:boolean, CastStringToBoolean(col 6) -> 21:boolean, CastDoubleToLong(col 4:float) -> 22:int, CastDoubleToLong(col 5:double) -> 23:int, CastTimestampToLong(col 8:timestamp) -> 24:int, CastStringToLong(col 6:string) -> 25:int, CastStringToLong(col 26:string)(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 27:int, CastDoubleToLong(col 4:float) -> 28:tinyint, CastDoubleToLong(col 4:float) -> 29:smallint, CastDoubleToLong(col 4:float) -> 30:bigint, CastLongToDouble(col 0:tinyint) -> 31:double, CastLongToDouble(col 1:smallint) -> 32:double, CastLongToDouble(col 2:int) -> 33:double, CastLongToDouble(col 3:bigint) -> 34:double, CastLongToDouble(col 10:boolean) -> 35:double, CastTimestampToDouble(col 8:timestamp) -> 36:double, CastStringToDouble(col 6:string) -> 37:double, CastStringToDouble(col 26:string)(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 38:double, CastLongToFloatViaLongToDouble(col 2:int) -> 39:float, CastMillisecondsLongToTimestamp(col 0:tinyint) -> 41:timestamp, CastMillisecondsLongToTimestamp(col 1:smallint) -> 43:timestamp, CastMillisecondsLongToTimestamp(col 2:int) -> 45:timestamp, CastMillisecondsLongToTimestamp(col 3:bigint) -> 47:timestamp, CastDoubleToTimestamp(col 4:float) -> 48:timestamp, CastDoubleToTimestamp(col 5:double) -> 49:timestamp, CastMillisecondsLongToTimestamp(col 10:boolean) -> 51:timestamp, CastMillisecondsLongToTimestamp(col 52:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 52:bigint) -> 54:timestamp, CastDateToTimestamp(col 52:date)(children: CastTimestampToDate(col 8:timestamp) -> 52:date) -> 55:timestamp, VectorUDFAdaptor(CAST( cstring1 AS TIMESTAMP)) -> 56:timestamp, VectorUDFAdaptor(CAST( substr(cstring1, 1, 1) AS TIMESTAMP))(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 57:timestamp, CastLongToString(col 0:tinyint) -> 26:string, CastLongToString(col 1:smallint) -> 58:string, CastLongToString(col 2:int) -> 59:string, CastLongToString(col 3:bigint) -> 60:string, CastFloatToString(col 4:float) -> 61:string, CastDoubleToString(col 5:double) -> 62:string, CastBooleanToStringViaLongToString(col 10:boolean) -> 63:string, CastLongToString(col 52:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 52:bigint) -> 64:string, VectorUDFAdaptor(UDFToString(ctimestamp1)) -> 65:string, CastStringGroupToString(col 66:char(10))(children: CastStringGroupToChar(col 6:string, maxLength 10) -> 66:char(10)) -> 67:string, CastStringGroupToString(col 66:varchar(10))(children: CastStringGroupToVarChar(col 6:string, maxLength 10) -> 66:varchar(10)) -> 68:string, CastLongToFloatViaLongToDouble(col 52:int)(children: CastDoubleToLong(col 4:float) -> 52:int) -> 69:float, CastLongToDouble(col 52:int)(children: LongColMultiplyLongScalar(col 2:int, val 2) -> 52:int) -> 70:double, CastDoubleToString(col 71:double)(children: FuncSinDoubleToDouble(col 4:float) -> 71:double) -> 66:string, DoubleColAddDoubleColumn(col 71:double, col 72:double)(children: col 71:float, CastLongToDouble(col 10:boolean) -> 72:double) -> 73:double
                       Statistics: Num rows: 6144 Data size: 16362860 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
@@ -371,29 +371,29 @@ where cbigint % 250 = 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-true	NULL	true	true	true	NULL	false	false	true	true	-51	NULL	773600971	1053923250	-51	NULL	0	-28792	NULL	2	-51	-51	-51	-51.0	NULL	7.73600971E8	1.05392325E9	-51.0	NULL	0.0	-28791.549	NULL	2.0	7.7360096E8	NULL	1969-12-31 23:59:59.949	NULL	1970-01-09 22:53:20.971	1970-01-13 04:45:23.25	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:08.451	NULL	NULL	-51	NULL	773600971	1053923250	-51.0	NULL	FALSE	0	1969-12-31 16:00:08.451	2yK4Bx76O	2yK4Bx76O	2yK4Bx76O	-51.0	1.547201942E9	-0.6702291758433747	7.7360096E8
-true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	-102936434	-1312782750	8	NULL	0	-28785	NULL	NULL	8	8	8	8.0	NULL	-1.02936434E8	-1.31278275E9	8.0	NULL	0.0	-28784.108	NULL	NULL	-1.02936432E8	NULL	1970-01-01 00:00:00.008	NULL	1969-12-30 19:24:23.566	1969-12-16 19:20:17.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	-102936434	-1312782750	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	eJROSNhugc3kQR7Pb	eJROSNhugc	eJROSNhugc	8.0	-2.05872868E8	0.9893582466233818	-1.02936432E8
-true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	-661621138	-931392750	8	NULL	0	-28785	NULL	NULL	8	8	8	8.0	NULL	-6.61621138E8	-9.3139275E8	8.0	NULL	0.0	-28784.108	NULL	NULL	-6.6162112E8	NULL	1970-01-01 00:00:00.008	NULL	1969-12-24 08:12:58.862	1969-12-21 05:16:47.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	-661621138	-931392750	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	L15l8i5k558tBcDV20	L15l8i5k55	L15l8i5k55	8.0	-1.323242276E9	0.9893582466233818	-6.6162112E8
-true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	-669632311	1588591250	8	NULL	0	-28785	NULL	3	8	8	8	8.0	NULL	-6.69632311E8	1.58859125E9	8.0	NULL	0.0	-28784.108	NULL	3.0	-6.6963232E8	NULL	1970-01-01 00:00:00.008	NULL	1969-12-24 05:59:27.689	1970-01-19 09:16:31.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	-669632311	1588591250	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	3r3sDvfUkG0yTP3LnX5mNQRr	3r3sDvfUkG	3r3sDvfUkG	8.0	-1.339264622E9	0.9893582466233818	-6.6963232E8
-true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	805179664	868161500	8	NULL	0	-28785	NULL	NULL	8	8	8	8.0	NULL	8.05179664E8	8.681615E8	8.0	NULL	0.0	-28784.108	NULL	NULL	8.0517965E8	NULL	1970-01-01 00:00:00.008	NULL	1970-01-10 07:39:39.664	1970-01-11 01:09:21.5	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	805179664	868161500	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	e005B5q	e005B5q	e005B5q	8.0	1.610359328E9	0.9893582466233818	8.05179648E8
-true	NULL	true	true	true	NULL	true	false	true	true	-51	NULL	747553882	-1930467250	-51	NULL	1	-28792	NULL	NULL	-51	-51	-51	-51.0	NULL	7.47553882E8	-1.93046725E9	-51.0	NULL	1.0	-28791.549	NULL	NULL	7.4755386E8	NULL	1969-12-31 23:59:59.949	NULL	1970-01-09 15:39:13.882	1969-12-09 15:45:32.75	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:08.451	NULL	NULL	-51	NULL	747553882	-1930467250	-51.0	NULL	TRUE	0	1969-12-31 16:00:08.451	q8M86Fx0r	q8M86Fx0r	q8M86Fx0r	-51.0	1.495107764E9	-0.6702291758433747	7.47553857E8
-true	NULL	true	true	true	NULL	true	false	true	true	11	NULL	-335450417	1233327000	11	NULL	1	-28798	NULL	NULL	11	11	11	11.0	NULL	-3.35450417E8	1.233327E9	11.0	NULL	1.0	-28797.649	NULL	NULL	-3.35450432E8	NULL	1970-01-01 00:00:00.011	NULL	1969-12-28 02:49:09.583	1970-01-15 06:35:27	1970-01-01 00:00:11	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:02.351	NULL	NULL	11	NULL	-335450417	1233327000	11.0	NULL	TRUE	0	1969-12-31 16:00:02.351	dOYnqgaXoJ1P3ERwxe5N7	dOYnqgaXoJ	dOYnqgaXoJ	11.0	-6.70900834E8	-0.9999902065507035	-3.35450431E8
-true	NULL	true	true	true	NULL	true	false	true	true	11	NULL	-64615982	1803053750	11	NULL	1	-28798	NULL	8	11	11	11	11.0	NULL	-6.4615982E7	1.80305375E9	11.0	NULL	1.0	-28797.649	NULL	8.0	-6.4615984E7	NULL	1970-01-01 00:00:00.011	NULL	1969-12-31 06:03:04.018	1970-01-21 20:50:53.75	1970-01-01 00:00:11	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:02.351	NULL	NULL	11	NULL	-64615982	1803053750	11.0	NULL	TRUE	0	1969-12-31 16:00:02.351	8J5OB7K26PEV7kdbeHr3	8J5OB7K26P	8J5OB7K26P	11.0	-1.29231964E8	-0.9999902065507035	-6.4615983E7
-true	NULL	true	true	true	NULL	true	false	true	true	8	NULL	890988972	-1862301000	8	NULL	1	-28785	NULL	NULL	8	8	8	8.0	NULL	8.90988972E8	-1.862301E9	8.0	NULL	1.0	-28784.108	NULL	NULL	8.9098899E8	NULL	1970-01-01 00:00:00.008	NULL	1970-01-11 07:29:48.972	1969-12-10 10:41:39	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	890988972	-1862301000	8.0	NULL	TRUE	0	1969-12-31 16:00:15.892	XylAH4	XylAH4	XylAH4	8.0	1.781977944E9	0.9893582466233818	8.90988993E8
-true	NULL	true	true	true	NULL	true	false	true	true	8	NULL	930867246	1205399250	8	NULL	1	-28785	NULL	NULL	8	8	8	8.0	NULL	9.30867246E8	1.20539925E9	8.0	NULL	1.0	-28784.108	NULL	NULL	9.3086726E8	NULL	1970-01-01 00:00:00.008	NULL	1970-01-11 18:34:27.246	1970-01-14 22:49:59.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	930867246	1205399250	8.0	NULL	TRUE	0	1969-12-31 16:00:15.892	c1V8o1A	c1V8o1A	c1V8o1A	8.0	1.861734492E9	0.9893582466233818	9.30867265E8
-true	true	NULL	true	true	true	NULL	false	true	NULL	-14	-7196	NULL	-1552199500	-14	-7196	NULL	-28789	NULL	NULL	-14	-14	-14	-14.0	-7196.0	NULL	-1.5521995E9	-14.0	-7196.0	NULL	-28788.935	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.986	1969-12-31 23:59:52.804	NULL	1969-12-14 00:50:00.5	1969-12-31 23:59:46	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:11.065	NULL	NULL	-14	-7196	NULL	-1552199500	-14.0	-7196.0	NULL	0	1969-12-31 16:00:11.065	NULL	NULL	NULL	-14.0	NULL	-0.9906073556948704	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-21	-7196	NULL	1542429000	-21	-7196	NULL	-28805	NULL	NULL	-21	-21	-21	-21.0	-7196.0	NULL	1.542429E9	-21.0	-7196.0	NULL	-28804.1	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.979	1969-12-31 23:59:52.804	NULL	1970-01-18 20:27:09	1969-12-31 23:59:39	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:55.9	NULL	NULL	-21	-7196	NULL	1542429000	-21.0	-7196.0	NULL	0	1969-12-31 15:59:55.9	NULL	NULL	NULL	-21.0	NULL	-0.8366556385360561	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-24	-7196	NULL	829111000	-24	-7196	NULL	-28807	NULL	NULL	-24	-24	-24	-24.0	-7196.0	NULL	8.29111E8	-24.0	-7196.0	NULL	-28806.855	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.976	1969-12-31 23:59:52.804	NULL	1970-01-10 14:18:31	1969-12-31 23:59:36	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:53.145	NULL	NULL	-24	-7196	NULL	829111000	-24.0	-7196.0	NULL	0	1969-12-31 15:59:53.145	NULL	NULL	NULL	-24.0	NULL	0.9055783620066238	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-30	-200	NULL	1429852250	-30	-200	NULL	-28788	NULL	NULL	-30	-30	-30	-30.0	-200.0	NULL	1.42985225E9	-30.0	-200.0	NULL	-28787.065	NULL	NULL	NULL	-200.0	1969-12-31 23:59:59.97	1969-12-31 23:59:59.8	NULL	1970-01-17 13:10:52.25	1969-12-31 23:59:30	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:12.935	NULL	NULL	-30	-200	NULL	1429852250	-30.0	-200.0	NULL	0	1969-12-31 16:00:12.935	NULL	NULL	NULL	-30.0	NULL	0.9880316240928618	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	-2006216750	-36	-200	NULL	-28815	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	-2.00621675E9	-36.0	-200.0	NULL	-28814.252	NULL	NULL	NULL	-200.0	1969-12-31 23:59:59.964	1969-12-31 23:59:59.8	NULL	1969-12-08 18:43:03.25	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:45.748	NULL	NULL	-36	-200	NULL	-2006216750	-36.0	-200.0	NULL	0	1969-12-31 15:59:45.748	NULL	NULL	NULL	-36.0	NULL	0.9917788534431158	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	1599879000	-36	-200	NULL	-28807	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	1.599879E9	-36.0	-200.0	NULL	-28806.183	NULL	NULL	NULL	-200.0	1969-12-31 23:59:59.964	1969-12-31 23:59:59.8	NULL	1970-01-19 12:24:39	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:53.817	NULL	NULL	-36	-200	NULL	1599879000	-36.0	-200.0	NULL	0	1969-12-31 15:59:53.817	NULL	NULL	NULL	-36.0	NULL	0.9917788534431158	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-38	15601	NULL	-1858689000	-38	15601	NULL	-28802	NULL	NULL	-38	-38	-38	-38.0	15601.0	NULL	-1.858689E9	-38.0	15601.0	NULL	-28801.386	NULL	NULL	NULL	15601.0	1969-12-31 23:59:59.962	1970-01-01 00:00:15.601	NULL	1969-12-10 11:41:51	1969-12-31 23:59:22	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:58.614	NULL	NULL	-38	15601	NULL	-1858689000	-38.0	15601.0	NULL	0	1969-12-31 15:59:58.614	NULL	NULL	NULL	-38.0	NULL	-0.2963685787093853	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-5	15601	NULL	612416000	-5	15601	NULL	-28796	NULL	NULL	-5	-5	-5	-5.0	15601.0	NULL	6.12416E8	-5.0	15601.0	NULL	-28795.321	NULL	NULL	NULL	15601.0	1969-12-31 23:59:59.995	1970-01-01 00:00:15.601	NULL	1970-01-08 02:06:56	1969-12-31 23:59:55	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:04.679	NULL	NULL	-5	15601	NULL	612416000	-5.0	15601.0	NULL	0	1969-12-31 16:00:04.679	NULL	NULL	NULL	-5.0	NULL	0.9589242746631385	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-50	-7196	NULL	-1031187250	-50	-7196	NULL	-28806	NULL	NULL	-50	-50	-50	-50.0	-7196.0	NULL	-1.03118725E9	-50.0	-7196.0	NULL	-28805.267	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.95	1969-12-31 23:59:52.804	NULL	1969-12-20 01:33:32.75	1969-12-31 23:59:10	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:54.733	NULL	NULL	-50	-7196	NULL	-1031187250	-50.0	-7196.0	NULL	0	1969-12-31 15:59:54.733	NULL	NULL	NULL	-50.0	NULL	0.26237485370392877	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-59	-7196	NULL	-1604890000	-59	-7196	NULL	-28787	NULL	NULL	-59	-59	-59	-59.0	-7196.0	NULL	-1.60489E9	-59.0	-7196.0	NULL	-28786.85	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.941	1969-12-31 23:59:52.804	NULL	1969-12-13 10:11:50	1969-12-31 23:59:01	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:13.15	NULL	NULL	-59	-7196	NULL	-1604890000	-59.0	-7196.0	NULL	0	1969-12-31 16:00:13.15	NULL	NULL	NULL	-59.0	NULL	-0.6367380071391379	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-60	-7196	NULL	1516314750	-60	-7196	NULL	-28808	NULL	NULL	-60	-60	-60	-60.0	-7196.0	NULL	1.51631475E9	-60.0	-7196.0	NULL	-28807.592	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.94	1969-12-31 23:59:52.804	NULL	1970-01-18 13:11:54.75	1969-12-31 23:59:00	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:52.408	NULL	NULL	-60	-7196	NULL	1516314750	-60.0	-7196.0	NULL	0	1969-12-31 15:59:52.408	NULL	NULL	NULL	-60.0	NULL	0.3048106211022167	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-8	-7196	NULL	-1849991500	-8	-7196	NULL	-28797	NULL	NULL	-8	-8	-8	-8.0	-7196.0	NULL	-1.8499915E9	-8.0	-7196.0	NULL	-28796.864	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.992	1969-12-31 23:59:52.804	NULL	1969-12-10 14:06:48.5	1969-12-31 23:59:52	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:03.136	NULL	NULL	-8	-7196	NULL	-1849991500	-8.0	-7196.0	NULL	0	1969-12-31 16:00:03.136	NULL	NULL	NULL	-8.0	NULL	-0.9893582466233818	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	20	15601	NULL	-362433250	20	15601	NULL	-28815	NULL	NULL	20	20	20	20.0	15601.0	NULL	-3.6243325E8	20.0	15601.0	NULL	-28814.871	NULL	NULL	NULL	15601.0	1970-01-01 00:00:00.02	1970-01-01 00:00:15.601	NULL	1969-12-27 19:19:26.75	1970-01-01 00:00:20	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:45.129	NULL	NULL	20	15601	NULL	-362433250	20.0	15601.0	NULL	0	1969-12-31 15:59:45.129	NULL	NULL	NULL	20.0	NULL	0.9129452507276277	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	48	15601	NULL	-795361000	48	15601	NULL	-28810	NULL	NULL	48	48	48	48.0	15601.0	NULL	-7.95361E8	48.0	15601.0	NULL	-28809.765	NULL	NULL	NULL	15601.0	1970-01-01 00:00:00.048	1970-01-01 00:00:15.601	NULL	1969-12-22 19:03:59	1970-01-01 00:00:48	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:50.235	NULL	NULL	48	15601	NULL	-795361000	48.0	15601.0	NULL	0	1969-12-31 15:59:50.235	NULL	NULL	NULL	48.0	NULL	-0.7682546613236668	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	5	-7196	NULL	-1015607500	5	-7196	NULL	-28790	NULL	NULL	5	5	5	5.0	-7196.0	NULL	-1.0156075E9	5.0	-7196.0	NULL	-28789.027	NULL	NULL	NULL	-7196.0	1970-01-01 00:00:00.005	1969-12-31 23:59:52.804	NULL	1969-12-20 05:53:12.5	1970-01-01 00:00:05	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:10.973	NULL	NULL	5	-7196	NULL	-1015607500	5.0	-7196.0	NULL	0	1969-12-31 16:00:10.973	NULL	NULL	NULL	5.0	NULL	-0.9589242746631385	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	59	-7196	NULL	-1137754500	59	-7196	NULL	-28790	NULL	NULL	59	59	59	59.0	-7196.0	NULL	-1.1377545E9	59.0	-7196.0	NULL	-28789.044	NULL	NULL	NULL	-7196.0	1970-01-01 00:00:00.059	1969-12-31 23:59:52.804	NULL	1969-12-18 19:57:25.5	1970-01-01 00:00:59	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:10.956	NULL	NULL	59	-7196	NULL	-1137754500	59.0	-7196.0	NULL	0	1969-12-31 16:00:10.956	NULL	NULL	NULL	59.0	NULL	0.6367380071391379	NULL
+true	NULL	true	true	true	NULL	false	false	true	true	-51	NULL	773600971	1053923250	-51	NULL	0	8	NULL	2	-51	-51	-51	-51.0	NULL	7.73600971E8	1.05392325E9	-51.0	NULL	0.0	8.451	NULL	2.0	7.7360096E8	NULL	1969-12-31 15:59:59.949	NULL	1970-01-09 14:53:20.971	1970-01-12 20:45:23.25	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:08.451	NULL	NULL	-51	NULL	773600971	1053923250	-51.0	NULL	FALSE	0	1969-12-31 16:00:08.451	2yK4Bx76O	2yK4Bx76O	2yK4Bx76O	-51.0	1.547201942E9	-0.6702291758433747	7.7360096E8
+true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	-102936434	-1312782750	8	NULL	0	15	NULL	NULL	8	8	8	8.0	NULL	-1.02936434E8	-1.31278275E9	8.0	NULL	0.0	15.892	NULL	NULL	-1.02936432E8	NULL	1969-12-31 16:00:00.008	NULL	1969-12-30 11:24:23.566	1969-12-16 11:20:17.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	-102936434	-1312782750	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	eJROSNhugc3kQR7Pb	eJROSNhugc	eJROSNhugc	8.0	-2.05872868E8	0.9893582466233818	-1.02936432E8
+true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	-661621138	-931392750	8	NULL	0	15	NULL	NULL	8	8	8	8.0	NULL	-6.61621138E8	-9.3139275E8	8.0	NULL	0.0	15.892	NULL	NULL	-6.6162112E8	NULL	1969-12-31 16:00:00.008	NULL	1969-12-24 00:12:58.862	1969-12-20 21:16:47.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	-661621138	-931392750	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	L15l8i5k558tBcDV20	L15l8i5k55	L15l8i5k55	8.0	-1.323242276E9	0.9893582466233818	-6.6162112E8
+true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	-669632311	1588591250	8	NULL	0	15	NULL	3	8	8	8	8.0	NULL	-6.69632311E8	1.58859125E9	8.0	NULL	0.0	15.892	NULL	3.0	-6.6963232E8	NULL	1969-12-31 16:00:00.008	NULL	1969-12-23 21:59:27.689	1970-01-19 01:16:31.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	-669632311	1588591250	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	3r3sDvfUkG0yTP3LnX5mNQRr	3r3sDvfUkG	3r3sDvfUkG	8.0	-1.339264622E9	0.9893582466233818	-6.6963232E8
+true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	805179664	868161500	8	NULL	0	15	NULL	NULL	8	8	8	8.0	NULL	8.05179664E8	8.681615E8	8.0	NULL	0.0	15.892	NULL	NULL	8.0517965E8	NULL	1969-12-31 16:00:00.008	NULL	1970-01-09 23:39:39.664	1970-01-10 17:09:21.5	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	805179664	868161500	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	e005B5q	e005B5q	e005B5q	8.0	1.610359328E9	0.9893582466233818	8.05179648E8
+true	NULL	true	true	true	NULL	true	false	true	true	-51	NULL	747553882	-1930467250	-51	NULL	1	8	NULL	NULL	-51	-51	-51	-51.0	NULL	7.47553882E8	-1.93046725E9	-51.0	NULL	1.0	8.451	NULL	NULL	7.4755386E8	NULL	1969-12-31 15:59:59.949	NULL	1970-01-09 07:39:13.882	1969-12-09 07:45:32.75	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:08.451	NULL	NULL	-51	NULL	747553882	-1930467250	-51.0	NULL	TRUE	0	1969-12-31 16:00:08.451	q8M86Fx0r	q8M86Fx0r	q8M86Fx0r	-51.0	1.495107764E9	-0.6702291758433747	7.47553857E8
+true	NULL	true	true	true	NULL	true	false	true	true	11	NULL	-335450417	1233327000	11	NULL	1	2	NULL	NULL	11	11	11	11.0	NULL	-3.35450417E8	1.233327E9	11.0	NULL	1.0	2.351	NULL	NULL	-3.35450432E8	NULL	1969-12-31 16:00:00.011	NULL	1969-12-27 18:49:09.583	1970-01-14 22:35:27	1969-12-31 16:00:11	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:02.351	NULL	NULL	11	NULL	-335450417	1233327000	11.0	NULL	TRUE	0	1969-12-31 16:00:02.351	dOYnqgaXoJ1P3ERwxe5N7	dOYnqgaXoJ	dOYnqgaXoJ	11.0	-6.70900834E8	-0.9999902065507035	-3.35450431E8
+true	NULL	true	true	true	NULL	true	false	true	true	11	NULL	-64615982	1803053750	11	NULL	1	2	NULL	8	11	11	11	11.0	NULL	-6.4615982E7	1.80305375E9	11.0	NULL	1.0	2.351	NULL	8.0	-6.4615984E7	NULL	1969-12-31 16:00:00.011	NULL	1969-12-30 22:03:04.018	1970-01-21 12:50:53.75	1969-12-31 16:00:11	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:02.351	NULL	NULL	11	NULL	-64615982	1803053750	11.0	NULL	TRUE	0	1969-12-31 16:00:02.351	8J5OB7K26PEV7kdbeHr3	8J5OB7K26P	8J5OB7K26P	11.0	-1.29231964E8	-0.9999902065507035	-6.4615983E7
+true	NULL	true	true	true	NULL	true	false	true	true	8	NULL	890988972	-1862301000	8	NULL	1	15	NULL	NULL	8	8	8	8.0	NULL	8.90988972E8	-1.862301E9	8.0	NULL	1.0	15.892	NULL	NULL	8.9098899E8	NULL	1969-12-31 16:00:00.008	NULL	1970-01-10 23:29:48.972	1969-12-10 02:41:39	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	890988972	-1862301000	8.0	NULL	TRUE	0	1969-12-31 16:00:15.892	XylAH4	XylAH4	XylAH4	8.0	1.781977944E9	0.9893582466233818	8.90988993E8
+true	NULL	true	true	true	NULL	true	false	true	true	8	NULL	930867246	1205399250	8	NULL	1	15	NULL	NULL	8	8	8	8.0	NULL	9.30867246E8	1.20539925E9	8.0	NULL	1.0	15.892	NULL	NULL	9.3086726E8	NULL	1969-12-31 16:00:00.008	NULL	1970-01-11 10:34:27.246	1970-01-14 14:49:59.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	930867246	1205399250	8.0	NULL	TRUE	0	1969-12-31 16:00:15.892	c1V8o1A	c1V8o1A	c1V8o1A	8.0	1.861734492E9	0.9893582466233818	9.30867265E8
+true	true	NULL	true	true	true	NULL	false	true	NULL	-14	-7196	NULL	-1552199500	-14	-7196	NULL	11	NULL	NULL	-14	-14	-14	-14.0	-7196.0	NULL	-1.5521995E9	-14.0	-7196.0	NULL	11.065	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.986	1969-12-31 15:59:52.804	NULL	1969-12-13 16:50:00.5	1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:11.065	NULL	NULL	-14	-7196	NULL	-1552199500	-14.0	-7196.0	NULL	0	1969-12-31 16:00:11.065	NULL	NULL	NULL	-14.0	NULL	-0.9906073556948704	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-21	-7196	NULL	1542429000	-21	-7196	NULL	-5	NULL	NULL	-21	-21	-21	-21.0	-7196.0	NULL	1.542429E9	-21.0	-7196.0	NULL	-4.1	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.979	1969-12-31 15:59:52.804	NULL	1970-01-18 12:27:09	1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:55.9	NULL	NULL	-21	-7196	NULL	1542429000	-21.0	-7196.0	NULL	0	1969-12-31 15:59:55.9	NULL	NULL	NULL	-21.0	NULL	-0.8366556385360561	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-24	-7196	NULL	829111000	-24	-7196	NULL	-7	NULL	NULL	-24	-24	-24	-24.0	-7196.0	NULL	8.29111E8	-24.0	-7196.0	NULL	-6.855	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.976	1969-12-31 15:59:52.804	NULL	1970-01-10 06:18:31	1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:53.145	NULL	NULL	-24	-7196	NULL	829111000	-24.0	-7196.0	NULL	0	1969-12-31 15:59:53.145	NULL	NULL	NULL	-24.0	NULL	0.9055783620066238	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-30	-200	NULL	1429852250	-30	-200	NULL	12	NULL	NULL	-30	-30	-30	-30.0	-200.0	NULL	1.42985225E9	-30.0	-200.0	NULL	12.935	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.97	1969-12-31 15:59:59.8	NULL	1970-01-17 05:10:52.25	1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:12.935	NULL	NULL	-30	-200	NULL	1429852250	-30.0	-200.0	NULL	0	1969-12-31 16:00:12.935	NULL	NULL	NULL	-30.0	NULL	0.9880316240928618	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	-2006216750	-36	-200	NULL	-15	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	-2.00621675E9	-36.0	-200.0	NULL	-14.252	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1969-12-08 10:43:03.25	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:45.748	NULL	NULL	-36	-200	NULL	-2006216750	-36.0	-200.0	NULL	0	1969-12-31 15:59:45.748	NULL	NULL	NULL	-36.0	NULL	0.9917788534431158	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	1599879000	-36	-200	NULL	-7	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	1.599879E9	-36.0	-200.0	NULL	-6.183	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1970-01-19 04:24:39	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:53.817	NULL	NULL	-36	-200	NULL	1599879000	-36.0	-200.0	NULL	0	1969-12-31 15:59:53.817	NULL	NULL	NULL	-36.0	NULL	0.9917788534431158	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-38	15601	NULL	-1858689000	-38	15601	NULL	-2	NULL	NULL	-38	-38	-38	-38.0	15601.0	NULL	-1.858689E9	-38.0	15601.0	NULL	-1.3860000000000001	NULL	NULL	NULL	15601.0	1969-12-31 15:59:59.962	1969-12-31 16:00:15.601	NULL	1969-12-10 03:41:51	1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:58.614	NULL	NULL	-38	15601	NULL	-1858689000	-38.0	15601.0	NULL	0	1969-12-31 15:59:58.614	NULL	NULL	NULL	-38.0	NULL	-0.2963685787093853	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-5	15601	NULL	612416000	-5	15601	NULL	4	NULL	NULL	-5	-5	-5	-5.0	15601.0	NULL	6.12416E8	-5.0	15601.0	NULL	4.679	NULL	NULL	NULL	15601.0	1969-12-31 15:59:59.995	1969-12-31 16:00:15.601	NULL	1970-01-07 18:06:56	1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:04.679	NULL	NULL	-5	15601	NULL	612416000	-5.0	15601.0	NULL	0	1969-12-31 16:00:04.679	NULL	NULL	NULL	-5.0	NULL	0.9589242746631385	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-50	-7196	NULL	-1031187250	-50	-7196	NULL	-6	NULL	NULL	-50	-50	-50	-50.0	-7196.0	NULL	-1.03118725E9	-50.0	-7196.0	NULL	-5.267	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.95	1969-12-31 15:59:52.804	NULL	1969-12-19 17:33:32.75	1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:54.733	NULL	NULL	-50	-7196	NULL	-1031187250	-50.0	-7196.0	NULL	0	1969-12-31 15:59:54.733	NULL	NULL	NULL	-50.0	NULL	0.26237485370392877	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-59	-7196	NULL	-1604890000	-59	-7196	NULL	13	NULL	NULL	-59	-59	-59	-59.0	-7196.0	NULL	-1.60489E9	-59.0	-7196.0	NULL	13.15	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.941	1969-12-31 15:59:52.804	NULL	1969-12-13 02:11:50	1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:13.15	NULL	NULL	-59	-7196	NULL	-1604890000	-59.0	-7196.0	NULL	0	1969-12-31 16:00:13.15	NULL	NULL	NULL	-59.0	NULL	-0.6367380071391379	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-60	-7196	NULL	1516314750	-60	-7196	NULL	-8	NULL	NULL	-60	-60	-60	-60.0	-7196.0	NULL	1.51631475E9	-60.0	-7196.0	NULL	-7.592	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.94	1969-12-31 15:59:52.804	NULL	1970-01-18 05:11:54.75	1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:52.408	NULL	NULL	-60	-7196	NULL	1516314750	-60.0	-7196.0	NULL	0	1969-12-31 15:59:52.408	NULL	NULL	NULL	-60.0	NULL	0.3048106211022167	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-8	-7196	NULL	-1849991500	-8	-7196	NULL	3	NULL	NULL	-8	-8	-8	-8.0	-7196.0	NULL	-1.8499915E9	-8.0	-7196.0	NULL	3.136	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.992	1969-12-31 15:59:52.804	NULL	1969-12-10 06:06:48.5	1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:03.136	NULL	NULL	-8	-7196	NULL	-1849991500	-8.0	-7196.0	NULL	0	1969-12-31 16:00:03.136	NULL	NULL	NULL	-8.0	NULL	-0.9893582466233818	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	20	15601	NULL	-362433250	20	15601	NULL	-15	NULL	NULL	20	20	20	20.0	15601.0	NULL	-3.6243325E8	20.0	15601.0	NULL	-14.871	NULL	NULL	NULL	15601.0	1969-12-31 16:00:00.02	1969-12-31 16:00:15.601	NULL	1969-12-27 11:19:26.75	1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:45.129	NULL	NULL	20	15601	NULL	-362433250	20.0	15601.0	NULL	0	1969-12-31 15:59:45.129	NULL	NULL	NULL	20.0	NULL	0.9129452507276277	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	48	15601	NULL	-795361000	48	15601	NULL	-10	NULL	NULL	48	48	48	48.0	15601.0	NULL	-7.95361E8	48.0	15601.0	NULL	-9.765	NULL	NULL	NULL	15601.0	1969-12-31 16:00:00.048	1969-12-31 16:00:15.601	NULL	1969-12-22 11:03:59	1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:50.235	NULL	NULL	48	15601	NULL	-795361000	48.0	15601.0	NULL	0	1969-12-31 15:59:50.235	NULL	NULL	NULL	48.0	NULL	-0.7682546613236668	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	5	-7196	NULL	-1015607500	5	-7196	NULL	10	NULL	NULL	5	5	5	5.0	-7196.0	NULL	-1.0156075E9	5.0	-7196.0	NULL	10.973	NULL	NULL	NULL	-7196.0	1969-12-31 16:00:00.005	1969-12-31 15:59:52.804	NULL	1969-12-19 21:53:12.5	1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:10.973	NULL	NULL	5	-7196	NULL	-1015607500	5.0	-7196.0	NULL	0	1969-12-31 16:00:10.973	NULL	NULL	NULL	5.0	NULL	-0.9589242746631385	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	59	-7196	NULL	-1137754500	59	-7196	NULL	10	NULL	NULL	59	59	59	59.0	-7196.0	NULL	-1.1377545E9	59.0	-7196.0	NULL	10.956	NULL	NULL	NULL	-7196.0	1969-12-31 16:00:00.059	1969-12-31 15:59:52.804	NULL	1969-12-18 11:57:25.5	1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:10.956	NULL	NULL	59	-7196	NULL	-1137754500	59.0	-7196.0	NULL	0	1969-12-31 16:00:10.956	NULL	NULL	NULL	59.0	NULL	0.6367380071391379	NULL


[07/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vector_case_when_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_case_when_2.q.out b/ql/src/test/results/clientpositive/vector_case_when_2.q.out
index 9ff8750..76c7f3d 100644
--- a/ql/src/test/results/clientpositive/vector_case_when_2.q.out
+++ b/ql/src/test/results/clientpositive/vector_case_when_2.q.out
@@ -131,7 +131,7 @@ STAGE PLANS:
             alias: timestamps
             Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
+              expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00.0')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00.0')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
               Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
@@ -243,49 +243,49 @@ NULL	NULL	Unknown	NULL	NULL	NULL	2018-03-08 23:04:59	NULL	NULL	NULL
 1815-05-06 00:12:37.543584705	1815-05-04 22:09:33.543584705	1900s	Old	Old	1815	2018-03-08 23:04:59	12	NULL	1816-05-05
 1883-04-17 04:14:34.647766229	1883-04-16 02:11:30.647766229	1900s	Old	Old	1883	2018-03-08 23:04:59	14	NULL	1884-04-16
 1966-08-16 13:36:50.183618031	1966-08-15 11:33:46.183618031	Early 2010s	Old	Old	1966	1966-08-16 13:36:50.183618031	36	NULL	1967-08-16
-1973-04-17 06:30:38.596784156	1973-04-16 04:27:34.596784156	Early 2010s	Old	Old	1973	1973-04-17 06:30:38.596784156	30	NULL	1973-04-18
+1973-04-17 06:30:38.596784156	1973-04-16 04:27:34.596784156	Early 2010s	Old	Old	1973	1973-04-17 06:30:38.596784156	30	NULL	1974-04-17
 1974-10-04 17:21:03.989	1974-10-03 15:17:59.989	Early 2010s	Old	Old	1974	1974-10-04 17:21:03.989	21	NULL	1974-10-05
 1976-03-03 04:54:33.000895162	1976-03-02 02:51:29.000895162	Early 2010s	Old	Old	1976	1976-03-03 04:54:33.000895162	54	NULL	1976-03-04
-1976-05-06 00:42:30.910786948	1976-05-04 22:39:26.910786948	Early 2010s	Old	Old	1976	1976-05-06 00:42:30.910786948	42	NULL	1976-05-07
-1978-08-05 14:41:05.501	1978-08-04 12:38:01.501	Early 2010s	Old	Old	1978	1978-08-05 14:41:05.501	41	NULL	1979-08-05
-1981-04-25 09:01:12.077192689	1981-04-24 06:58:08.077192689	Early 2010s	Old	Old	1981	1981-04-25 09:01:12.077192689	1	NULL	1981-04-26
+1976-05-06 00:42:30.910786948	1976-05-04 22:39:26.910786948	Early 2010s	Old	Old	1976	1976-05-06 00:42:30.910786948	42	NULL	1977-05-06
+1978-08-05 14:41:05.501	1978-08-04 12:38:01.501	Early 2010s	Old	Old	1978	1978-08-05 14:41:05.501	41	NULL	1978-08-06
+1981-04-25 09:01:12.077192689	1981-04-24 06:58:08.077192689	Early 2010s	Old	Old	1981	1981-04-25 09:01:12.077192689	1	NULL	1982-04-25
 1981-11-15 23:03:10.999338387	1981-11-14 21:00:06.999338387	Early 2010s	Old	Old	1981	1981-11-15 23:03:10.999338387	3	NULL	1981-11-16
-1985-07-20 09:30:11	1985-07-19 07:27:07	Early 2010s	Old	Old	1985	1985-07-20 09:30:11	30	NULL	1985-07-21
+1985-07-20 09:30:11	1985-07-19 07:27:07	Early 2010s	Old	Old	1985	1985-07-20 09:30:11	30	NULL	1986-07-20
 1985-11-18 16:37:54	1985-11-17 14:34:50	Early 2010s	Old	Old	1985	1985-11-18 16:37:54	37	NULL	1985-11-19
 1987-02-21 19:48:29	1987-02-20 17:45:25	Early 2010s	Old	Old	1987	1987-02-21 19:48:29	48	NULL	1987-02-22
-1987-05-28 13:52:07.900916635	1987-05-27 11:49:03.900916635	Early 2010s	Old	Old	1987	1987-05-28 13:52:07.900916635	52	NULL	1987-05-29
-1998-10-16 20:05:29.397591987	1998-10-15 18:02:25.397591987	Early 2010s	Old	Old	1998	1998-10-16 20:05:29.397591987	5	NULL	1998-10-17
+1987-05-28 13:52:07.900916635	1987-05-27 11:49:03.900916635	Early 2010s	Old	Old	1987	1987-05-28 13:52:07.900916635	52	NULL	1988-05-27
+1998-10-16 20:05:29.397591987	1998-10-15 18:02:25.397591987	Early 2010s	Old	Old	1998	1998-10-16 20:05:29.397591987	5	NULL	1999-10-16
 1999-10-03 16:59:10.396903939	1999-10-02 14:56:06.396903939	Early 2010s	Old	Old	1999	1999-10-03 16:59:10.396903939	59	NULL	1999-10-04
 2000-12-18 08:42:30.000595596	2000-12-17 06:39:26.000595596	Early 2010s	Old	Old	2000	2018-03-08 23:04:59	42	NULL	2000-12-19
-2002-05-10 05:29:48.990818073	2002-05-09 03:26:44.990818073	Early 2010s	Early 2000s	Early 2000s	2002	2018-03-08 23:04:59	29	NULL	2003-05-10
-2003-09-23 22:33:17.00003252	2003-09-22 20:30:13.00003252	Early 2010s	Early 2000s	Early 2000s	2003	2018-03-08 23:04:59	33	NULL	2003-09-24
+2002-05-10 05:29:48.990818073	2002-05-09 03:26:44.990818073	Early 2010s	Early 2000s	Early 2000s	2002	2018-03-08 23:04:59	29	NULL	2002-05-11
+2003-09-23 22:33:17.00003252	2003-09-22 20:30:13.00003252	Early 2010s	Early 2000s	Early 2000s	2003	2018-03-08 23:04:59	33	NULL	2004-09-22
 2004-03-07 20:14:13	2004-03-06 18:11:09	Early 2010s	Early 2000s	Early 2000s	2004	2018-03-08 23:04:59	14	NULL	2004-03-08
-2007-02-09 05:17:29.368756876	2007-02-08 03:14:25.368756876	Late 2000s	Late 2000s	Late 2000s	2007	2018-03-08 23:04:59	17	NULL	2007-02-10
+2007-02-09 05:17:29.368756876	2007-02-08 03:14:25.368756876	Late 2000s	Late 2000s	Late 2000s	2007	2018-03-08 23:04:59	17	NULL	2008-02-09
 2009-01-21 10:49:07.108	2009-01-20 08:46:03.108	Late 2000s	Late 2000s	Late 2000s	2009	2018-03-08 23:04:59	49	NULL	2009-01-22
 2010-04-08 02:43:35.861742727	2010-04-07 00:40:31.861742727	Late 2000s	Late 2000s	Late 2000s	2010	2018-03-08 23:04:59	43	NULL	2010-04-09
 2013-04-07 02:44:43.00086821	2013-04-06 00:41:39.00086821	Early 2010s	Early 2010s	NULL	2013	2018-03-08 23:04:59	44	NULL	2013-04-08
 2013-04-10 00:43:46.854731546	2013-04-08 22:40:42.854731546	Early 2010s	Early 2010s	NULL	2013	2018-03-08 23:04:59	43	NULL	2013-04-11
-2021-09-24 03:18:32.413655165	2021-09-23 01:15:28.413655165	Unknown	NULL	NULL	2021	2018-03-08 23:04:59	NULL	NULL	2022-09-24
+2021-09-24 03:18:32.413655165	2021-09-23 01:15:28.413655165	Unknown	NULL	NULL	2021	2018-03-08 23:04:59	NULL	NULL	2021-09-25
 2024-11-11 16:42:41.101	2024-11-10 14:39:37.101	Unknown	NULL	NULL	2024	2018-03-08 23:04:59	42	NULL	2024-11-12
 4143-07-08 10:53:27.252802259	4143-07-07 08:50:23.252802259	Unknown	NULL	NULL	4143	2018-03-08 23:04:59	53	NULL	4143-07-09
 4966-12-04 09:30:55.202	4966-12-03 07:27:51.202	Unknown	NULL	NULL	4966	2018-03-08 23:04:59	30	NULL	4966-12-05
-5339-02-01 14:10:01.085678691	5339-01-31 12:06:57.085678691	Unknown	NULL	NULL	5339	2018-03-08 23:04:59	10	NULL	5339-02-02
+5339-02-01 14:10:01.085678691	5339-01-31 12:06:57.085678691	Unknown	NULL	NULL	5339	2018-03-08 23:04:59	10	NULL	5340-02-01
 5344-10-04 18:40:08.165	5344-10-03 16:37:04.165	Unknown	NULL	NULL	5344	2018-03-08 23:04:59	40	NULL	5344-10-05
 5397-07-13 07:12:32.000896438	5397-07-12 05:09:28.000896438	Unknown	NULL	NULL	5397	2018-03-08 23:04:59	12	12	5397-07-14
-5966-07-09 03:30:50.597	5966-07-08 01:27:46.597	Unknown	NULL	NULL	5966	2018-03-08 23:04:59	30	30	5966-07-10
-6229-06-28 02:54:28.970117179	6229-06-27 00:51:24.970117179	Unknown	NULL	NULL	6229	2018-03-08 23:04:59	54	54	6229-06-29
-6482-04-27 12:07:38.073915413	6482-04-26 10:04:34.073915413	Unknown	NULL	NULL	6482	2018-03-08 23:04:59	7	7	6483-04-27
+5966-07-09 03:30:50.597	5966-07-08 01:27:46.597	Unknown	NULL	NULL	5966	2018-03-08 23:04:59	30	30	5967-07-09
+6229-06-28 02:54:28.970117179	6229-06-27 00:51:24.970117179	Unknown	NULL	NULL	6229	2018-03-08 23:04:59	54	54	6230-06-28
+6482-04-27 12:07:38.073915413	6482-04-26 10:04:34.073915413	Unknown	NULL	NULL	6482	2018-03-08 23:04:59	7	7	6482-04-28
 6631-11-13 16:31:29.702202248	6631-11-12 14:28:25.702202248	Unknown	NULL	NULL	6631	2018-03-08 23:04:59	31	31	6631-11-14
 6705-09-28 18:27:28.000845672	6705-09-27 16:24:24.000845672	Unknown	NULL	NULL	6705	2018-03-08 23:04:59	27	NULL	6705-09-29
 6731-02-12 08:12:48.287783702	6731-02-11 06:09:44.287783702	Unknown	NULL	NULL	6731	2018-03-08 23:04:59	12	NULL	6731-02-13
-7160-12-02 06:00:24.81200852	7160-12-01 03:57:20.81200852	Unknown	NULL	NULL	7160	2018-03-08 23:04:59	0	NULL	7160-12-03
-7409-09-07 23:33:32.459349602	7409-09-06 21:30:28.459349602	Unknown	NULL	NULL	7409	2018-03-08 23:04:59	33	NULL	7410-09-07
+7160-12-02 06:00:24.81200852	7160-12-01 03:57:20.81200852	Unknown	NULL	NULL	7160	2018-03-08 23:04:59	0	NULL	7161-12-02
+7409-09-07 23:33:32.459349602	7409-09-06 21:30:28.459349602	Unknown	NULL	NULL	7409	2018-03-08 23:04:59	33	NULL	7409-09-08
 7503-06-23 23:14:17.486	7503-06-22 21:11:13.486	Unknown	NULL	NULL	7503	2018-03-08 23:04:59	14	NULL	7503-06-24
 8422-07-22 03:21:45.745036084	8422-07-21 01:18:41.745036084	Unknown	NULL	NULL	8422	2018-03-08 23:04:59	21	NULL	8422-07-23
 8521-01-16 20:42:05.668832388	8521-01-15 18:39:01.668832388	Unknown	NULL	NULL	8521	2018-03-08 23:04:59	42	NULL	8521-01-17
 9075-06-13 16:20:09.218517797	9075-06-12 14:17:05.218517797	Unknown	NULL	NULL	9075	2018-03-08 23:04:59	20	NULL	9075-06-14
 9209-11-11 04:08:58.223768453	9209-11-10 02:05:54.223768453	Unknown	NULL	NULL	9209	2018-03-08 23:04:59	8	NULL	9209-11-12
-9403-01-09 18:12:33.547	9403-01-08 16:09:29.547	Unknown	NULL	NULL	9403	2018-03-08 23:04:59	12	NULL	9404-01-09
+9403-01-09 18:12:33.547	9403-01-08 16:09:29.547	Unknown	NULL	NULL	9403	2018-03-08 23:04:59	12	NULL	9403-01-10
 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
 SELECT
    ctimestamp1,
@@ -370,13 +370,13 @@ STAGE PLANS:
                 native: true
                 vectorizationSchemaColumns: [0:cdate:date, 1:ctimestamp1:timestamp, 2:stimestamp1:string, 3:ctimestamp2:timestamp, 4:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
             Select Operator
-              expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
+              expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00.0')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00.0')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
                   projectedOutputColumnNums: [1, 3, 10, 12, 13, 14, 11, 7, 16, 23, 2]
-                  selectExpressions: IfExprStringScalarStringGroupColumn(col 5:boolean, val 1800s or Earliercol 9:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val 1900scol 10:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 9:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprStringScalarStringScalar(col 8:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 8:boolean) -> 9:string) -> 10:string) -> 9:string) -> 10:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 12:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprColumnNull(col 8:boolean, col 9:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 8:boolean, ConstantVectorExpression(val Early 2010s) -> 9:string) -> 11:string) -> 12:string) -> 11:string) -> 12:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprNullNull(null, null) -> 11:string) -> 13:string) -> 11:string) -> 13:string, IfExprLongColumnLongColumn(col 5:boolean, col 6:int, col 7:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 17:21:03.989) -> 5:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 7:int) -> 14:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 5:boolean) -> 11:string, IfExprNullColumn(col 5:boolean, null, col 6)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-24 03:18:32.413655165) -> 5:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 6:int) -> 7:int, IfExprColumnNull(col 17:boolean, col 15:int, null)(children: ColAndCol(col 15:boolean, col 16:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 18:40:08.165) -> 15:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 16:31:29.702202248) -> 16:boolean) -> 17:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 15:int) -> 16:int, IfExprLongColumnLongColumn(col 20:boolean, col 21:date, col 22:date)(children: DoubleColGreaterDoubleScalar(col 19:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 18:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 18:double) -> 19:double) -> 20:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 21:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 22:date) -> 23:date
+                  selectExpressions: IfExprStringScalarStringGroupColumn(col 5:boolean, val 1800s or Earliercol 9:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00.0) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val 1900scol 10:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 9:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprStringScalarStringScalar(col 8:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 8:boolean) -> 9:string) -> 10:string) -> 9:string) -> 10:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 12:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprColumnNull(col 8:boolean, col 9:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 8:boolean, ConstantVectorExpression(val Early 2010s) -> 9:string) -> 11:string) -> 12:string) -> 11:string) -> 12:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprNullNull(null, null) -> 11:string) -> 13:string) -> 11:string) -> 13:string, IfExprLongColumnLongColumn(col 5:boolean, col 6:int, col 7:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 17:21:03.989) -> 5:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 7:int) -> 14:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 5:boolean) -> 11:string, IfExprNullColumn(col 5:boolean, null, col 6)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-24 03:18:32.413655165) -> 5:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 6:int) -> 7:int, IfExprColumnNull(col 17:boolean, col 15:int, null)(children: ColAndCol(col 15:boolean, col 16:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 18:40:08.165) -> 15:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 16:31:29.702202248) -> 16:boolean) -> 17:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 15:int) -> 16:int, IfExprLongColumnLongColumn(col 20:boolean, col 21:date, col 22:date)(children: DoubleColGreaterDoubleScalar(col 19:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 18:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 18:double) -> 19:double) -> 20:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 21:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 22:date) -> 23:date
               Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: timestamp), _col10 (type: string), _col1 (type: timestamp)
@@ -502,49 +502,49 @@ NULL	NULL	Unknown	NULL	NULL	NULL	2018-03-08 23:04:59	NULL	NULL	NULL
 1815-05-06 00:12:37.543584705	1815-05-04 22:09:33.543584705	1900s	Old	Old	1815	2018-03-08 23:04:59	12	NULL	1816-05-05
 1883-04-17 04:14:34.647766229	1883-04-16 02:11:30.647766229	1900s	Old	Old	1883	2018-03-08 23:04:59	14	NULL	1884-04-16
 1966-08-16 13:36:50.183618031	1966-08-15 11:33:46.183618031	Early 2010s	Old	Old	1966	1966-08-16 13:36:50.183618031	36	NULL	1967-08-16
-1973-04-17 06:30:38.596784156	1973-04-16 04:27:34.596784156	Early 2010s	Old	Old	1973	1973-04-17 06:30:38.596784156	30	NULL	1973-04-18
+1973-04-17 06:30:38.596784156	1973-04-16 04:27:34.596784156	Early 2010s	Old	Old	1973	1973-04-17 06:30:38.596784156	30	NULL	1974-04-17
 1974-10-04 17:21:03.989	1974-10-03 15:17:59.989	Early 2010s	Old	Old	1974	1974-10-04 17:21:03.989	21	NULL	1974-10-05
 1976-03-03 04:54:33.000895162	1976-03-02 02:51:29.000895162	Early 2010s	Old	Old	1976	1976-03-03 04:54:33.000895162	54	NULL	1976-03-04
-1976-05-06 00:42:30.910786948	1976-05-04 22:39:26.910786948	Early 2010s	Old	Old	1976	1976-05-06 00:42:30.910786948	42	NULL	1976-05-07
-1978-08-05 14:41:05.501	1978-08-04 12:38:01.501	Early 2010s	Old	Old	1978	1978-08-05 14:41:05.501	41	NULL	1979-08-05
-1981-04-25 09:01:12.077192689	1981-04-24 06:58:08.077192689	Early 2010s	Old	Old	1981	1981-04-25 09:01:12.077192689	1	NULL	1981-04-26
+1976-05-06 00:42:30.910786948	1976-05-04 22:39:26.910786948	Early 2010s	Old	Old	1976	1976-05-06 00:42:30.910786948	42	NULL	1977-05-06
+1978-08-05 14:41:05.501	1978-08-04 12:38:01.501	Early 2010s	Old	Old	1978	1978-08-05 14:41:05.501	41	NULL	1978-08-06
+1981-04-25 09:01:12.077192689	1981-04-24 06:58:08.077192689	Early 2010s	Old	Old	1981	1981-04-25 09:01:12.077192689	1	NULL	1982-04-25
 1981-11-15 23:03:10.999338387	1981-11-14 21:00:06.999338387	Early 2010s	Old	Old	1981	1981-11-15 23:03:10.999338387	3	NULL	1981-11-16
-1985-07-20 09:30:11	1985-07-19 07:27:07	Early 2010s	Old	Old	1985	1985-07-20 09:30:11	30	NULL	1985-07-21
+1985-07-20 09:30:11	1985-07-19 07:27:07	Early 2010s	Old	Old	1985	1985-07-20 09:30:11	30	NULL	1986-07-20
 1985-11-18 16:37:54	1985-11-17 14:34:50	Early 2010s	Old	Old	1985	1985-11-18 16:37:54	37	NULL	1985-11-19
 1987-02-21 19:48:29	1987-02-20 17:45:25	Early 2010s	Old	Old	1987	1987-02-21 19:48:29	48	NULL	1987-02-22
-1987-05-28 13:52:07.900916635	1987-05-27 11:49:03.900916635	Early 2010s	Old	Old	1987	1987-05-28 13:52:07.900916635	52	NULL	1987-05-29
-1998-10-16 20:05:29.397591987	1998-10-15 18:02:25.397591987	Early 2010s	Old	Old	1998	1998-10-16 20:05:29.397591987	5	NULL	1998-10-17
+1987-05-28 13:52:07.900916635	1987-05-27 11:49:03.900916635	Early 2010s	Old	Old	1987	1987-05-28 13:52:07.900916635	52	NULL	1988-05-27
+1998-10-16 20:05:29.397591987	1998-10-15 18:02:25.397591987	Early 2010s	Old	Old	1998	1998-10-16 20:05:29.397591987	5	NULL	1999-10-16
 1999-10-03 16:59:10.396903939	1999-10-02 14:56:06.396903939	Early 2010s	Old	Old	1999	1999-10-03 16:59:10.396903939	59	NULL	1999-10-04
 2000-12-18 08:42:30.000595596	2000-12-17 06:39:26.000595596	Early 2010s	Old	Old	2000	2018-03-08 23:04:59	42	NULL	2000-12-19
-2002-05-10 05:29:48.990818073	2002-05-09 03:26:44.990818073	Early 2010s	Early 2000s	Early 2000s	2002	2018-03-08 23:04:59	29	NULL	2003-05-10
-2003-09-23 22:33:17.00003252	2003-09-22 20:30:13.00003252	Early 2010s	Early 2000s	Early 2000s	2003	2018-03-08 23:04:59	33	NULL	2003-09-24
+2002-05-10 05:29:48.990818073	2002-05-09 03:26:44.990818073	Early 2010s	Early 2000s	Early 2000s	2002	2018-03-08 23:04:59	29	NULL	2002-05-11
+2003-09-23 22:33:17.00003252	2003-09-22 20:30:13.00003252	Early 2010s	Early 2000s	Early 2000s	2003	2018-03-08 23:04:59	33	NULL	2004-09-22
 2004-03-07 20:14:13	2004-03-06 18:11:09	Early 2010s	Early 2000s	Early 2000s	2004	2018-03-08 23:04:59	14	NULL	2004-03-08
-2007-02-09 05:17:29.368756876	2007-02-08 03:14:25.368756876	Late 2000s	Late 2000s	Late 2000s	2007	2018-03-08 23:04:59	17	NULL	2007-02-10
+2007-02-09 05:17:29.368756876	2007-02-08 03:14:25.368756876	Late 2000s	Late 2000s	Late 2000s	2007	2018-03-08 23:04:59	17	NULL	2008-02-09
 2009-01-21 10:49:07.108	2009-01-20 08:46:03.108	Late 2000s	Late 2000s	Late 2000s	2009	2018-03-08 23:04:59	49	NULL	2009-01-22
 2010-04-08 02:43:35.861742727	2010-04-07 00:40:31.861742727	Late 2000s	Late 2000s	Late 2000s	2010	2018-03-08 23:04:59	43	NULL	2010-04-09
 2013-04-07 02:44:43.00086821	2013-04-06 00:41:39.00086821	Early 2010s	Early 2010s	NULL	2013	2018-03-08 23:04:59	44	NULL	2013-04-08
 2013-04-10 00:43:46.854731546	2013-04-08 22:40:42.854731546	Early 2010s	Early 2010s	NULL	2013	2018-03-08 23:04:59	43	NULL	2013-04-11
-2021-09-24 03:18:32.413655165	2021-09-23 01:15:28.413655165	Unknown	NULL	NULL	2021	2018-03-08 23:04:59	NULL	NULL	2022-09-24
+2021-09-24 03:18:32.413655165	2021-09-23 01:15:28.413655165	Unknown	NULL	NULL	2021	2018-03-08 23:04:59	NULL	NULL	2021-09-25
 2024-11-11 16:42:41.101	2024-11-10 14:39:37.101	Unknown	NULL	NULL	2024	2018-03-08 23:04:59	42	NULL	2024-11-12
 4143-07-08 10:53:27.252802259	4143-07-07 08:50:23.252802259	Unknown	NULL	NULL	4143	2018-03-08 23:04:59	53	NULL	4143-07-09
 4966-12-04 09:30:55.202	4966-12-03 07:27:51.202	Unknown	NULL	NULL	4966	2018-03-08 23:04:59	30	NULL	4966-12-05
-5339-02-01 14:10:01.085678691	5339-01-31 12:06:57.085678691	Unknown	NULL	NULL	5339	2018-03-08 23:04:59	10	NULL	5339-02-02
+5339-02-01 14:10:01.085678691	5339-01-31 12:06:57.085678691	Unknown	NULL	NULL	5339	2018-03-08 23:04:59	10	NULL	5340-02-01
 5344-10-04 18:40:08.165	5344-10-03 16:37:04.165	Unknown	NULL	NULL	5344	2018-03-08 23:04:59	40	NULL	5344-10-05
 5397-07-13 07:12:32.000896438	5397-07-12 05:09:28.000896438	Unknown	NULL	NULL	5397	2018-03-08 23:04:59	12	12	5397-07-14
-5966-07-09 03:30:50.597	5966-07-08 01:27:46.597	Unknown	NULL	NULL	5966	2018-03-08 23:04:59	30	30	5966-07-10
-6229-06-28 02:54:28.970117179	6229-06-27 00:51:24.970117179	Unknown	NULL	NULL	6229	2018-03-08 23:04:59	54	54	6229-06-29
-6482-04-27 12:07:38.073915413	6482-04-26 10:04:34.073915413	Unknown	NULL	NULL	6482	2018-03-08 23:04:59	7	7	6483-04-27
+5966-07-09 03:30:50.597	5966-07-08 01:27:46.597	Unknown	NULL	NULL	5966	2018-03-08 23:04:59	30	30	5967-07-09
+6229-06-28 02:54:28.970117179	6229-06-27 00:51:24.970117179	Unknown	NULL	NULL	6229	2018-03-08 23:04:59	54	54	6230-06-28
+6482-04-27 12:07:38.073915413	6482-04-26 10:04:34.073915413	Unknown	NULL	NULL	6482	2018-03-08 23:04:59	7	7	6482-04-28
 6631-11-13 16:31:29.702202248	6631-11-12 14:28:25.702202248	Unknown	NULL	NULL	6631	2018-03-08 23:04:59	31	31	6631-11-14
 6705-09-28 18:27:28.000845672	6705-09-27 16:24:24.000845672	Unknown	NULL	NULL	6705	2018-03-08 23:04:59	27	NULL	6705-09-29
 6731-02-12 08:12:48.287783702	6731-02-11 06:09:44.287783702	Unknown	NULL	NULL	6731	2018-03-08 23:04:59	12	NULL	6731-02-13
-7160-12-02 06:00:24.81200852	7160-12-01 03:57:20.81200852	Unknown	NULL	NULL	7160	2018-03-08 23:04:59	0	NULL	7160-12-03
-7409-09-07 23:33:32.459349602	7409-09-06 21:30:28.459349602	Unknown	NULL	NULL	7409	2018-03-08 23:04:59	33	NULL	7410-09-07
+7160-12-02 06:00:24.81200852	7160-12-01 03:57:20.81200852	Unknown	NULL	NULL	7160	2018-03-08 23:04:59	0	NULL	7161-12-02
+7409-09-07 23:33:32.459349602	7409-09-06 21:30:28.459349602	Unknown	NULL	NULL	7409	2018-03-08 23:04:59	33	NULL	7409-09-08
 7503-06-23 23:14:17.486	7503-06-22 21:11:13.486	Unknown	NULL	NULL	7503	2018-03-08 23:04:59	14	NULL	7503-06-24
 8422-07-22 03:21:45.745036084	8422-07-21 01:18:41.745036084	Unknown	NULL	NULL	8422	2018-03-08 23:04:59	21	NULL	8422-07-23
 8521-01-16 20:42:05.668832388	8521-01-15 18:39:01.668832388	Unknown	NULL	NULL	8521	2018-03-08 23:04:59	42	NULL	8521-01-17
 9075-06-13 16:20:09.218517797	9075-06-12 14:17:05.218517797	Unknown	NULL	NULL	9075	2018-03-08 23:04:59	20	NULL	9075-06-14
 9209-11-11 04:08:58.223768453	9209-11-10 02:05:54.223768453	Unknown	NULL	NULL	9209	2018-03-08 23:04:59	8	NULL	9209-11-12
-9403-01-09 18:12:33.547	9403-01-08 16:09:29.547	Unknown	NULL	NULL	9403	2018-03-08 23:04:59	12	NULL	9404-01-09
+9403-01-09 18:12:33.547	9403-01-08 16:09:29.547	Unknown	NULL	NULL	9403	2018-03-08 23:04:59	12	NULL	9403-01-10
 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
 SELECT
    ctimestamp1,
@@ -629,13 +629,13 @@ STAGE PLANS:
                 native: true
                 vectorizationSchemaColumns: [0:cdate:date, 1:ctimestamp1:timestamp, 2:stimestamp1:string, 3:ctimestamp2:timestamp, 4:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
             Select Operator
-              expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
+              expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00.0')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00.0')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
                   projectedOutputColumnNums: [1, 3, 15, 26, 36, 40, 42, 44, 46, 53, 2]
-                  selectExpressions: IfExprColumnCondExpr(col 5:boolean, col 6:stringcol 14:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00) -> 5:boolean, ConstantVectorExpression(val 1800s or Earlier) -> 6:string, IfExprColumnCondExpr(col 7:boolean, col 8:stringcol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00) -> 7:boolean, ConstantVectorExpression(val 1900s) -> 8:string, IfExprColumnCondExpr(col 9:boolean, col 10:stringcol 12:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 9:boolean, ConstantVectorExpression(val Late 2000s) -> 10:string, IfExprStringScalarStringScalar(col 11:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 11:boolean) -> 12:string) -> 13:string) -> 14:string) -> 15:string, IfExprColumnCondExpr(col 11:boolean, col 16:stringcol 25:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 11:boolean, ConstantVectorExpression(val Old) -> 16:string, IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 24:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00) -> 17:boolean, ConstantVectorExpression(val Early 2000s) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 23:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 19:boolean, ConstantVectorExpression(val Late 2000s) -> 20:string, IfExprColumnNull(col 21:boolean, col 22:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 21:boolean, ConstantVectorExpression(val Early 2010s) -> 22:string) -> 23:string) -> 24:string) -> 25:string) -> 26:string, IfExprColumnCondExpr(col 27:boolean, col 28:stringcol 35:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 27:boolean, ConstantVectorExpression(val Old) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 34:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00) -> 29:boolean, ConstantVectorExpression(val Early 2000s) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 33:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 31:boolean, ConstantVectorExpression(val Late 2000s) -> 32:string, IfExprNullNull(null, null) -> 33:string) -> 34:string) -> 35:string) -> 36:string, IfExprCondExprCondExpr(col 37:boolean, col 38:intcol 39:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 17:21:03.989) -> 37:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 38:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 39:int) -> 40:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 41:boolean) -> 42:string, IfExprNullCondExpr(col 41:boolean, null, col 43:int)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-24 03:18:32.413655165) -> 41:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 43:int) -> 44:int, IfExprCondExprNull(col 47:boolean, col 45:int, null)(children: ColAndCol(col 45:boolean, col 46:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 18:40:08.165) -> 45:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 16:31:29.702202248) -> 46:boolean) -> 47:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 45:int) -> 46:int, IfExprCondExprCondExpr(col 50:boolean, col 51:datecol 52:date)(children: DoubleColGreaterDoubleScalar(col 49:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 48:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 48:double) -> 49:double) -> 50:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 51:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 52:date) -> 53:date
+                  selectExpressions: IfExprColumnCondExpr(col 5:boolean, col 6:stringcol 14:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00.0) -> 5:boolean, ConstantVectorExpression(val 1800s or Earlier) -> 6:string, IfExprColumnCondExpr(col 7:boolean, col 8:stringcol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00.0) -> 7:boolean, ConstantVectorExpression(val 1900s) -> 8:string, IfExprColumnCondExpr(col 9:boolean, col 10:stringcol 12:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 9:boolean, ConstantVectorExpression(val Late 2000s) -> 10:string, IfExprStringScalarStringScalar(col 11:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 11:boolean) -> 12:string) -> 13:string) -> 14:string) -> 15:string, IfExprColumnCo
 ndExpr(col 11:boolean, col 16:stringcol 25:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 11:boolean, ConstantVectorExpression(val Old) -> 16:string, IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 24:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 17:boolean, ConstantVectorExpression(val Early 2000s) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 23:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 19:boolean, ConstantVectorExpression(val Late 2000s) -> 20:string, IfExprColumnNull(col 21:boolean, col 22:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 21:boolean, ConstantVectorExpression(val Early 2010s) -> 22:string) -> 23:string) -> 24:string) -> 25:string) -> 26:string, IfExprColumnCondExpr(
 col 27:boolean, col 28:stringcol 35:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 27:boolean, ConstantVectorExpression(val Old) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 34:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 29:boolean, ConstantVectorExpression(val Early 2000s) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 33:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 31:boolean, ConstantVectorExpression(val Late 2000s) -> 32:string, IfExprNullNull(null, null) -> 33:string) -> 34:string) -> 35:string) -> 36:string, IfExprCondExprCondExpr(col 37:boolean, col 38:intcol 39:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 17:21:03.989) -> 37:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 38:int
 , VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 39:int) -> 40:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 41:boolean) -> 42:string, IfExprNullCondExpr(col 41:boolean, null, col 43:int)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-24 03:18:32.413655165) -> 41:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 43:int) -> 44:int, IfExprCondExprNull(col 47:boolean, col 45:int, null)(children: ColAndCol(col 45:boolean, col 46:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 18:40:08.165) -> 45:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 16:31:29.702202248) -> 46:boolean) -> 47:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 45:int) -> 46:int, IfExprCondExprCondExpr(col 50:boolean, col 51:datecol 52:date)(child
 ren: DoubleColGreaterDoubleScalar(col 49:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 48:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 48:double) -> 49:double) -> 50:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 51:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 52:date) -> 53:date
               Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: timestamp), _col10 (type: string), _col1 (type: timestamp)
@@ -761,46 +761,46 @@ NULL	NULL	Unknown	NULL	NULL	NULL	2018-03-08 23:04:59	NULL	NULL	NULL
 1815-05-06 00:12:37.543584705	1815-05-04 22:09:33.543584705	1900s	Old	Old	1815	2018-03-08 23:04:59	12	NULL	1816-05-05
 1883-04-17 04:14:34.647766229	1883-04-16 02:11:30.647766229	1900s	Old	Old	1883	2018-03-08 23:04:59	14	NULL	1884-04-16
 1966-08-16 13:36:50.183618031	1966-08-15 11:33:46.183618031	Early 2010s	Old	Old	1966	1966-08-16 13:36:50.183618031	36	NULL	1967-08-16
-1973-04-17 06:30:38.596784156	1973-04-16 04:27:34.596784156	Early 2010s	Old	Old	1973	1973-04-17 06:30:38.596784156	30	NULL	1973-04-18
+1973-04-17 06:30:38.596784156	1973-04-16 04:27:34.596784156	Early 2010s	Old	Old	1973	1973-04-17 06:30:38.596784156	30	NULL	1974-04-17
 1974-10-04 17:21:03.989	1974-10-03 15:17:59.989	Early 2010s	Old	Old	1974	1974-10-04 17:21:03.989	21	NULL	1974-10-05
 1976-03-03 04:54:33.000895162	1976-03-02 02:51:29.000895162	Early 2010s	Old	Old	1976	1976-03-03 04:54:33.000895162	54	NULL	1976-03-04
-1976-05-06 00:42:30.910786948	1976-05-04 22:39:26.910786948	Early 2010s	Old	Old	1976	1976-05-06 00:42:30.910786948	42	NULL	1976-05-07
-1978-08-05 14:41:05.501	1978-08-04 12:38:01.501	Early 2010s	Old	Old	1978	1978-08-05 14:41:05.501	41	NULL	1979-08-05
-1981-04-25 09:01:12.077192689	1981-04-24 06:58:08.077192689	Early 2010s	Old	Old	1981	1981-04-25 09:01:12.077192689	1	NULL	1981-04-26
+1976-05-06 00:42:30.910786948	1976-05-04 22:39:26.910786948	Early 2010s	Old	Old	1976	1976-05-06 00:42:30.910786948	42	NULL	1977-05-06
+1978-08-05 14:41:05.501	1978-08-04 12:38:01.501	Early 2010s	Old	Old	1978	1978-08-05 14:41:05.501	41	NULL	1978-08-06
+1981-04-25 09:01:12.077192689	1981-04-24 06:58:08.077192689	Early 2010s	Old	Old	1981	1981-04-25 09:01:12.077192689	1	NULL	1982-04-25
 1981-11-15 23:03:10.999338387	1981-11-14 21:00:06.999338387	Early 2010s	Old	Old	1981	1981-11-15 23:03:10.999338387	3	NULL	1981-11-16
-1985-07-20 09:30:11	1985-07-19 07:27:07	Early 2010s	Old	Old	1985	1985-07-20 09:30:11	30	NULL	1985-07-21
+1985-07-20 09:30:11	1985-07-19 07:27:07	Early 2010s	Old	Old	1985	1985-07-20 09:30:11	30	NULL	1986-07-20
 1985-11-18 16:37:54	1985-11-17 14:34:50	Early 2010s	Old	Old	1985	1985-11-18 16:37:54	37	NULL	1985-11-19
 1987-02-21 19:48:29	1987-02-20 17:45:25	Early 2010s	Old	Old	1987	1987-02-21 19:48:29	48	NULL	1987-02-22
-1987-05-28 13:52:07.900916635	1987-05-27 11:49:03.900916635	Early 2010s	Old	Old	1987	1987-05-28 13:52:07.900916635	52	NULL	1987-05-29
-1998-10-16 20:05:29.397591987	1998-10-15 18:02:25.397591987	Early 2010s	Old	Old	1998	1998-10-16 20:05:29.397591987	5	NULL	1998-10-17
+1987-05-28 13:52:07.900916635	1987-05-27 11:49:03.900916635	Early 2010s	Old	Old	1987	1987-05-28 13:52:07.900916635	52	NULL	1988-05-27
+1998-10-16 20:05:29.397591987	1998-10-15 18:02:25.397591987	Early 2010s	Old	Old	1998	1998-10-16 20:05:29.397591987	5	NULL	1999-10-16
 1999-10-03 16:59:10.396903939	1999-10-02 14:56:06.396903939	Early 2010s	Old	Old	1999	1999-10-03 16:59:10.396903939	59	NULL	1999-10-04
 2000-12-18 08:42:30.000595596	2000-12-17 06:39:26.000595596	Early 2010s	Old	Old	2000	2018-03-08 23:04:59	42	NULL	2000-12-19
-2002-05-10 05:29:48.990818073	2002-05-09 03:26:44.990818073	Early 2010s	Early 2000s	Early 2000s	2002	2018-03-08 23:04:59	29	NULL	2003-05-10
-2003-09-23 22:33:17.00003252	2003-09-22 20:30:13.00003252	Early 2010s	Early 2000s	Early 2000s	2003	2018-03-08 23:04:59	33	NULL	2003-09-24
+2002-05-10 05:29:48.990818073	2002-05-09 03:26:44.990818073	Early 2010s	Early 2000s	Early 2000s	2002	2018-03-08 23:04:59	29	NULL	2002-05-11
+2003-09-23 22:33:17.00003252	2003-09-22 20:30:13.00003252	Early 2010s	Early 2000s	Early 2000s	2003	2018-03-08 23:04:59	33	NULL	2004-09-22
 2004-03-07 20:14:13	2004-03-06 18:11:09	Early 2010s	Early 2000s	Early 2000s	2004	2018-03-08 23:04:59	14	NULL	2004-03-08
-2007-02-09 05:17:29.368756876	2007-02-08 03:14:25.368756876	Late 2000s	Late 2000s	Late 2000s	2007	2018-03-08 23:04:59	17	NULL	2007-02-10
+2007-02-09 05:17:29.368756876	2007-02-08 03:14:25.368756876	Late 2000s	Late 2000s	Late 2000s	2007	2018-03-08 23:04:59	17	NULL	2008-02-09
 2009-01-21 10:49:07.108	2009-01-20 08:46:03.108	Late 2000s	Late 2000s	Late 2000s	2009	2018-03-08 23:04:59	49	NULL	2009-01-22
 2010-04-08 02:43:35.861742727	2010-04-07 00:40:31.861742727	Late 2000s	Late 2000s	Late 2000s	2010	2018-03-08 23:04:59	43	NULL	2010-04-09
 2013-04-07 02:44:43.00086821	2013-04-06 00:41:39.00086821	Early 2010s	Early 2010s	NULL	2013	2018-03-08 23:04:59	44	NULL	2013-04-08
 2013-04-10 00:43:46.854731546	2013-04-08 22:40:42.854731546	Early 2010s	Early 2010s	NULL	2013	2018-03-08 23:04:59	43	NULL	2013-04-11
-2021-09-24 03:18:32.413655165	2021-09-23 01:15:28.413655165	Unknown	NULL	NULL	2021	2018-03-08 23:04:59	NULL	NULL	2022-09-24
+2021-09-24 03:18:32.413655165	2021-09-23 01:15:28.413655165	Unknown	NULL	NULL	2021	2018-03-08 23:04:59	NULL	NULL	2021-09-25
 2024-11-11 16:42:41.101	2024-11-10 14:39:37.101	Unknown	NULL	NULL	2024	2018-03-08 23:04:59	42	NULL	2024-11-12
 4143-07-08 10:53:27.252802259	4143-07-07 08:50:23.252802259	Unknown	NULL	NULL	4143	2018-03-08 23:04:59	53	NULL	4143-07-09
 4966-12-04 09:30:55.202	4966-12-03 07:27:51.202	Unknown	NULL	NULL	4966	2018-03-08 23:04:59	30	NULL	4966-12-05
-5339-02-01 14:10:01.085678691	5339-01-31 12:06:57.085678691	Unknown	NULL	NULL	5339	2018-03-08 23:04:59	10	NULL	5339-02-02
+5339-02-01 14:10:01.085678691	5339-01-31 12:06:57.085678691	Unknown	NULL	NULL	5339	2018-03-08 23:04:59	10	NULL	5340-02-01
 5344-10-04 18:40:08.165	5344-10-03 16:37:04.165	Unknown	NULL	NULL	5344	2018-03-08 23:04:59	40	NULL	5344-10-05
 5397-07-13 07:12:32.000896438	5397-07-12 05:09:28.000896438	Unknown	NULL	NULL	5397	2018-03-08 23:04:59	12	12	5397-07-14
-5966-07-09 03:30:50.597	5966-07-08 01:27:46.597	Unknown	NULL	NULL	5966	2018-03-08 23:04:59	30	30	5966-07-10
-6229-06-28 02:54:28.970117179	6229-06-27 00:51:24.970117179	Unknown	NULL	NULL	6229	2018-03-08 23:04:59	54	54	6229-06-29
-6482-04-27 12:07:38.073915413	6482-04-26 10:04:34.073915413	Unknown	NULL	NULL	6482	2018-03-08 23:04:59	7	7	6483-04-27
+5966-07-09 03:30:50.597	5966-07-08 01:27:46.597	Unknown	NULL	NULL	5966	2018-03-08 23:04:59	30	30	5967-07-09
+6229-06-28 02:54:28.970117179	6229-06-27 00:51:24.970117179	Unknown	NULL	NULL	6229	2018-03-08 23:04:59	54	54	6230-06-28
+6482-04-27 12:07:38.073915413	6482-04-26 10:04:34.073915413	Unknown	NULL	NULL	6482	2018-03-08 23:04:59	7	7	6482-04-28
 6631-11-13 16:31:29.702202248	6631-11-12 14:28:25.702202248	Unknown	NULL	NULL	6631	2018-03-08 23:04:59	31	31	6631-11-14
 6705-09-28 18:27:28.000845672	6705-09-27 16:24:24.000845672	Unknown	NULL	NULL	6705	2018-03-08 23:04:59	27	NULL	6705-09-29
 6731-02-12 08:12:48.287783702	6731-02-11 06:09:44.287783702	Unknown	NULL	NULL	6731	2018-03-08 23:04:59	12	NULL	6731-02-13
-7160-12-02 06:00:24.81200852	7160-12-01 03:57:20.81200852	Unknown	NULL	NULL	7160	2018-03-08 23:04:59	0	NULL	7160-12-03
-7409-09-07 23:33:32.459349602	7409-09-06 21:30:28.459349602	Unknown	NULL	NULL	7409	2018-03-08 23:04:59	33	NULL	7410-09-07
+7160-12-02 06:00:24.81200852	7160-12-01 03:57:20.81200852	Unknown	NULL	NULL	7160	2018-03-08 23:04:59	0	NULL	7161-12-02
+7409-09-07 23:33:32.459349602	7409-09-06 21:30:28.459349602	Unknown	NULL	NULL	7409	2018-03-08 23:04:59	33	NULL	7409-09-08
 7503-06-23 23:14:17.486	7503-06-22 21:11:13.486	Unknown	NULL	NULL	7503	2018-03-08 23:04:59	14	NULL	7503-06-24
 8422-07-22 03:21:45.745036084	8422-07-21 01:18:41.745036084	Unknown	NULL	NULL	8422	2018-03-08 23:04:59	21	NULL	8422-07-23
 8521-01-16 20:42:05.668832388	8521-01-15 18:39:01.668832388	Unknown	NULL	NULL	8521	2018-03-08 23:04:59	42	NULL	8521-01-17
 9075-06-13 16:20:09.218517797	9075-06-12 14:17:05.218517797	Unknown	NULL	NULL	9075	2018-03-08 23:04:59	20	NULL	9075-06-14
 9209-11-11 04:08:58.223768453	9209-11-10 02:05:54.223768453	Unknown	NULL	NULL	9209	2018-03-08 23:04:59	8	NULL	9209-11-12
-9403-01-09 18:12:33.547	9403-01-08 16:09:29.547	Unknown	NULL	NULL	9403	2018-03-08 23:04:59	12	NULL	9404-01-09
+9403-01-09 18:12:33.547	9403-01-08 16:09:29.547	Unknown	NULL	NULL	9403	2018-03-08 23:04:59	12	NULL	9403-01-10

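The flipped date_add values in the rows above trace back to CastTimestampToDouble in the plan: the choice between date_add(dt, 1) and date_add(dt, 365) is guarded by (epoch seconds mod 500.0) > 100.0, and the two implementations disagree on epoch seconds by the local UTC offset. A minimal sketch of that disagreement, assuming the US Pacific zone these golden files appear to have been generated in (the zone is an assumption, not something the diff states):

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class EpochSecondsDemo {
        public static void main(String[] args) {
            // A wall-clock value taken from one of the flipped rows above.
            LocalDateTime wallClock = LocalDateTime.parse("1973-04-17T06:30:38");

            // HIVE-12192 behavior: interpret the wall clock as UTC.
            long utcSeconds = wallClock.toEpochSecond(ZoneOffset.UTC);

            // Reverted behavior: interpret it in the session/local zone
            // (assumed here to be America/Los_Angeles).
            long localSeconds = wallClock.atZone(ZoneId.of("America/Los_Angeles"))
                                         .toEpochSecond();

            // An eight-hour (28800 s) disagreement is enough to flip a
            // predicate like (seconds % 500) > 100, which switches the row
            // between the date_add(dt, 1) and date_add(dt, 365) branches.
            System.out.println(utcSeconds + " vs " + localSeconds
                    + " (diff " + (localSeconds - utcSeconds) + " s)");
        }
    }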
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vector_data_types.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_data_types.q.out b/ql/src/test/results/clientpositive/vector_data_types.q.out
index b9d8ac8..dad2abd 100644
--- a/ql/src/test/results/clientpositive/vector_data_types.q.out
+++ b/ql/src/test/results/clientpositive/vector_data_types.q.out
@@ -199,7 +199,7 @@ FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1korc_n1
 #### A masked pattern was here ####
--25838728092
+-17045922556
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION select t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i LIMIT 20
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION select t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i LIMIT 20
@@ -403,4 +403,4 @@ FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1korc_n1
 #### A masked pattern was here ####
--25838728092
+-17045922556

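The SUM(HASH(*)) checksum above changes for the same underlying reason: the per-row hash folds in each timestamp column's epoch-based value, so reinterpreting every timestamp moves the aggregate. A toy illustration of the mechanism (deliberately not Hive's actual hash function):

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class ChecksumDemo {
        // Toy row checksum: mix an int column with a timestamp's epoch seconds.
        static long rowHash(int intCol, long epochSeconds) {
            return 31L * intCol + epochSeconds;
        }

        public static void main(String[] args) {
            LocalDateTime ts = LocalDateTime.parse("1969-12-31T15:59:46");
            long utc = ts.toEpochSecond(ZoneOffset.UTC);
            long local = ts.atZone(ZoneId.of("America/Los_Angeles")).toEpochSecond();

            // The same row checksums differently once its timestamp is
            // reinterpreted, so a SUM over all row hashes changes too.
            System.out.println(rowHash(528534767, utc) + " vs "
                    + rowHash(528534767, local));
        }
    }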
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vector_decimal_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_decimal_1.q.out b/ql/src/test/results/clientpositive/vector_decimal_1.q.out
index e616912..ee6895b 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_1.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_1.q.out
@@ -861,7 +861,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_1
 #### A masked pattern was here ####
 NULL
-1970-01-01 00:00:17.29
+1969-12-31 16:00:17.29
 PREHOOK: query: drop table decimal_1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@decimal_1

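The one changed row above is the DECIMAL 17.29 cast to TIMESTAMP: both versions agree it is 17.29 seconds past the epoch, and only the rendering zone differs. A small sketch that reproduces both strings, again assuming America/Los_Angeles for the local side:

    import java.time.Instant;
    import java.time.ZoneId;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;

    public class DecimalCastDemo {
        public static void main(String[] args) {
            // DECIMAL 17.29 cast to TIMESTAMP: 17.29 seconds after the epoch.
            Instant instant = Instant.ofEpochMilli(17_290);
            DateTimeFormatter fmt =
                DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SS");

            // UTC rendering (HIVE-12192):      1970-01-01 00:00:17.29
            System.out.println(fmt.format(instant.atZone(ZoneOffset.UTC)));
            // Local rendering (reverted, PST): 1969-12-31 16:00:17.29
            System.out.println(fmt.format(
                instant.atZone(ZoneId.of("America/Los_Angeles"))));
        }
    }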
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vector_decimal_cast.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_decimal_cast.q.out b/ql/src/test/results/clientpositive/vector_decimal_cast.q.out
index a265185..1c9ca38 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_cast.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_cast.q.out
@@ -83,16 +83,16 @@ POSTHOOK: query: SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS D
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
--13326.0	528534767	true	1969-12-31 15:59:46.674	-13326.0000000000	528534767.00000000000000	1.00	-28813
--15813.0	528534767	true	1969-12-31 15:59:55.787	-15813.0000000000	528534767.00000000000000	1.00	-28804
--9566.0	528534767	true	1969-12-31 15:59:44.187	-9566.0000000000	528534767.00000000000000	1.00	-28816
-15007.0	528534767	true	1969-12-31 15:59:50.434	15007.0000000000	528534767.00000000000000	1.00	-28810
-7021.0	528534767	true	1969-12-31 16:00:15.007	7021.0000000000	528534767.00000000000000	1.00	-28785
-4963.0	528534767	true	1969-12-31 16:00:07.021	4963.0000000000	528534767.00000000000000	1.00	-28793
--7824.0	528534767	true	1969-12-31 16:00:04.963	-7824.0000000000	528534767.00000000000000	1.00	-28795
--15431.0	528534767	true	1969-12-31 15:59:52.176	-15431.0000000000	528534767.00000000000000	1.00	-28808
--15549.0	528534767	true	1969-12-31 15:59:44.569	-15549.0000000000	528534767.00000000000000	1.00	-28815
-5780.0	528534767	true	1969-12-31 15:59:44.451	5780.0000000000	528534767.00000000000000	1.00	-28816
+-13326.0	528534767	true	1969-12-31 15:59:46.674	-13326.0000000000	528534767.00000000000000	1.00	-13
+-15813.0	528534767	true	1969-12-31 15:59:55.787	-15813.0000000000	528534767.00000000000000	1.00	-4
+-9566.0	528534767	true	1969-12-31 15:59:44.187	-9566.0000000000	528534767.00000000000000	1.00	-16
+15007.0	528534767	true	1969-12-31 15:59:50.434	15007.0000000000	528534767.00000000000000	1.00	-10
+7021.0	528534767	true	1969-12-31 16:00:15.007	7021.0000000000	528534767.00000000000000	1.00	15
+4963.0	528534767	true	1969-12-31 16:00:07.021	4963.0000000000	528534767.00000000000000	1.00	7
+-7824.0	528534767	true	1969-12-31 16:00:04.963	-7824.0000000000	528534767.00000000000000	1.00	5
+-15431.0	528534767	true	1969-12-31 15:59:52.176	-15431.0000000000	528534767.00000000000000	1.00	-8
+-15549.0	528534767	true	1969-12-31 15:59:44.569	-15549.0000000000	528534767.00000000000000	1.00	-15
+5780.0	528534767	true	1969-12-31 15:59:44.451	5780.0000000000	528534767.00000000000000	1.00	-16
 PREHOOK: query: CREATE TABLE alltypes_small STORED AS TEXTFILE AS SELECT * FROM alltypesorc
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
@@ -202,13 +202,13 @@ POSTHOOK: query: SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS D
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypes_small
 #### A masked pattern was here ####
--13326.0	528534767	true	1969-12-31 15:59:46.674	-13326.0000000000	528534767.00000000000000	1.00	-28813
--15813.0	528534767	true	1969-12-31 15:59:55.787	-15813.0000000000	528534767.00000000000000	1.00	-28804
--9566.0	528534767	true	1969-12-31 15:59:44.187	-9566.0000000000	528534767.00000000000000	1.00	-28816
-15007.0	528534767	true	1969-12-31 15:59:50.434	15007.0000000000	528534767.00000000000000	1.00	-28810
-7021.0	528534767	true	1969-12-31 16:00:15.007	7021.0000000000	528534767.00000000000000	1.00	-28785
-4963.0	528534767	true	1969-12-31 16:00:07.021	4963.0000000000	528534767.00000000000000	1.00	-28793
--7824.0	528534767	true	1969-12-31 16:00:04.963	-7824.0000000000	528534767.00000000000000	1.00	-28795
--15431.0	528534767	true	1969-12-31 15:59:52.176	-15431.0000000000	528534767.00000000000000	1.00	-28808
--15549.0	528534767	true	1969-12-31 15:59:44.569	-15549.0000000000	528534767.00000000000000	1.00	-28815
-5780.0	528534767	true	1969-12-31 15:59:44.451	5780.0000000000	528534767.00000000000000	1.00	-28816
+-13326.0	528534767	true	1969-12-31 15:59:46.674	-13326.0000000000	528534767.00000000000000	1.00	-13
+-15813.0	528534767	true	1969-12-31 15:59:55.787	-15813.0000000000	528534767.00000000000000	1.00	-4
+-9566.0	528534767	true	1969-12-31 15:59:44.187	-9566.0000000000	528534767.00000000000000	1.00	-16
+15007.0	528534767	true	1969-12-31 15:59:50.434	15007.0000000000	528534767.00000000000000	1.00	-10
+7021.0	528534767	true	1969-12-31 16:00:15.007	7021.0000000000	528534767.00000000000000	1.00	15
+4963.0	528534767	true	1969-12-31 16:00:07.021	4963.0000000000	528534767.00000000000000	1.00	7
+-7824.0	528534767	true	1969-12-31 16:00:04.963	-7824.0000000000	528534767.00000000000000	1.00	5
+-15431.0	528534767	true	1969-12-31 15:59:52.176	-15431.0000000000	528534767.00000000000000	1.00	-8
+-15549.0	528534767	true	1969-12-31 15:59:44.569	-15549.0000000000	528534767.00000000000000	1.00	-15
+5780.0	528534767	true	1969-12-31 15:59:44.451	5780.0000000000	528534767.00000000000000	1.00	-16

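The last column in the rows above looks like ctimestamp1 cast down to an integer count of epoch seconds, and it shows the reinterpretation most directly: the wall clock 1969-12-31 15:59:46.674 is -28813 s before the epoch when read as UTC, but only -13 s when read as Pacific time (16:00 PST is the epoch). A sketch of both readings, with the zone again an assumption:

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class TimestampToSecondsDemo {
        public static void main(String[] args) {
            LocalDateTime wallClock =
                LocalDateTime.parse("1969-12-31T15:59:46.674");

            // Read as UTC (HIVE-12192): -28813.326 s, truncated to -28813.
            long asUtc = wallClock.toInstant(ZoneOffset.UTC).toEpochMilli() / 1000;

            // Read as Pacific time (reverted): -13.326 s, truncated to -13.
            long asLocal = wallClock.atZone(ZoneId.of("America/Los_Angeles"))
                                    .toInstant().toEpochMilli() / 1000;

            System.out.println(asUtc + " vs " + asLocal);
        }
    }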
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out b/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
index 4caace2..674d3f7 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
@@ -136,16 +136,16 @@ LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_test_n1
 #### A masked pattern was here ####
-1836.44199584197700	-1166.02723492725400	0.8372697814834	245972.55810810255804469	5.6189189189	835	1000	NULL	835	true	1000.823076923077	835.6189	1000.823076923077	1970-01-01 00:13:55.618918918
-1856.13222453224620	-1178.52931392929240	0.8372449787014	251275.44324324968747899	4.5783783784	844	1011	NULL	844	true	1011.5538461538462	844.57837	1011.5538461538462	1970-01-01 00:14:04.578378378
-1858.75758835761550	-1180.19625779623100	0.8372417113669	251986.76756757564861519	5.7729729730	845	1012	NULL	845	true	1012.9846153846155	845.77295	1012.9846153846155	1970-01-01 00:14:05.772972973
-1862.69563409566930	-1182.69667359663860	0.8372368276345	253055.63918919969667286	7.5648648649	847	1015	NULL	847	true	1015.1307692307693	847.5649	1015.1307692307693	1970-01-01 00:14:07.564864864
-1883.69854469852330	-1196.03222453224660	0.8372111259286	258794.49324323677116559	7.1216216216	857	1026	NULL	857	true	1026.5769230769233	857.12164	1026.5769230769233	1970-01-01 00:14:17.121621621
-1886.32390852389240	-1197.69916839918480	0.8372079534582	259516.37432431944456816	8.3162162162	858	1028	NULL	858	true	1028.0076923076924	858.3162	1028.0076923076924	1970-01-01 00:14:18.316216216
-1887.63659043657700	-1198.53264033265400	0.8372063705322	259877.69189188782259834	8.9135135135	858	1028	NULL	858	true	1028.723076923077	858.9135	1028.723076923077	1970-01-01 00:14:18.913513513
-1895.51268191268460	-1203.53347193346920	0.8371969190171	262050.87567567649292835	2.4972972973	862	1033	NULL	862	true	1033.0153846153846	862.4973	1033.0153846153846	1970-01-01 00:14:22.497297297
-1909.95218295221550	-1212.70166320163100	0.8371797936946	266058.54729730725574014	9.0675675676	869	1040	NULL	869	true	1040.8846153846155	869.06757	1040.8846153846155	1970-01-01 00:14:29.067567567
-1913.89022869026920	-1215.20207900203840	0.8371751679996	267156.82702703945592392	0.8594594595	870	1043	NULL	870	true	1043.0307692307692	870.85944	1043.0307692307692	1970-01-01 00:14:30.859459459
+1836.44199584197700	-1166.02723492725400	0.8372697814834	245972.55810810255804469	5.6189189189	835	1000	NULL	835	true	1000.823076923077	835.6189	1000.823076923077	1969-12-31 16:13:55.618918918
+1856.13222453224620	-1178.52931392929240	0.8372449787014	251275.44324324968747899	4.5783783784	844	1011	NULL	844	true	1011.5538461538462	844.57837	1011.5538461538462	1969-12-31 16:14:04.578378378
+1858.75758835761550	-1180.19625779623100	0.8372417113669	251986.76756757564861519	5.7729729730	845	1012	NULL	845	true	1012.9846153846155	845.77295	1012.9846153846155	1969-12-31 16:14:05.772972973
+1862.69563409566930	-1182.69667359663860	0.8372368276345	253055.63918919969667286	7.5648648649	847	1015	NULL	847	true	1015.1307692307693	847.5649	1015.1307692307693	1969-12-31 16:14:07.564864864
+1883.69854469852330	-1196.03222453224660	0.8372111259286	258794.49324323677116559	7.1216216216	857	1026	NULL	857	true	1026.5769230769233	857.12164	1026.5769230769233	1969-12-31 16:14:17.121621621
+1886.32390852389240	-1197.69916839918480	0.8372079534582	259516.37432431944456816	8.3162162162	858	1028	NULL	858	true	1028.0076923076924	858.3162	1028.0076923076924	1969-12-31 16:14:18.316216216
+1887.63659043657700	-1198.53264033265400	0.8372063705322	259877.69189188782259834	8.9135135135	858	1028	NULL	858	true	1028.723076923077	858.9135	1028.723076923077	1969-12-31 16:14:18.913513513
+1895.51268191268460	-1203.53347193346920	0.8371969190171	262050.87567567649292835	2.4972972973	862	1033	NULL	862	true	1033.0153846153846	862.4973	1033.0153846153846	1969-12-31 16:14:22.497297297
+1909.95218295221550	-1212.70166320163100	0.8371797936946	266058.54729730725574014	9.0675675676	869	1040	NULL	869	true	1040.8846153846155	869.06757	1040.8846153846155	1969-12-31 16:14:29.067567567
+1913.89022869026920	-1215.20207900203840	0.8371751679996	267156.82702703945592392	0.8594594595	870	1043	NULL	870	true	1043.0307692307692	870.85944	1043.0307692307692	1969-12-31 16:14:30.859459459
 PREHOOK: query: SELECT SUM(HASH(*))
 FROM (SELECT cdecimal1 + cdecimal2 as c1, cdecimal1 - (2*cdecimal2) as c2, ((cdecimal1+2.34)/cdecimal2) as c3, (cdecimal1 * (cdecimal2/3.4)) as c4, cdecimal1 % 10 as c5, CAST(cdecimal1 AS INT) as c6, CAST(cdecimal2 AS SMALLINT) as c7, CAST(cdecimal2 AS TINYINT) as c8, CAST(cdecimal1 AS BIGINT) as c9, CAST (cdecimal1 AS BOOLEAN) as c10, CAST(cdecimal2 AS DOUBLE) as c11, CAST(cdecimal1 AS FLOAT) as c12, CAST(cdecimal2 AS STRING) as c13, CAST(cdecimal1 AS TIMESTAMP) as c14 FROM decimal_test_n1 WHERE cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdecimal2 > 1000 AND cdouble IS NOT NULL
 ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q
@@ -280,16 +280,16 @@ LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_test_small_n0
 #### A masked pattern was here ####
-1836.439	-1166.021	0.83727243660	245971.826152056	5.619	835	1000	NULL	835	true	1000.82	835.619	1000.82	1970-01-01 00:13:55.619
-1856.128	-1178.522	0.83724778805	251274.375364068	4.578	844	1011	NULL	844	true	1011.55	844.578	1011.55	1970-01-01 00:14:04.578
-1858.753	-1180.187	0.83724555273	251985.627412262	5.773	845	1012	NULL	845	true	1012.98	845.773	1012.98	1970-01-01 00:14:05.773
-1862.695	-1182.695	0.83723759518	253055.487729555	7.565	847	1015	NULL	847	true	1015.13	847.565	1015.13	1970-01-01 00:14:07.565
-1883.702	-1196.038	0.83720898517	258795.383063868	7.122	857	1026	NULL	857	true	1026.58	857.122	1026.58	1970-01-01 00:14:17.122
-1886.326	-1197.704	0.83720586376	259516.891214712	8.316	858	1028	NULL	858	true	1028.01	858.316	1028.01	1970-01-01 00:14:18.316
-1887.634	-1198.526	0.83720934754	259877.061889284	8.914	858	1028	NULL	858	true	1028.72	858.914	1028.72	1970-01-01 00:14:18.914
-1895.517	-1203.543	0.83719289075	262051.956361764	2.497	862	1033	NULL	862	true	1033.02	862.497	1033.02	1970-01-01 00:14:22.497
-1909.948	-1212.692	0.83718392130	266057.499543968	9.068	869	1040	NULL	869	true	1040.88	869.068	1040.88	1970-01-01 00:14:29.068
-1913.889	-1215.201	0.83717534491	267156.488691411	0.859	870	1043	NULL	870	true	1043.03	870.859	1043.03	1970-01-01 00:14:30.859
+1836.439	-1166.021	0.83727243660	245971.826152056	5.619	835	1000	NULL	835	true	1000.82	835.619	1000.82	1969-12-31 16:13:55.619
+1856.128	-1178.522	0.83724778805	251274.375364068	4.578	844	1011	NULL	844	true	1011.55	844.578	1011.55	1969-12-31 16:14:04.578
+1858.753	-1180.187	0.83724555273	251985.627412262	5.773	845	1012	NULL	845	true	1012.98	845.773	1012.98	1969-12-31 16:14:05.773
+1862.695	-1182.695	0.83723759518	253055.487729555	7.565	847	1015	NULL	847	true	1015.13	847.565	1015.13	1969-12-31 16:14:07.565
+1883.702	-1196.038	0.83720898517	258795.383063868	7.122	857	1026	NULL	857	true	1026.58	857.122	1026.58	1969-12-31 16:14:17.122
+1886.326	-1197.704	0.83720586376	259516.891214712	8.316	858	1028	NULL	858	true	1028.01	858.316	1028.01	1969-12-31 16:14:18.316
+1887.634	-1198.526	0.83720934754	259877.061889284	8.914	858	1028	NULL	858	true	1028.72	858.914	1028.72	1969-12-31 16:14:18.914
+1895.517	-1203.543	0.83719289075	262051.956361764	2.497	862	1033	NULL	862	true	1033.02	862.497	1033.02	1969-12-31 16:14:22.497
+1909.948	-1212.692	0.83718392130	266057.499543968	9.068	869	1040	NULL	869	true	1040.88	869.068	1040.88	1969-12-31 16:14:29.068
+1913.889	-1215.201	0.83717534491	267156.488691411	0.859	870	1043	NULL	870	true	1043.03	870.859	1043.03	1969-12-31 16:14:30.859
 PREHOOK: query: SELECT SUM(HASH(*))
 FROM (SELECT cdecimal1 + cdecimal2 as c1, cdecimal1 - (2*cdecimal2) as c2, ((cdecimal1+2.34)/cdecimal2) as c3, (cdecimal1 * (cdecimal2/3.4)) as c4, cdecimal1 % 10 as c5, CAST(cdecimal1 AS INT) as c6, CAST(cdecimal2 AS SMALLINT) as c7, CAST(cdecimal2 AS TINYINT) as c8, CAST(cdecimal1 AS BIGINT) as c9, CAST (cdecimal1 AS BOOLEAN) as c10, CAST(cdecimal2 AS DOUBLE) as c11, CAST(cdecimal1 AS FLOAT) as c12, CAST(cdecimal2 AS STRING) as c13, CAST(cdecimal1 AS TIMESTAMP) as c14 FROM decimal_test_small_n0 WHERE cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdecimal2 > 1000 AND cdouble IS NOT NULL
 ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vector_interval_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_interval_1.q.out b/ql/src/test/results/clientpositive/vector_interval_1.q.out
index 7bbd4a7..6ee3154 100644
--- a/ql/src/test/results/clientpositive/vector_interval_1.q.out
+++ b/ql/src/test/results/clientpositive/vector_interval_1.q.out
@@ -713,13 +713,13 @@ STAGE PLANS:
             TableScan Vectorization:
                 native: true
             Select Operator
-              expressions: ts (type: timestamp), (ts - ts) (type: interval_day_time), (TIMESTAMP'2001-01-01 01:02:03' - ts) (type: interval_day_time), (ts - TIMESTAMP'2001-01-01 01:02:03') (type: interval_day_time)
+              expressions: ts (type: timestamp), (ts - ts) (type: interval_day_time), (TIMESTAMP'2001-01-01 01:02:03.0' - ts) (type: interval_day_time), (ts - TIMESTAMP'2001-01-01 01:02:03.0') (type: interval_day_time)
               outputColumnNames: _col0, _col1, _col2, _col3
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
                   projectedOutputColumnNums: [0, 5, 6, 7]
-                  selectExpressions: TimestampColSubtractTimestampColumn(col 0:timestamp, col 0:timestamp) -> 5:interval_day_time, TimestampScalarSubtractTimestampColumn(val 2001-01-01 01:02:03, col 0:timestamp) -> 6:interval_day_time, TimestampColSubtractTimestampScalar(col 0:timestamp, val 2001-01-01 01:02:03) -> 7:interval_day_time
+                  selectExpressions: TimestampColSubtractTimestampColumn(col 0:timestamp, col 0:timestamp) -> 5:interval_day_time, TimestampScalarSubtractTimestampColumn(val 2001-01-01 01:02:03.0, col 0:timestamp) -> 6:interval_day_time, TimestampColSubtractTimestampScalar(col 0:timestamp, val 2001-01-01 01:02:03.0) -> 7:interval_day_time
               Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: timestamp)
@@ -826,7 +826,7 @@ STAGE PLANS:
                   className: VectorSelectOperator
                   native: true
                   projectedOutputColumnNums: [1, 5, 6, 7]
-                  selectExpressions: DateColSubtractDateColumn(col 1:date, col 1:date) -> 5:interval_day_time, DateScalarSubtractDateColumn(val 2001-01-01, col 1:date) -> 6:interval_day_time, DateColSubtractDateScalar(col 1:date, val 2001-01-01) -> 7:interval_day_time
+                  selectExpressions: DateColSubtractDateColumn(col 1:date, col 1:date) -> 5:interval_day_time, DateScalarSubtractDateColumn(val 2001-01-01 00:00:00.0, col 1:date) -> 6:interval_day_time, DateColSubtractDateScalar(col 1:date, val 2001-01-01 00:00:00.0) -> 7:interval_day_time
               Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: date)
@@ -933,13 +933,13 @@ STAGE PLANS:
             TableScan Vectorization:
                 native: true
             Select Operator
-              expressions: dt (type: date), (ts - dt) (type: interval_day_time), (TIMESTAMP'2001-01-01 01:02:03' - dt) (type: interval_day_time), (ts - DATE'2001-01-01') (type: interval_day_time), (dt - ts) (type: interval_day_time), (dt - TIMESTAMP'2001-01-01 01:02:03') (type: interval_day_time), (DATE'2001-01-01' - ts) (type: interval_day_time)
+              expressions: dt (type: date), (ts - dt) (type: interval_day_time), (TIMESTAMP'2001-01-01 01:02:03.0' - dt) (type: interval_day_time), (ts - DATE'2001-01-01') (type: interval_day_time), (dt - ts) (type: interval_day_time), (dt - TIMESTAMP'2001-01-01 01:02:03.0') (type: interval_day_time), (DATE'2001-01-01' - ts) (type: interval_day_time)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
                   projectedOutputColumnNums: [1, 5, 6, 7, 8, 9, 10]
-                  selectExpressions: TimestampColSubtractDateColumn(col 0:timestamp, col 1:date) -> 5:interval_day_time, TimestampScalarSubtractDateColumn(val 2001-01-01 01:02:03, col 1:date) -> 6:interval_day_time, TimestampColSubtractDateScalar(col 0:timestamp, val 2001-01-01) -> 7:interval_day_time, DateColSubtractTimestampColumn(col 1:date, col 0:timestamp) -> 8:interval_day_time, DateColSubtractTimestampScalar(col 1:date, val 2001-01-01 01:02:03) -> 9:interval_day_time, DateScalarSubtractTimestampColumn(val 2001-01-01, col 0:timestamp) -> 10:interval_day_time
+                  selectExpressions: TimestampColSubtractDateColumn(col 0:timestamp, col 1:date) -> 5:interval_day_time, TimestampScalarSubtractDateColumn(val 2001-01-01 01:02:03.0, col 1:date) -> 6:interval_day_time, TimestampColSubtractDateScalar(col 0:timestamp, val 2001-01-01 00:00:00.0) -> 7:interval_day_time, DateColSubtractTimestampColumn(col 1:date, col 0:timestamp) -> 8:interval_day_time, DateColSubtractTimestampScalar(col 1:date, val 2001-01-01 01:02:03.0) -> 9:interval_day_time, DateScalarSubtractTimestampColumn(val 2001-01-01 00:00:00.0, col 0:timestamp) -> 10:interval_day_time
               Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: date)

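Besides the zone shifts, the vector_interval_1 plan diff above also swaps TIMESTAMP'2001-01-01 01:02:03' back to TIMESTAMP'2001-01-01 01:02:03.0'. The reverted code renders literals through java.sql.Timestamp, whose toString() always emits at least one fractional digit, whereas the HIVE-12192 Timestamp type omitted a zero fraction:

    import java.sql.Timestamp;

    public class LiteralRenderingDemo {
        public static void main(String[] args) {
            // java.sql.Timestamp.toString() keeps at least one fractional
            // digit, so a whole-second value prints a trailing ".0".
            Timestamp ts = Timestamp.valueOf("2001-01-01 01:02:03");
            System.out.println(ts); // prints 2001-01-01 01:02:03.0
        }
    }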

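The TestGenericUDFTrunc diff that follows only swaps the writable and temporal types (TimestampWritableV2/DateWritableV2 back to TimestampWritable/DateWritable); the trunc semantics the assertions encode are unchanged: a month format maps any day to the first of its month, a quarter format to the first day of its quarter, and a year format to January 1. A standalone sketch of the quarter case those tests exercise, written against plain java.time rather than the UDF itself:

    import java.time.LocalDate;

    public class TruncQuarterDemo {
        // First day of the quarter containing d, mirroring trunc(d, 'Q').
        static LocalDate truncToQuarter(LocalDate d) {
            int firstMonthOfQuarter = ((d.getMonthValue() - 1) / 3) * 3 + 1;
            return LocalDate.of(d.getYear(), firstMonthOfQuarter, 1);
        }

        public static void main(String[] args) {
            // Matches the test expectations below: 2016-05-11 -> 2016-04-01,
            // 2016-12-31 -> 2016-10-01.
            System.out.println(truncToQuarter(LocalDate.parse("2016-05-11")));
            System.out.println(truncToQuarter(LocalDate.parse("2016-12-31")));
        }
    }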
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java
index 86511a6..0d524d3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java
@@ -18,13 +18,14 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
@@ -303,83 +304,83 @@ public class TestGenericUDFTrunc extends TestCase {
     DeferredObject[] evalArgs;
 
     // test date string
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-01 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-14 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-31 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-02-02 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-02-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-02-28 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-02-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-03 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-02-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-28 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-02-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-29 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-02-01", udf, initArgs, evalArgs);
 
     // test timestamp string
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-01 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-14 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-31 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-02-02 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-02-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-02-28 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-02-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-03 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-02-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-28 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-02-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-29 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-02-01", udf, initArgs, evalArgs);
@@ -397,113 +398,113 @@ public class TestGenericUDFTrunc extends TestCase {
     DeferredObject[] evalArgs;
 
     // test date string
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-01 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-14 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-31 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-02-02 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-02-28 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-03 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-28 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-29 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-05-11 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-04-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-07-01 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-07-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-12-31 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-10-01", udf, initArgs, evalArgs);
 
     // test timestamp string
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-01 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-14 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-31 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-02-02 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-02-28 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-03 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-28 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-29 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-05-11 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-04-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-07-01 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-07-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-12-31 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-10-01", udf, initArgs, evalArgs);
@@ -521,83 +522,83 @@ public class TestGenericUDFTrunc extends TestCase {
     DeferredObject[] evalArgs;
 
     // test date string
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-01 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-14 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-31 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-02-02 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-02-28 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-03 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-28 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-29 00:00:00")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
     // test timestamp string
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-01 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-14 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-01-31 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-02-02 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2014-02-28 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-03 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-28 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new TimestampWritableV2(
+    valueObj0 = new DeferredJavaObject(new TimestampWritable(
         Timestamp.valueOf("2016-02-29 10:30:45")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
@@ -615,35 +616,35 @@ public class TestGenericUDFTrunc extends TestCase {
     DeferredObject[] evalArgs;
 
     // test date string
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-01")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-01")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-14")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-14")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-31")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-31")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-02-02")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-02")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-02-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-02-28")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-28")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-02-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-03")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-03")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-02-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-28")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-28")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-02-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-29")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-29")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-02-01", udf, initArgs, evalArgs);
   }
@@ -660,47 +661,47 @@ public class TestGenericUDFTrunc extends TestCase {
     DeferredObject[] evalArgs;
 
     // test date string
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-01")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-01")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-14")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-14")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-31")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-31")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-02-02")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-02")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-02-28")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-28")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-03")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-03")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-28")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-28")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-29")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-29")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-05-11")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-05-11")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-04-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-07-01")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-07-01")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-07-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-12-31")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-12-31")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-10-01", udf, initArgs, evalArgs);
   }
@@ -717,35 +718,35 @@ public class TestGenericUDFTrunc extends TestCase {
     DeferredObject[] evalArgs;
 
     // test date string
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-01")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-01")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-14")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-14")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-31")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-31")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-02-02")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-02")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-02-28")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-28")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2014-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-03")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-03")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-28")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-28")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
 
-    valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-29")));
+    valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-29")));
     evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
     runAndVerify("2016-01-01", udf, initArgs, evalArgs);
   }

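For context on the hunks above: the revert only swaps DateWritableV2 back to DateWritable, and the expected truncations are unchanged; they correspond to month-, quarter-, and year-level trunc() formats respectively. A minimal Java sketch of the rule those expected values encode (a hypothetical helper for illustration, not Hive's GenericUDFTrunc):

    import java.time.LocalDate;

    public class TruncSketch {
      // 'MM' -> first of month, 'Q' -> first of quarter, 'YEAR' -> Jan 1.
      static LocalDate trunc(LocalDate d, String fmt) {
        switch (fmt) {
          case "MM":
            return d.withDayOfMonth(1);
          case "Q": {
            int quarterStart = ((d.getMonthValue() - 1) / 3) * 3 + 1;
            return LocalDate.of(d.getYear(), quarterStart, 1);
          }
          case "YEAR":
            return LocalDate.of(d.getYear(), 1, 1);
          default:
            throw new IllegalArgumentException(fmt);
        }
      }

      public static void main(String[] args) {
        System.out.println(trunc(LocalDate.parse("2014-02-02"), "MM"));   // 2014-02-01
        System.out.println(trunc(LocalDate.parse("2016-05-11"), "Q"));    // 2016-04-01
        System.out.println(trunc(LocalDate.parse("2016-02-29"), "YEAR")); // 2016-01-01
      }
    }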
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java b/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java
index d0bd08c..85bb9b3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java
@@ -17,11 +17,11 @@
  */
 package org.apache.hadoop.hive.ql.util;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.TimeZone;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.junit.*;
@@ -92,9 +92,9 @@ public class TestDateTimeMath {
     try {
       TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
       checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-6",
-          "2001-07-01 01:02:03");
+          "2001-07-01 02:02:03");
         checkTimestampIntervalYearMonthArithmetic("2001-07-01 01:02:03", plus, "0-6",
-          "2002-01-01 01:02:03");
+          "2002-01-01 00:02:03");
 
       TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
       checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-6",
@@ -215,15 +215,15 @@ public class TestDateTimeMath {
       checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:58", plus, "0 0:0:01",
           "2015-03-08 01:59:59");
       checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59", plus, "0 0:0:01",
-          "2015-03-08 02:00:00");
+          "2015-03-08 03:00:00");
       checkTsIntervalDayTimeArithmetic("2015-03-08 03:00:00", minus, "0 0:0:01",
-          "2015-03-08 02:59:59");
+          "2015-03-08 01:59:59");
       checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59.995", plus, "0 0:0:0.005",
-          "2015-03-08 02:00:00");
+          "2015-03-08 03:00:00");
       checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59.995", plus, "0 0:0:0.0051",
-          "2015-03-08 02:00:00.0001");
+          "2015-03-08 03:00:00.0001");
       checkTsIntervalDayTimeArithmetic("2015-03-08 03:00:00", minus, "0 0:0:0.005",
-          "2015-03-08 02:59:59.995");
+          "2015-03-08 01:59:59.995");
       checkTsIntervalDayTimeArithmetic("2015-11-01 01:59:58", plus, "0 0:0:01",
           "2015-11-01 01:59:59");
       checkTsIntervalDayTimeArithmetic("2015-11-01 01:59:59", plus, "0 0:0:01",
@@ -286,10 +286,10 @@ public class TestDateTimeMath {
     TimeZone originalTz = TimeZone.getDefault();
     try {
       TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
-      checkTsArithmetic("1999-12-15 00:00:00", "1999-09-15 00:00:00", "91 0:0:0");
-      checkTsArithmetic("1999-09-15 00:00:00", "1999-12-15 00:00:00", "-91 0:0:0");
-      checkTsArithmetic("1999-12-15 00:00:00", "1995-09-15 00:00:00", "1552 0:0:0");
-      checkTsArithmetic("1995-09-15 00:00:00", "1999-12-15 00:00:00", "-1552 0:0:0");
+      checkTsArithmetic("1999-12-15 00:00:00", "1999-09-15 00:00:00", "91 1:0:0");
+      checkTsArithmetic("1999-09-15 00:00:00", "1999-12-15 00:00:00", "-91 1:0:0");
+      checkTsArithmetic("1999-12-15 00:00:00", "1995-09-15 00:00:00", "1552 1:0:0");
+      checkTsArithmetic("1995-09-15 00:00:00", "1999-12-15 00:00:00", "-1552 1:0:0");
 
       TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
       checkTsArithmetic("1999-12-15 00:00:00", "1999-09-15 00:00:00", "91 0:0:0");

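The flipped expectations in TestDateTimeMath are daylight-saving effects: with java.sql.Timestamp, arithmetic runs on epoch millis and results render in the JVM default zone, so crossing the America/Los_Angeles spring-forward (the 02:00 hour on 2015-03-08 does not exist) or fall-back boundary shifts the wall clock. A minimal sketch of that standard JDK behavior (not Hive's DateTimeMath itself):

    import java.sql.Timestamp;
    import java.util.TimeZone;

    public class DstSketch {
      public static void main(String[] args) {
        TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
        // One epoch second past 01:59:59 lands on 03:00:00 because the
        // 02:00-02:59 hour is skipped on 2015-03-08 in this zone.
        Timestamp t = Timestamp.valueOf("2015-03-08 01:59:59");
        System.out.println(new Timestamp(t.getTime() + 1000L)); // 2015-03-08 03:00:00.0
        // Sep 15 -> Dec 15 1999 spans the fall-back transition, so the epoch
        // difference is 91 days plus one hour, i.e. the "91 1:0:0" above.
        long millis = Timestamp.valueOf("1999-12-15 00:00:00").getTime()
                    - Timestamp.valueOf("1999-09-15 00:00:00").getTime();
        System.out.println(millis / 3600000L); // 2185 = 91 * 24 + 1 hours
      }
    }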
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientnegative/date_literal3.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/date_literal3.q b/ql/src/test/queries/clientnegative/date_literal3.q
new file mode 100644
index 0000000..e5a241d
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/date_literal3.q
@@ -0,0 +1,3 @@
+--! qt:dataset:src
+-- Invalid date value
+SELECT DATE '2001-01-32' FROM src;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/date_udf.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/date_udf.q b/ql/src/test/queries/clientpositive/date_udf.q
index aa33b7a..cddfc1a 100644
--- a/ql/src/test/queries/clientpositive/date_udf.q
+++ b/ql/src/test/queries/clientpositive/date_udf.q
@@ -21,7 +21,7 @@ create table date_udf_flight (
 LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_udf_flight;
 
 -- Test UDFs with date input
-select unix_timestamp(cast(d as timestamp with local time zone)), unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
+select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
     weekofyear(d), to_date(d)
   from date_udf;
 

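With java.sql.Date restored, unix_timestamp(d) is session-zone dependent again (a DATE is taken as local midnight), which is why the extra 'timestamp with local time zone' probe is dropped: both expressions now yield the same value, as the date_udf.q.out hunk later in this patch shows (1304665200 alone instead of the 1304665200/1304640000 pair). A minimal sketch of that zone dependence, assuming a US/Pacific session:

    import java.sql.Date;
    import java.util.TimeZone;

    public class UnixTimestampSketch {
      public static void main(String[] args) {
        TimeZone.setDefault(TimeZone.getTimeZone("US/Pacific"));
        // Date.valueOf interprets the literal in the JVM default zone, so
        // midnight 2011-05-06 PDT sits 7 hours after midnight UTC.
        long localSeconds = Date.valueOf("2011-05-06").getTime() / 1000L;
        System.out.println(localSeconds);             // 1304665200
        System.out.println(localSeconds - 7 * 3600L); // 1304640000 (UTC midnight)
      }
    }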
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/druid_timestamptz2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druid_timestamptz2.q b/ql/src/test/queries/clientpositive/druid_timestamptz2.q
deleted file mode 100644
index 8f573c8..0000000
--- a/ql/src/test/queries/clientpositive/druid_timestamptz2.q
+++ /dev/null
@@ -1,60 +0,0 @@
-CREATE database druid_test_dst;
-use druid_test_dst;
-
-
-
-create table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double);
-insert into test_base_table values ('2015-03-08 00:00:00', 'i1-start', 4);
-insert into test_base_table values ('2015-03-08 23:59:59', 'i1-end', 1);
-insert into test_base_table values ('2015-03-09 00:00:00', 'i2-start', 4);
-insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1);
-insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2);
-insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2);
-
-CREATE TABLE druid_test_table_1
-STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "DAY")
-AS
-select cast(`timecolumn` as timestamp with local time zone) as `__time`, `interval_marker`, `num_l`
-FROM druid_test_dst.test_base_table;
-
-select * FROM druid_test_table_1;
-
-CREATE TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
-STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "DAY");
-
-
-insert into druid_test_table_2 values (cast('2015-03-08 00:00:00' as timestamp with local time zone), 'i1-start', 4);
-insert into druid_test_table_2 values (cast('2015-03-08 23:59:59' as timestamp with local time zone), 'i1-end', 1);
-insert into druid_test_table_2 values (cast('2015-03-09 00:00:00' as timestamp with local time zone), 'i2-start', 4);
-insert into druid_test_table_2 values (cast('2015-03-09 23:59:59' as timestamp with local time zone), 'i2-end', 1);
-insert into druid_test_table_2 values (cast('2015-03-10 00:00:00' as timestamp with local time zone), 'i3-start', 2);
-insert into druid_test_table_2 values (cast('2015-03-10 23:59:59' as timestamp with local time zone), 'i3-end', 2);
-
-select * FROM druid_test_table_2;
-
-SET TIME ZONE UTC;
-
-CREATE TABLE druid_test_table_utc
-STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "DAY")
-AS
-select cast(`timecolumn` as timestamp with local time zone) as `__time`, `interval_marker`, `num_l`
-FROM druid_test_dst.test_base_table;
-
-select * FROM druid_test_table_utc;
-
-CREATE TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
-STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "DAY");
-
-
-insert into druid_test_table_utc2 values (cast('2015-03-08 00:00:00' as timestamp with local time zone), 'i1-start', 4);
-insert into druid_test_table_utc2 values (cast('2015-03-08 23:59:59' as timestamp with local time zone), 'i1-end', 1);
-insert into druid_test_table_utc2 values (cast('2015-03-09 00:00:00' as timestamp with local time zone), 'i2-start', 4);
-insert into druid_test_table_utc2 values (cast('2015-03-09 23:59:59' as timestamp with local time zone), 'i2-end', 1);
-insert into druid_test_table_utc2 values (cast('2015-03-10 00:00:00' as timestamp with local time zone), 'i3-start', 2);
-insert into druid_test_table_utc2 values (cast('2015-03-10 23:59:59' as timestamp with local time zone), 'i3-end', 2);
-
-select * FROM druid_test_table_utc2;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/localtimezone.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/localtimezone.q b/ql/src/test/queries/clientpositive/localtimezone.q
index 7456972..27b036b 100644
--- a/ql/src/test/queries/clientpositive/localtimezone.q
+++ b/ql/src/test/queries/clientpositive/localtimezone.q
@@ -5,35 +5,35 @@ drop table `timestamptz_test`;
 create table `date_test` (`mydate1` date);
 
 insert into `date_test` VALUES
-  (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone));
+  ('2011-01-01 01:01:01.123'),
+  ('2011-01-01 01:01:01.123 Europe/Rome'),
+  ('2011-01-01 01:01:01.123 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912'),
+  ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
+  ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912 xyz');
 
 create table `timestamp_test` (`mydate1` timestamp);
 
 insert into `timestamp_test` VALUES
-  (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone));
+  ('2011-01-01 01:01:01.123'),
+  ('2011-01-01 01:01:01.123 Europe/Rome'),
+  ('2011-01-01 01:01:01.123 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912'),
+  ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
+  ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912 xyz');
 
 create table `timestamptz_test` (`mydate1` timestamp with local time zone);
 
 insert into `timestamptz_test` VALUES
-  (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone));
+  ('2011-01-01 01:01:01.123'),
+  ('2011-01-01 01:01:01.123 Europe/Rome'),
+  ('2011-01-01 01:01:01.123 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912'),
+  ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
+  ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912 xyz');
 
 select * from `date_test`;
 select * from `timestamp_test`;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/localtimezone2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/localtimezone2.q b/ql/src/test/queries/clientpositive/localtimezone2.q
deleted file mode 100644
index 911fca0..0000000
--- a/ql/src/test/queries/clientpositive/localtimezone2.q
+++ /dev/null
@@ -1,55 +0,0 @@
-drop table `table_tsltz`;
-
-CREATE TABLE table_tsltz (tz VARCHAR(200),
-                         c_ts1 TIMESTAMP,
-                         c_ts2 TIMESTAMP,
-                         c_tsltz1 TIMESTAMP WITH LOCAL TIME ZONE,
-                         c_tsltz2 TIMESTAMP WITH LOCAL TIME ZONE);
-
-set time zone GMT-08:00;
-
-insert into table_tsltz values (
-  '-08:00',
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone));
-
-set time zone UTC;
-
-insert into table_tsltz values (
-  'UTC',
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone));
-
-set time zone GMT+02:00;
-
-insert into table_tsltz values (
-  '+02:00',
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone));
-
-set time zone US/Pacific;
-
-insert into table_tsltz values (
-  'US/Pacific',
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone));
-
-select tz,
-    c_ts1, c_ts2,
-    cast(c_tsltz1 as VARCHAR(200)) as c_tsltz1, cast(c_tsltz2 as VARCHAR(200)) as c_tsltz2
-from table_tsltz;
-
-set time zone UTC;
-
-select tz,
-    c_ts1, c_ts2,
-    cast(c_tsltz1 as VARCHAR(200)) as c_tsltz1, cast(c_tsltz2 as VARCHAR(200)) as c_tsltz2
-from table_tsltz;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/parquet_ppd_char.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/parquet_ppd_char.q b/ql/src/test/queries/clientpositive/parquet_ppd_char.q
index 4230d8c..386fb25 100644
--- a/ql/src/test/queries/clientpositive/parquet_ppd_char.q
+++ b/ql/src/test/queries/clientpositive/parquet_ppd_char.q
@@ -1,7 +1,6 @@
 --! qt:dataset:src1
 --! qt:dataset:src
 
-set hive.parquet.timestamp.skip.conversion=true;
 set hive.vectorized.execution.enabled=false;
 SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
 SET hive.optimize.ppd=true;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/parquet_vectorization_13.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/parquet_vectorization_13.q b/ql/src/test/queries/clientpositive/parquet_vectorization_13.q
index 0b23f50..e5f48c8 100644
--- a/ql/src/test/queries/clientpositive/parquet_vectorization_13.q
+++ b/ql/src/test/queries/clientpositive/parquet_vectorization_13.q
@@ -32,8 +32,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -64,8 +64,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -98,8 +98,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -130,8 +130,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16

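The constant rewrites in these predicates track the numeric view of a timestamp, i.e. seconds since the epoch. The alltypesparquet rows cluster around 1969-12-31 16:00 US/Pacific; read under the restored local-time semantics that wall clock is roughly epoch second 0, whereas the UTC semantics being reverted treated the wall clock itself as UTC, putting the same rows near -28800 (the 8-hour zone offset). A minimal sketch of the two readings (standard JDK types, US/Pacific assumed; the same rewrite recurs in vectorization_7.q and vectorization_13.q below):

    import java.sql.Timestamp;
    import java.util.TimeZone;

    public class TsToSecondsSketch {
      public static void main(String[] args) {
        TimeZone.setDefault(TimeZone.getTimeZone("US/Pacific"));
        // Local semantics: 1969-12-31 16:00:11 PST *is* epoch second 11,
        // hence "ctimestamp1 > 11" replacing "ctimestamp1 > -28789".
        Timestamp t = Timestamp.valueOf("1969-12-31 16:00:11");
        System.out.println(t.getTime() / 1000L); // 11
        // UTC semantics: the same wall clock read as UTC is -28800 + 11.
        System.out.println(11 - 8 * 3600); // -28789
      }
    }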
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/parquet_vectorization_7.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/parquet_vectorization_7.q b/ql/src/test/queries/clientpositive/parquet_vectorization_7.q
index 55f21af..d0a73a3 100644
--- a/ql/src/test/queries/clientpositive/parquet_vectorization_7.q
+++ b/ql/src/test/queries/clientpositive/parquet_vectorization_7.q
@@ -24,11 +24,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25;
@@ -50,11 +50,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25;
@@ -79,11 +79,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25;
@@ -105,11 +105,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/singletsinsertorc.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/singletsinsertorc.q b/ql/src/test/queries/clientpositive/singletsinsertorc.q
deleted file mode 100644
index 3e4c43e..0000000
--- a/ql/src/test/queries/clientpositive/singletsinsertorc.q
+++ /dev/null
@@ -1,6 +0,0 @@
-CREATE TABLE myorctable(ts timestamp)
-STORED AS ORC;
-
-INSERT INTO myorctable VALUES ('1970-01-01 00:00:00');
-
-SELECT * FROM myorctable;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/timestamp_comparison2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/timestamp_comparison2.q b/ql/src/test/queries/clientpositive/timestamp_comparison2.q
index dfb4fd7..affc36f 100644
--- a/ql/src/test/queries/clientpositive/timestamp_comparison2.q
+++ b/ql/src/test/queries/clientpositive/timestamp_comparison2.q
@@ -17,8 +17,8 @@ FROM   alltypesorc
 WHERE  
 ((ctinyint != 0)
     AND 
-        (((ctimestamp1 <= timestamp('1970-01-01 00:00:00'))
+        (((ctimestamp1 <= timestamp('1969-12-31 16:00:00')) 
             OR ((ctinyint = cint) OR (cstring2 LIKE 'ss')))
          AND ((988888 < cdouble)
-             OR ((ctimestamp2 > timestamp('1969-12-31 15:55:29')) AND (3569 >= cdouble)))))
+             OR ((ctimestamp2 > timestamp('1969-12-31 07:55:29')) AND (3569 >= cdouble)))))
 ;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/timestamp_dst.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/timestamp_dst.q b/ql/src/test/queries/clientpositive/timestamp_dst.q
deleted file mode 100644
index 4dda5a9..0000000
--- a/ql/src/test/queries/clientpositive/timestamp_dst.q
+++ /dev/null
@@ -1,2 +0,0 @@
-select TIMESTAMP '2015-03-08 02:10:00.101';
-

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/udf_reflect2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/udf_reflect2.q b/ql/src/test/queries/clientpositive/udf_reflect2.q
index 7810746..7ebe914 100644
--- a/ql/src/test/queries/clientpositive/udf_reflect2.q
+++ b/ql/src/test/queries/clientpositive/udf_reflect2.q
@@ -36,7 +36,7 @@ SELECT key,
        reflect2(ts, "getHours"),
        reflect2(ts, "getMinutes"),
        reflect2(ts, "getSeconds"),
-       reflect2(ts, "toEpochMilli")
+       reflect2(ts, "getTime")
 FROM (select cast(key as int) key, value, cast('2013-02-15 19:41:20' as timestamp) ts from src) a LIMIT 5;
 
 
@@ -70,5 +70,5 @@ SELECT key,
        reflect2(ts, "getHours"),
        reflect2(ts, "getMinutes"),
        reflect2(ts, "getSeconds"),
-       reflect2(ts, "toEpochMilli")
+       reflect2(ts, "getTime")
 FROM (select cast(key as int) key, value, cast('2013-02-15 19:41:20' as timestamp) ts from src) a LIMIT 5;

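reflect2 essentially makes a reflective no-arg method call on the Java object backing the column, so the method name must match the backing type: java.sql.Timestamp exposes getTime() for epoch millis, while the Hive Timestamp type being reverted exposed toEpochMilli(). A minimal sketch of that dispatch (illustration only, not the UDF's actual code path):

    import java.lang.reflect.Method;
    import java.sql.Timestamp;

    public class Reflect2Sketch {
      public static void main(String[] args) throws Exception {
        // java.sql.Timestamp inherits getTime() from java.util.Date;
        // valueOf parses the literal in the JVM default zone.
        Timestamp ts = Timestamp.valueOf("2013-02-15 19:41:20");
        Method getTime = Timestamp.class.getMethod("getTime");
        System.out.println(getTime.invoke(ts)); // epoch millis
      }
    }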
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/vectorization_13.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/vectorization_13.q b/ql/src/test/queries/clientpositive/vectorization_13.q
index b9e3fa2..dd7981d 100644
--- a/ql/src/test/queries/clientpositive/vectorization_13.q
+++ b/ql/src/test/queries/clientpositive/vectorization_13.q
@@ -32,8 +32,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -64,8 +64,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -98,8 +98,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -130,8 +130,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/vectorization_7.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/vectorization_7.q b/ql/src/test/queries/clientpositive/vectorization_7.q
index 855e2cf..ac0cc10 100644
--- a/ql/src/test/queries/clientpositive/vectorization_7.q
+++ b/ql/src/test/queries/clientpositive/vectorization_7.q
@@ -24,11 +24,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25;
@@ -50,11 +50,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25;
@@ -79,11 +79,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25;
@@ -105,11 +105,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/queries/clientpositive/vectorization_decimal_date.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/vectorization_decimal_date.q b/ql/src/test/queries/clientpositive/vectorization_decimal_date.q
index 68e5576..c38ef09 100644
--- a/ql/src/test/queries/clientpositive/vectorization_decimal_date.q
+++ b/ql/src/test/queries/clientpositive/vectorization_decimal_date.q
@@ -4,6 +4,5 @@ set hive.fetch.task.conversion=none;
 
 CREATE TABLE date_decimal_test STORED AS ORC AS SELECT cint, cdouble, CAST (CAST (cint AS TIMESTAMP) AS DATE) AS cdate, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal FROM alltypesorc;
 SET hive.vectorized.execution.enabled=true;
-EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10;
--- 528534767 is 'Wednesday, January 7, 1970 2:48:54 AM'
-SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10;
+EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10;
+SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientnegative/date_literal3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/date_literal3.q.out b/ql/src/test/results/clientnegative/date_literal3.q.out
new file mode 100644
index 0000000..f51de7c
--- /dev/null
+++ b/ql/src/test/results/clientnegative/date_literal3.q.out
@@ -0,0 +1 @@
+FAILED: SemanticException Unable to convert time literal '2001-01-32' to time value.

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/beeline/udf_unix_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/beeline/udf_unix_timestamp.q.out b/ql/src/test/results/clientpositive/beeline/udf_unix_timestamp.q.out
index e768eca..c86a85c 100644
--- a/ql/src/test/results/clientpositive/beeline/udf_unix_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/beeline/udf_unix_timestamp.q.out
@@ -41,7 +41,7 @@ FROM oneline
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@oneline
 #### A masked pattern was here ####
-2009-03-20 11:30:01	1237548601
+2009-03-20 11:30:01	1237573801
 PREHOOK: query: SELECT
   '2009-03-20',
   unix_timestamp('2009-03-20', 'yyyy-MM-dd')
@@ -56,7 +56,7 @@ FROM oneline
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@oneline
 #### A masked pattern was here ####
-2009-03-20	1237507200
+2009-03-20	1237532400
 PREHOOK: query: SELECT
   '2009 Mar 20 11:30:01 am',
   unix_timestamp('2009 Mar 20 11:30:01 am', 'yyyy MMM dd h:mm:ss a')
@@ -71,7 +71,7 @@ FROM oneline
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@oneline
 #### A masked pattern was here ####
-2009 Mar 20 11:30:01 am	1237548601
+2009 Mar 20 11:30:01 am	1237573801
 unix_timestamp(void) is deprecated. Use current_timestamp instead.
 unix_timestamp(void) is deprecated. Use current_timestamp instead.
 PREHOOK: query: create table foo_n3 as SELECT

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/cast_on_constant.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cast_on_constant.q.out b/ql/src/test/results/clientpositive/cast_on_constant.q.out
index 731129c..5923209 100644
--- a/ql/src/test/results/clientpositive/cast_on_constant.q.out
+++ b/ql/src/test/results/clientpositive/cast_on_constant.q.out
@@ -22,10 +22,10 @@ STAGE PLANS:
             alias: t1_n138
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: (ts_field = TIMESTAMP'2016-01-23 00:00:00') (type: boolean)
+              predicate: (ts_field = TIMESTAMP'2016-01-23 00:00:00.0') (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
-                expressions: TIMESTAMP'2016-01-23 00:00:00' (type: timestamp), date_field (type: date)
+                expressions: TIMESTAMP'2016-01-23 00:00:00.0' (type: timestamp), date_field (type: date)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator
@@ -96,10 +96,10 @@ STAGE PLANS:
             alias: t1_n138
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: (ts_field = TIMESTAMP'2016-01-23 00:00:00') (type: boolean)
+              predicate: (ts_field = TIMESTAMP'2016-01-23 00:00:00.0') (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
-                expressions: TIMESTAMP'2016-01-23 00:00:00' (type: timestamp), date_field (type: date)
+                expressions: TIMESTAMP'2016-01-23 00:00:00.0' (type: timestamp), date_field (type: date)
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/confirm_initial_tbl_stats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/confirm_initial_tbl_stats.q.out b/ql/src/test/results/clientpositive/confirm_initial_tbl_stats.q.out
index e97e97d..3d64168 100644
--- a/ql/src/test/results/clientpositive/confirm_initial_tbl_stats.q.out
+++ b/ql/src/test/results/clientpositive/confirm_initial_tbl_stats.q.out
@@ -272,10 +272,10 @@ POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@alltypesorc
 col_name            	ctimestamp1         	 	 	 	 	 	 	 	 	 	 
 data_type           	timestamp           	 	 	 	 	 	 	 	 	 	 
-min                 	-28830              	 	 	 	 	 	 	 	 	 	 
-max                 	-28769              	 	 	 	 	 	 	 	 	 	 
+min                 	-30                 	 	 	 	 	 	 	 	 	 	 
+max                 	31                  	 	 	 	 	 	 	 	 	 	 
 num_nulls           	3115                	 	 	 	 	 	 	 	 	 	 
-distinct_count      	35                  	 	 	 	 	 	 	 	 	 	 
+distinct_count      	36                  	 	 	 	 	 	 	 	 	 	 
 avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
 max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
 num_trues           	                    	 	 	 	 	 	 	 	 	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/constprog_type.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/constprog_type.q.out b/ql/src/test/results/clientpositive/constprog_type.q.out
index a3f6358..99a4746 100644
--- a/ql/src/test/results/clientpositive/constprog_type.q.out
+++ b/ql/src/test/results/clientpositive/constprog_type.q.out
@@ -35,7 +35,7 @@ STAGE PLANS:
             Row Limit Per Split: 1
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
             Select Operator
-              expressions: DATE'2013-11-17' (type: date), TIMESTAMP'2011-04-30 03:46:56.4485' (type: timestamp)
+              expressions: DATE'2013-11-17' (type: date), TIMESTAMP'2011-04-29 20:46:56.4485' (type: timestamp)
               outputColumnNames: _col0, _col1
               Statistics: Num rows: 500 Data size: 48000 Basic stats: COMPLETE Column stats: COMPLETE
               File Output Operator
@@ -123,7 +123,7 @@ POSTHOOK: query: SELECT * FROM dest1_n26
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n26
 #### A masked pattern was here ####
-2013-11-17	2011-04-30 03:46:56.4485
+2013-11-17	2011-04-29 20:46:56.4485
 PREHOOK: query: SELECT key, value FROM src WHERE key = cast(86 as double)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/date_udf.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/date_udf.q.out b/ql/src/test/results/clientpositive/date_udf.q.out
index 7681a50..37ad29e 100644
--- a/ql/src/test/results/clientpositive/date_udf.q.out
+++ b/ql/src/test/results/clientpositive/date_udf.q.out
@@ -74,19 +74,19 @@ POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OV
 POSTHOOK: type: LOAD
 #### A masked pattern was here ####
 POSTHOOK: Output: default@date_udf_flight
-PREHOOK: query: select unix_timestamp(cast(d as timestamp with local time zone)), unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
+PREHOOK: query: select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
     weekofyear(d), to_date(d)
   from date_udf
 PREHOOK: type: QUERY
 PREHOOK: Input: default@date_udf
 #### A masked pattern was here ####
-POSTHOOK: query: select unix_timestamp(cast(d as timestamp with local time zone)), unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
+POSTHOOK: query: select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
     weekofyear(d), to_date(d)
   from date_udf
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_udf
 #### A masked pattern was here ####
-1304665200	1304640000	2011	5	6	6	18	2011-05-06
+1304665200	2011	5	6	6	18	2011-05-06
 PREHOOK: query: select date_add(d, 5), date_sub(d, 10)
   from date_udf
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/decimal_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/decimal_1.q.out b/ql/src/test/results/clientpositive/decimal_1.q.out
index ed61abe..f5c92f3 100644
--- a/ql/src/test/results/clientpositive/decimal_1.q.out
+++ b/ql/src/test/results/clientpositive/decimal_1.q.out
@@ -120,7 +120,7 @@ POSTHOOK: query: select cast(t as timestamp) from decimal_1_n0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_1_n0
 #### A masked pattern was here ####
-1970-01-01 00:00:17.29
+1969-12-31 16:00:17.29
 PREHOOK: query: drop table decimal_1_n0
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@decimal_1_n0

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/decimal_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/decimal_2.q.out b/ql/src/test/results/clientpositive/decimal_2.q.out
index 22f4c33..56e08d7 100644
--- a/ql/src/test/results/clientpositive/decimal_2.q.out
+++ b/ql/src/test/results/clientpositive/decimal_2.q.out
@@ -210,7 +210,7 @@ POSTHOOK: query: select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_2_n1
 #### A masked pattern was here ####
-1355915539.1234567
+1355944339.1234567
 PREHOOK: query: select cast(true as decimal) from decimal_2_n1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_2_n1

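Both decimal deltas above are the 8-hour US/Pacific offset surfacing in decimal-to-timestamp and timestamp-to-decimal casts: a decimal is taken as seconds since the epoch, and the timestamp side is now read and rendered in the local zone. A minimal sketch, assuming a US/Pacific session:

    import java.sql.Timestamp;
    import java.util.TimeZone;

    public class DecimalCastSketch {
      public static void main(String[] args) {
        TimeZone.setDefault(TimeZone.getTimeZone("US/Pacific"));
        // decimal -> timestamp: 17.29 seconds past the epoch, rendered
        // locally (UTC-8), is 1969-12-31 16:00:17.29, not 00:00:17.29.
        System.out.println(new Timestamp(17290L));
        // timestamp -> decimal: the wall clock is read locally, so
        // '2012-12-19 11:12:19' PST is 8 hours more in epoch seconds than
        // the same wall clock read as UTC: 1355915539 + 28800 = 1355944339.
        System.out.println(Timestamp.valueOf("2012-12-19 11:12:19").getTime() / 1000L);
      }
    }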
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out b/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out
deleted file mode 100644
index c71a435..0000000
--- a/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out
+++ /dev/null
@@ -1,308 +0,0 @@
-PREHOOK: query: CREATE database druid_test_dst
-PREHOOK: type: CREATEDATABASE
-PREHOOK: Output: database:druid_test_dst
-POSTHOOK: query: CREATE database druid_test_dst
-POSTHOOK: type: CREATEDATABASE
-POSTHOOK: Output: database:druid_test_dst
-PREHOOK: query: use druid_test_dst
-PREHOOK: type: SWITCHDATABASE
-PREHOOK: Input: database:druid_test_dst
-POSTHOOK: query: use druid_test_dst
-POSTHOOK: type: SWITCHDATABASE
-POSTHOOK: Input: database:druid_test_dst
-PREHOOK: query: create table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:druid_test_dst
-PREHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: query: create table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:druid_test_dst
-POSTHOOK: Output: druid_test_dst@test_base_table
-PREHOOK: query: insert into test_base_table values ('2015-03-08 00:00:00', 'i1-start', 4)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: query: insert into test_base_table values ('2015-03-08 00:00:00', 'i1-start', 4)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
-POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
-POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
-PREHOOK: query: insert into test_base_table values ('2015-03-08 23:59:59', 'i1-end', 1)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: query: insert into test_base_table values ('2015-03-08 23:59:59', 'i1-end', 1)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
-POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
-POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
-PREHOOK: query: insert into test_base_table values ('2015-03-09 00:00:00', 'i2-start', 4)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: query: insert into test_base_table values ('2015-03-09 00:00:00', 'i2-start', 4)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
-POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
-POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
-PREHOOK: query: insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: query: insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
-POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
-POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
-PREHOOK: query: insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: query: insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
-POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
-POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
-PREHOOK: query: insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: query: insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@test_base_table
-POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
-POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
-POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
-PREHOOK: query: CREATE TABLE druid_test_table_1
-STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "DAY")
-AS
-select cast(`timecolumn` as timestamp with local time zone) as `__time`, `interval_marker`, `num_l`
-FROM druid_test_dst.test_base_table
-PREHOOK: type: CREATETABLE_AS_SELECT
-PREHOOK: Input: druid_test_dst@test_base_table
-PREHOOK: Output: database:druid_test_dst
-PREHOOK: Output: druid_test_dst@druid_test_table_1
-POSTHOOK: query: CREATE TABLE druid_test_table_1
-STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "DAY")
-AS
-select cast(`timecolumn` as timestamp with local time zone) as `__time`, `interval_marker`, `num_l`
-FROM druid_test_dst.test_base_table
-POSTHOOK: type: CREATETABLE_AS_SELECT
-POSTHOOK: Input: druid_test_dst@test_base_table
-POSTHOOK: Output: database:druid_test_dst
-POSTHOOK: Output: druid_test_dst@druid_test_table_1
-POSTHOOK: Lineage: druid_test_table_1.__time EXPRESSION [(test_base_table)test_base_table.FieldSchema(name:timecolumn, type:timestamp, comment:null), ]
-POSTHOOK: Lineage: druid_test_table_1.interval_marker SIMPLE [(test_base_table)test_base_table.FieldSchema(name:interval_marker, type:string, comment:null), ]
-POSTHOOK: Lineage: druid_test_table_1.num_l SIMPLE [(test_base_table)test_base_table.FieldSchema(name:num_l, type:double, comment:null), ]
-PREHOOK: query: select * FROM druid_test_table_1
-PREHOOK: type: QUERY
-PREHOOK: Input: druid_test_dst@druid_test_table_1
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select * FROM druid_test_table_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: druid_test_dst@druid_test_table_1
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-2015-03-08 00:00:00.0 US/Pacific	i1-start	4.0
-2015-03-08 23:59:59.0 US/Pacific	i1-end	1.0
-2015-03-09 00:00:00.0 US/Pacific	i2-start	4.0
-2015-03-09 23:59:59.0 US/Pacific	i2-end	1.0
-2015-03-10 00:00:00.0 US/Pacific	i3-start	2.0
-2015-03-10 23:59:59.0 US/Pacific	i3-end	2.0
-PREHOOK: query: CREATE TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
-STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "DAY")
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:druid_test_dst
-PREHOOK: Output: druid_test_dst@druid_test_table_2
-POSTHOOK: query: CREATE TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
-STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "DAY")
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:druid_test_dst
-POSTHOOK: Output: druid_test_dst@druid_test_table_2
-PREHOOK: query: insert into druid_test_table_2 values (cast('2015-03-08 00:00:00' as timestamp with local time zone), 'i1-start', 4)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@druid_test_table_2
-POSTHOOK: query: insert into druid_test_table_2 values (cast('2015-03-08 00:00:00' as timestamp with local time zone), 'i1-start', 4)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@druid_test_table_2
-PREHOOK: query: insert into druid_test_table_2 values (cast('2015-03-08 23:59:59' as timestamp with local time zone), 'i1-end', 1)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@druid_test_table_2
-POSTHOOK: query: insert into druid_test_table_2 values (cast('2015-03-08 23:59:59' as timestamp with local time zone), 'i1-end', 1)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@druid_test_table_2
-PREHOOK: query: insert into druid_test_table_2 values (cast('2015-03-09 00:00:00' as timestamp with local time zone), 'i2-start', 4)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@druid_test_table_2
-POSTHOOK: query: insert into druid_test_table_2 values (cast('2015-03-09 00:00:00' as timestamp with local time zone), 'i2-start', 4)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@druid_test_table_2
-PREHOOK: query: insert into druid_test_table_2 values (cast('2015-03-09 23:59:59' as timestamp with local time zone), 'i2-end', 1)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@druid_test_table_2
-POSTHOOK: query: insert into druid_test_table_2 values (cast('2015-03-09 23:59:59' as timestamp with local time zone), 'i2-end', 1)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@druid_test_table_2
-PREHOOK: query: insert into druid_test_table_2 values (cast('2015-03-10 00:00:00' as timestamp with local time zone), 'i3-start', 2)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@druid_test_table_2
-POSTHOOK: query: insert into druid_test_table_2 values (cast('2015-03-10 00:00:00' as timestamp with local time zone), 'i3-start', 2)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@druid_test_table_2
-PREHOOK: query: insert into druid_test_table_2 values (cast('2015-03-10 23:59:59' as timestamp with local time zone), 'i3-end', 2)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@druid_test_table_2
-POSTHOOK: query: insert into druid_test_table_2 values (cast('2015-03-10 23:59:59' as timestamp with local time zone), 'i3-end', 2)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@druid_test_table_2
-PREHOOK: query: select * FROM druid_test_table_2
-PREHOOK: type: QUERY
-PREHOOK: Input: druid_test_dst@druid_test_table_2
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select * FROM druid_test_table_2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: druid_test_dst@druid_test_table_2
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-2015-03-08 00:00:00.0 US/Pacific	i1-start	4.0
-2015-03-08 23:59:59.0 US/Pacific	i1-end	1.0
-2015-03-09 00:00:00.0 US/Pacific	i2-start	4.0
-2015-03-09 23:59:59.0 US/Pacific	i2-end	1.0
-2015-03-10 00:00:00.0 US/Pacific	i3-start	2.0
-2015-03-10 23:59:59.0 US/Pacific	i3-end	2.0
-PREHOOK: query: CREATE TABLE druid_test_table_utc
-STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "DAY")
-AS
-select cast(`timecolumn` as timestamp with local time zone) as `__time`, `interval_marker`, `num_l`
-FROM druid_test_dst.test_base_table
-PREHOOK: type: CREATETABLE_AS_SELECT
-PREHOOK: Input: druid_test_dst@test_base_table
-PREHOOK: Output: database:druid_test_dst
-PREHOOK: Output: druid_test_dst@druid_test_table_utc
-POSTHOOK: query: CREATE TABLE druid_test_table_utc
-STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "DAY")
-AS
-select cast(`timecolumn` as timestamp with local time zone) as `__time`, `interval_marker`, `num_l`
-FROM druid_test_dst.test_base_table
-POSTHOOK: type: CREATETABLE_AS_SELECT
-POSTHOOK: Input: druid_test_dst@test_base_table
-POSTHOOK: Output: database:druid_test_dst
-POSTHOOK: Output: druid_test_dst@druid_test_table_utc
-POSTHOOK: Lineage: druid_test_table_utc.__time EXPRESSION [(test_base_table)test_base_table.FieldSchema(name:timecolumn, type:timestamp, comment:null), ]
-POSTHOOK: Lineage: druid_test_table_utc.interval_marker SIMPLE [(test_base_table)test_base_table.FieldSchema(name:interval_marker, type:string, comment:null), ]
-POSTHOOK: Lineage: druid_test_table_utc.num_l SIMPLE [(test_base_table)test_base_table.FieldSchema(name:num_l, type:double, comment:null), ]
-PREHOOK: query: select * FROM druid_test_table_utc
-PREHOOK: type: QUERY
-PREHOOK: Input: druid_test_dst@druid_test_table_utc
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select * FROM druid_test_table_utc
-POSTHOOK: type: QUERY
-POSTHOOK: Input: druid_test_dst@druid_test_table_utc
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-2015-03-08 00:00:00.0 UTC	i1-start	4.0
-2015-03-08 23:59:59.0 UTC	i1-end	1.0
-2015-03-09 00:00:00.0 UTC	i2-start	4.0
-2015-03-09 23:59:59.0 UTC	i2-end	1.0
-2015-03-10 00:00:00.0 UTC	i3-start	2.0
-2015-03-10 23:59:59.0 UTC	i3-end	2.0
-PREHOOK: query: CREATE TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
-STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "DAY")
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:druid_test_dst
-PREHOOK: Output: druid_test_dst@druid_test_table_utc2
-POSTHOOK: query: CREATE TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
-STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
-TBLPROPERTIES ("druid.segment.granularity" = "DAY")
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:druid_test_dst
-POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
-PREHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-08 00:00:00' as timestamp with local time zone), 'i1-start', 4)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@druid_test_table_utc2
-POSTHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-08 00:00:00' as timestamp with local time zone), 'i1-start', 4)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
-PREHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-08 23:59:59' as timestamp with local time zone), 'i1-end', 1)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@druid_test_table_utc2
-POSTHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-08 23:59:59' as timestamp with local time zone), 'i1-end', 1)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
-PREHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-09 00:00:00' as timestamp with local time zone), 'i2-start', 4)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@druid_test_table_utc2
-POSTHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-09 00:00:00' as timestamp with local time zone), 'i2-start', 4)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
-PREHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-09 23:59:59' as timestamp with local time zone), 'i2-end', 1)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@druid_test_table_utc2
-POSTHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-09 23:59:59' as timestamp with local time zone), 'i2-end', 1)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
-PREHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-10 00:00:00' as timestamp with local time zone), 'i3-start', 2)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@druid_test_table_utc2
-POSTHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-10 00:00:00' as timestamp with local time zone), 'i3-start', 2)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
-PREHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-10 23:59:59' as timestamp with local time zone), 'i3-end', 2)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: druid_test_dst@druid_test_table_utc2
-POSTHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-10 23:59:59' as timestamp with local time zone), 'i3-end', 2)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
-PREHOOK: query: select * FROM druid_test_table_utc2
-PREHOOK: type: QUERY
-PREHOOK: Input: druid_test_dst@druid_test_table_utc2
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select * FROM druid_test_table_utc2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: druid_test_dst@druid_test_table_utc2
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-2015-03-08 00:00:00.0 UTC	i1-start	4.0
-2015-03-08 23:59:59.0 UTC	i1-end	1.0
-2015-03-09 00:00:00.0 UTC	i2-start	4.0
-2015-03-09 23:59:59.0 UTC	i2-end	1.0
-2015-03-10 00:00:00.0 UTC	i3-start	2.0
-2015-03-10 23:59:59.0 UTC	i3-end	2.0

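For context on the deleted druid output above: a timestamp with local time zone value is stored as an instant and rendered in the session time zone on read, which is why the same inserted wall-clock literals print with a US/Pacific suffix in one table and a UTC suffix in the next. A minimal HiveQL sketch of that behavior, assuming the hive.local.time.zone session property controls the rendering zone (illustrative only, not part of this diff):

-- illustrative sketch: same wall-clock literal, two session zones
set hive.local.time.zone=US/Pacific;
select cast('2015-03-08 23:59:59' as timestamp with local time zone);
-- 2015-03-08 23:59:59.0 US/Pacific
set hive.local.time.zone=UTC;
select cast('2015-03-08 23:59:59' as timestamp with local time zone);
-- 2015-03-08 23:59:59.0 UTC
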
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/foldts.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/foldts.q.out b/ql/src/test/results/clientpositive/foldts.q.out
index 83a6481..2a82b9a 100644
--- a/ql/src/test/results/clientpositive/foldts.q.out
+++ b/ql/src/test/results/clientpositive/foldts.q.out
@@ -45,7 +45,7 @@ POSTHOOK: query: select ctimestamp1, unix_timestamp(ctimestamp1), to_unix_timest
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-1969-12-31 15:59:46.674	-28813	-28813
+1969-12-31 15:59:46.674	-13	-13
 PREHOOK: query: create temporary table src1orc stored as orc as select * from src1
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src1

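The foldts change above is the 8-hour US/Pacific offset under which these q.out files are generated: with the patch, the stored wall clock 1969-12-31 15:59:46.674 is interpreted as UTC; with the revert it is interpreted in the local zone. A worked check, with the arithmetic in comments (session zone assumed to be US/Pacific; the limit clause is illustrative):

-- patched (UTC):  1969-12-31 15:59:46.674 UTC = epoch - 28813.326 s -> -28813
-- reverted (PST): 1969-12-31 15:59:46.674 at UTC-8
--                 = 1969-12-31 23:59:46.674 UTC = epoch - 13.326 s  -> -13
-- difference: 28800 s = 8 h
select ctimestamp1, unix_timestamp(ctimestamp1), to_unix_timestamp(ctimestamp1)
from alltypesorc limit 1;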

[05/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vectorized_casts.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_casts.q.out b/ql/src/test/results/clientpositive/vectorized_casts.q.out
index cc53e98..608e85a 100644
--- a/ql/src/test/results/clientpositive/vectorized_casts.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_casts.q.out
@@ -180,7 +180,7 @@ STAGE PLANS:
                     className: VectorSelectOperator
                     native: true
                     projectedOutputColumnNums: [13, 14, 15, 16, 17, 18, 10, 20, 19, 21, 0, 1, 2, 3, 22, 23, 10, 24, 25, 27, 28, 29, 30, 31, 32, 33, 34, 4, 5, 35, 36, 37, 38, 39, 5, 41, 43, 45, 47, 48, 49, 51, 54, 55, 8, 56, 57, 26, 58, 59, 60, 61, 62, 63, 64, 65, 6, 67, 68, 69, 70, 66, 73]
-                    selectExpressions: CastLongToBooleanViaLongToLong(col 0:tinyint) -> 13:boolean, CastLongToBooleanViaLongToLong(col 1:smallint) -> 14:boolean, CastLongToBooleanViaLongToLong(col 2:int) -> 15:boolean, CastLongToBooleanViaLongToLong(col 3:bigint) -> 16:boolean, CastDoubleToBooleanViaDoubleToLong(col 4:float) -> 17:boolean, CastDoubleToBooleanViaDoubleToLong(col 5:double) -> 18:boolean, CastLongToBooleanViaLongToLong(col 19:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 19:bigint) -> 20:boolean, CastTimestampToBoolean(col 8:timestamp) -> 19:boolean, CastStringToBoolean(col 6) -> 21:boolean, CastDoubleToLong(col 4:float) -> 22:int, CastDoubleToLong(col 5:double) -> 23:int, CastTimestampToLong(col 8:timestamp) -> 24:int, CastStringToLong(col 6:string) -> 25:int, CastStringToLong(col 26:string)(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 27:int, CastDoubleToLong(col 4:float) -> 28:tinyint, CastDoubleToLong(col 4:float) -> 29:smallint, CastDoubleToLong(col 4:float) -> 30:bigint, CastLongToDouble(col 0:tinyint) -> 31:double, CastLongToDouble(col 1:smallint) -> 32:double, CastLongToDouble(col 2:int) -> 33:double, CastLongToDouble(col 3:bigint) -> 34:double, CastLongToDouble(col 10:boolean) -> 35:double, CastTimestampToDouble(col 8:timestamp) -> 36:double, CastStringToDouble(col 6:string) -> 37:double, CastStringToDouble(col 26:string)(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 38:double, CastLongToFloatViaLongToDouble(col 2:int) -> 39:float, CastMillisecondsLongToTimestamp(col 0:tinyint) -> 41:timestamp, CastMillisecondsLongToTimestamp(col 1:smallint) -> 43:timestamp, CastMillisecondsLongToTimestamp(col 2:int) -> 45:timestamp, CastMillisecondsLongToTimestamp(col 3:bigint) -> 47:timestamp, CastDoubleToTimestamp(col 4:float) -> 48:timestamp, CastDoubleToTimestamp(col 5:double) -> 49:timestamp, CastMillisecondsLongToTimestamp(col 10:boolean) -> 51:timestamp, CastMillisecondsLongToTimestamp(col 52:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 52:bigint) -> 54:timestamp, CastDateToTimestamp(col 52:date)(children: CastTimestampToDate(col 8:timestamp) -> 52:date) -> 55:timestamp, VectorUDFAdaptor(CAST( cstring1 AS TIMESTAMP)) -> 56:timestamp, VectorUDFAdaptor(CAST( substr(cstring1, 1, 1) AS TIMESTAMP))(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 57:timestamp, CastLongToString(col 0:tinyint) -> 26:string, CastLongToString(col 1:smallint) -> 58:string, CastLongToString(col 2:int) -> 59:string, CastLongToString(col 3:bigint) -> 60:string, CastFloatToString(col 4:float) -> 61:string, CastDoubleToString(col 5:double) -> 62:string, CastBooleanToStringViaLongToString(col 10:boolean) -> 63:string, CastLongToString(col 52:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 52:bigint) -> 64:string, CastTimestampToString(col 8:timestamp) -> 65:string, CastStringGroupToString(col 66:char(10))(children: CastStringGroupToChar(col 6:string, maxLength 10) -> 66:char(10)) -> 67:string, CastStringGroupToString(col 66:varchar(10))(children: CastStringGroupToVarChar(col 6:string, maxLength 10) -> 66:varchar(10)) -> 68:string, CastLongToFloatViaLongToDouble(col 52:int)(children: CastDoubleToLong(col 4:float) -> 52:int) -> 69:float, CastLongToDouble(col 52:int)(children: LongColMultiplyLongScalar(col 2:int, val 2) -> 52:int) -> 70:double, CastDoubleToString(col 71:double)(children: FuncSinDoubleToDouble(col 4:float) -> 71:double) -> 66:string, DoubleColAddDoubleColumn(col 71:double, col 72:double)(children: col 71:float, CastLongToDouble(col 10:boolean) -> 72:double) -> 73:double
+                    selectExpressions: CastLongToBooleanViaLongToLong(col 0:tinyint) -> 13:boolean, CastLongToBooleanViaLongToLong(col 1:smallint) -> 14:boolean, CastLongToBooleanViaLongToLong(col 2:int) -> 15:boolean, CastLongToBooleanViaLongToLong(col 3:bigint) -> 16:boolean, CastDoubleToBooleanViaDoubleToLong(col 4:float) -> 17:boolean, CastDoubleToBooleanViaDoubleToLong(col 5:double) -> 18:boolean, CastLongToBooleanViaLongToLong(col 19:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 19:bigint) -> 20:boolean, CastTimestampToBoolean(col 8:timestamp) -> 19:boolean, CastStringToBoolean(col 6) -> 21:boolean, CastDoubleToLong(col 4:float) -> 22:int, CastDoubleToLong(col 5:double) -> 23:int, CastTimestampToLong(col 8:timestamp) -> 24:int, CastStringToLong(col 6:string) -> 25:int, CastStringToLong(col 26:string)(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 27:int, CastDoubleToLong(col 4:float) -> 28:tinyint, CastDoubleToLong(col 4:float) -> 29:smallint, CastDoubleToLong(col 4:float) -> 30:bigint, CastLongToDouble(col 0:tinyint) -> 31:double, CastLongToDouble(col 1:smallint) -> 32:double, CastLongToDouble(col 2:int) -> 33:double, CastLongToDouble(col 3:bigint) -> 34:double, CastLongToDouble(col 10:boolean) -> 35:double, CastTimestampToDouble(col 8:timestamp) -> 36:double, CastStringToDouble(col 6:string) -> 37:double, CastStringToDouble(col 26:string)(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 38:double, CastLongToFloatViaLongToDouble(col 2:int) -> 39:float, CastMillisecondsLongToTimestamp(col 0:tinyint) -> 41:timestamp, CastMillisecondsLongToTimestamp(col 1:smallint) -> 43:timestamp, CastMillisecondsLongToTimestamp(col 2:int) -> 45:timestamp, CastMillisecondsLongToTimestamp(col 3:bigint) -> 47:timestamp, CastDoubleToTimestamp(col 4:float) -> 48:timestamp, CastDoubleToTimestamp(col 5:double) -> 49:timestamp, CastMillisecondsLongToTimestamp(col 10:boolean) -> 51:timestamp, CastMillisecondsLongToTimestamp(col 52:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 52:bigint) -> 54:timestamp, CastDateToTimestamp(col 52:date)(children: CastTimestampToDate(col 8:timestamp) -> 52:date) -> 55:timestamp, VectorUDFAdaptor(CAST( cstring1 AS TIMESTAMP)) -> 56:timestamp, VectorUDFAdaptor(CAST( substr(cstring1, 1, 1) AS TIMESTAMP))(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 57:timestamp, CastLongToString(col 0:tinyint) -> 26:string, CastLongToString(col 1:smallint) -> 58:string, CastLongToString(col 2:int) -> 59:string, CastLongToString(col 3:bigint) -> 60:string, CastFloatToString(col 4:float) -> 61:string, CastDoubleToString(col 5:double) -> 62:string, CastBooleanToStringViaLongToString(col 10:boolean) -> 63:string, CastLongToString(col 52:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 52:bigint) -> 64:string, VectorUDFAdaptor(UDFToString(ctimestamp1)) -> 65:string, CastStringGroupToString(col 66:char(10))(children: CastStringGroupToChar(col 6:string, maxLength 10) -> 66:char(10)) -> 67:string, CastStringGroupToString(col 66:varchar(10))(children: CastStringGroupToVarChar(col 6:string, maxLength 10) -> 66:varchar(10)) -> 68:string, CastLongToFloatViaLongToDouble(col 52:int)(children: CastDoubleToLong(col 4:float) -> 52:int) -> 69:float, CastLongToDouble(col 52:int)(children: LongColMultiplyLongScalar(col 2:int, val 2) -> 52:int) -> 70:double, CastDoubleToString(col 71:double)(children: FuncSinDoubleToDouble(col 4:float) -> 71:double) -> 66:string, DoubleColAddDoubleColumn(col 71:double, col 72:double)(children: col 71:float, CastLongToDouble(col 10:boolean) -> 72:double) -> 73:double
                 Statistics: Num rows: 6144 Data size: 1453997 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
@@ -367,29 +367,29 @@ where cbigint % 250 = 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-true	NULL	true	true	true	NULL	false	false	true	true	-51	NULL	773600971	1053923250	-51	NULL	0	-28792	NULL	2	-51	-51	-51	-51.0	NULL	7.73600971E8	1.05392325E9	-51.0	NULL	0.0	-28791.549	NULL	2.0	7.7360096E8	NULL	1969-12-31 23:59:59.949	NULL	1970-01-09 22:53:20.971	1970-01-13 04:45:23.25	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:08.451	NULL	NULL	-51	NULL	773600971	1053923250	-51.0	NULL	FALSE	0	1969-12-31 16:00:08.451	2yK4Bx76O	2yK4Bx76O	2yK4Bx76O	-51.0	1.547201942E9	-0.6702291758433747	7.7360096E8
-true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	-102936434	-1312782750	8	NULL	0	-28785	NULL	NULL	8	8	8	8.0	NULL	-1.02936434E8	-1.31278275E9	8.0	NULL	0.0	-28784.108	NULL	NULL	-1.02936432E8	NULL	1970-01-01 00:00:00.008	NULL	1969-12-30 19:24:23.566	1969-12-16 19:20:17.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	-102936434	-1312782750	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	eJROSNhugc3kQR7Pb	eJROSNhugc	eJROSNhugc	8.0	-2.05872868E8	0.9893582466233818	-1.02936432E8
-true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	-661621138	-931392750	8	NULL	0	-28785	NULL	NULL	8	8	8	8.0	NULL	-6.61621138E8	-9.3139275E8	8.0	NULL	0.0	-28784.108	NULL	NULL	-6.6162112E8	NULL	1970-01-01 00:00:00.008	NULL	1969-12-24 08:12:58.862	1969-12-21 05:16:47.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	-661621138	-931392750	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	L15l8i5k558tBcDV20	L15l8i5k55	L15l8i5k55	8.0	-1.323242276E9	0.9893582466233818	-6.6162112E8
-true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	-669632311	1588591250	8	NULL	0	-28785	NULL	3	8	8	8	8.0	NULL	-6.69632311E8	1.58859125E9	8.0	NULL	0.0	-28784.108	NULL	3.0	-6.6963232E8	NULL	1970-01-01 00:00:00.008	NULL	1969-12-24 05:59:27.689	1970-01-19 09:16:31.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	-669632311	1588591250	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	3r3sDvfUkG0yTP3LnX5mNQRr	3r3sDvfUkG	3r3sDvfUkG	8.0	-1.339264622E9	0.9893582466233818	-6.6963232E8
-true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	805179664	868161500	8	NULL	0	-28785	NULL	NULL	8	8	8	8.0	NULL	8.05179664E8	8.681615E8	8.0	NULL	0.0	-28784.108	NULL	NULL	8.0517965E8	NULL	1970-01-01 00:00:00.008	NULL	1970-01-10 07:39:39.664	1970-01-11 01:09:21.5	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	805179664	868161500	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	e005B5q	e005B5q	e005B5q	8.0	1.610359328E9	0.9893582466233818	8.05179648E8
-true	NULL	true	true	true	NULL	true	false	true	true	-51	NULL	747553882	-1930467250	-51	NULL	1	-28792	NULL	NULL	-51	-51	-51	-51.0	NULL	7.47553882E8	-1.93046725E9	-51.0	NULL	1.0	-28791.549	NULL	NULL	7.4755386E8	NULL	1969-12-31 23:59:59.949	NULL	1970-01-09 15:39:13.882	1969-12-09 15:45:32.75	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:08.451	NULL	NULL	-51	NULL	747553882	-1930467250	-51.0	NULL	TRUE	0	1969-12-31 16:00:08.451	q8M86Fx0r	q8M86Fx0r	q8M86Fx0r	-51.0	1.495107764E9	-0.6702291758433747	7.47553857E8
-true	NULL	true	true	true	NULL	true	false	true	true	11	NULL	-335450417	1233327000	11	NULL	1	-28798	NULL	NULL	11	11	11	11.0	NULL	-3.35450417E8	1.233327E9	11.0	NULL	1.0	-28797.649	NULL	NULL	-3.35450432E8	NULL	1970-01-01 00:00:00.011	NULL	1969-12-28 02:49:09.583	1970-01-15 06:35:27	1970-01-01 00:00:11	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:02.351	NULL	NULL	11	NULL	-335450417	1233327000	11.0	NULL	TRUE	0	1969-12-31 16:00:02.351	dOYnqgaXoJ1P3ERwxe5N7	dOYnqgaXoJ	dOYnqgaXoJ	11.0	-6.70900834E8	-0.9999902065507035	-3.35450431E8
-true	NULL	true	true	true	NULL	true	false	true	true	11	NULL	-64615982	1803053750	11	NULL	1	-28798	NULL	8	11	11	11	11.0	NULL	-6.4615982E7	1.80305375E9	11.0	NULL	1.0	-28797.649	NULL	8.0	-6.4615984E7	NULL	1970-01-01 00:00:00.011	NULL	1969-12-31 06:03:04.018	1970-01-21 20:50:53.75	1970-01-01 00:00:11	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:02.351	NULL	NULL	11	NULL	-64615982	1803053750	11.0	NULL	TRUE	0	1969-12-31 16:00:02.351	8J5OB7K26PEV7kdbeHr3	8J5OB7K26P	8J5OB7K26P	11.0	-1.29231964E8	-0.9999902065507035	-6.4615983E7
-true	NULL	true	true	true	NULL	true	false	true	true	8	NULL	890988972	-1862301000	8	NULL	1	-28785	NULL	NULL	8	8	8	8.0	NULL	8.90988972E8	-1.862301E9	8.0	NULL	1.0	-28784.108	NULL	NULL	8.9098899E8	NULL	1970-01-01 00:00:00.008	NULL	1970-01-11 07:29:48.972	1969-12-10 10:41:39	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	890988972	-1862301000	8.0	NULL	TRUE	0	1969-12-31 16:00:15.892	XylAH4	XylAH4	XylAH4	8.0	1.781977944E9	0.9893582466233818	8.90988993E8
-true	NULL	true	true	true	NULL	true	false	true	true	8	NULL	930867246	1205399250	8	NULL	1	-28785	NULL	NULL	8	8	8	8.0	NULL	9.30867246E8	1.20539925E9	8.0	NULL	1.0	-28784.108	NULL	NULL	9.3086726E8	NULL	1970-01-01 00:00:00.008	NULL	1970-01-11 18:34:27.246	1970-01-14 22:49:59.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	930867246	1205399250	8.0	NULL	TRUE	0	1969-12-31 16:00:15.892	c1V8o1A	c1V8o1A	c1V8o1A	8.0	1.861734492E9	0.9893582466233818	9.30867265E8
-true	true	NULL	true	true	true	NULL	false	true	NULL	-14	-7196	NULL	-1552199500	-14	-7196	NULL	-28789	NULL	NULL	-14	-14	-14	-14.0	-7196.0	NULL	-1.5521995E9	-14.0	-7196.0	NULL	-28788.935	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.986	1969-12-31 23:59:52.804	NULL	1969-12-14 00:50:00.5	1969-12-31 23:59:46	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:11.065	NULL	NULL	-14	-7196	NULL	-1552199500	-14.0	-7196.0	NULL	0	1969-12-31 16:00:11.065	NULL	NULL	NULL	-14.0	NULL	-0.9906073556948704	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-21	-7196	NULL	1542429000	-21	-7196	NULL	-28805	NULL	NULL	-21	-21	-21	-21.0	-7196.0	NULL	1.542429E9	-21.0	-7196.0	NULL	-28804.1	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.979	1969-12-31 23:59:52.804	NULL	1970-01-18 20:27:09	1969-12-31 23:59:39	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:55.9	NULL	NULL	-21	-7196	NULL	1542429000	-21.0	-7196.0	NULL	0	1969-12-31 15:59:55.9	NULL	NULL	NULL	-21.0	NULL	-0.8366556385360561	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-24	-7196	NULL	829111000	-24	-7196	NULL	-28807	NULL	NULL	-24	-24	-24	-24.0	-7196.0	NULL	8.29111E8	-24.0	-7196.0	NULL	-28806.855	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.976	1969-12-31 23:59:52.804	NULL	1970-01-10 14:18:31	1969-12-31 23:59:36	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:53.145	NULL	NULL	-24	-7196	NULL	829111000	-24.0	-7196.0	NULL	0	1969-12-31 15:59:53.145	NULL	NULL	NULL	-24.0	NULL	0.9055783620066238	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-30	-200	NULL	1429852250	-30	-200	NULL	-28788	NULL	NULL	-30	-30	-30	-30.0	-200.0	NULL	1.42985225E9	-30.0	-200.0	NULL	-28787.065	NULL	NULL	NULL	-200.0	1969-12-31 23:59:59.97	1969-12-31 23:59:59.8	NULL	1970-01-17 13:10:52.25	1969-12-31 23:59:30	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:12.935	NULL	NULL	-30	-200	NULL	1429852250	-30.0	-200.0	NULL	0	1969-12-31 16:00:12.935	NULL	NULL	NULL	-30.0	NULL	0.9880316240928618	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	-2006216750	-36	-200	NULL	-28815	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	-2.00621675E9	-36.0	-200.0	NULL	-28814.252	NULL	NULL	NULL	-200.0	1969-12-31 23:59:59.964	1969-12-31 23:59:59.8	NULL	1969-12-08 18:43:03.25	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:45.748	NULL	NULL	-36	-200	NULL	-2006216750	-36.0	-200.0	NULL	0	1969-12-31 15:59:45.748	NULL	NULL	NULL	-36.0	NULL	0.9917788534431158	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	1599879000	-36	-200	NULL	-28807	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	1.599879E9	-36.0	-200.0	NULL	-28806.183	NULL	NULL	NULL	-200.0	1969-12-31 23:59:59.964	1969-12-31 23:59:59.8	NULL	1970-01-19 12:24:39	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:53.817	NULL	NULL	-36	-200	NULL	1599879000	-36.0	-200.0	NULL	0	1969-12-31 15:59:53.817	NULL	NULL	NULL	-36.0	NULL	0.9917788534431158	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-38	15601	NULL	-1858689000	-38	15601	NULL	-28802	NULL	NULL	-38	-38	-38	-38.0	15601.0	NULL	-1.858689E9	-38.0	15601.0	NULL	-28801.386	NULL	NULL	NULL	15601.0	1969-12-31 23:59:59.962	1970-01-01 00:00:15.601	NULL	1969-12-10 11:41:51	1969-12-31 23:59:22	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:58.614	NULL	NULL	-38	15601	NULL	-1858689000	-38.0	15601.0	NULL	0	1969-12-31 15:59:58.614	NULL	NULL	NULL	-38.0	NULL	-0.2963685787093853	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-5	15601	NULL	612416000	-5	15601	NULL	-28796	NULL	NULL	-5	-5	-5	-5.0	15601.0	NULL	6.12416E8	-5.0	15601.0	NULL	-28795.321	NULL	NULL	NULL	15601.0	1969-12-31 23:59:59.995	1970-01-01 00:00:15.601	NULL	1970-01-08 02:06:56	1969-12-31 23:59:55	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:04.679	NULL	NULL	-5	15601	NULL	612416000	-5.0	15601.0	NULL	0	1969-12-31 16:00:04.679	NULL	NULL	NULL	-5.0	NULL	0.9589242746631385	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-50	-7196	NULL	-1031187250	-50	-7196	NULL	-28806	NULL	NULL	-50	-50	-50	-50.0	-7196.0	NULL	-1.03118725E9	-50.0	-7196.0	NULL	-28805.267	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.95	1969-12-31 23:59:52.804	NULL	1969-12-20 01:33:32.75	1969-12-31 23:59:10	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:54.733	NULL	NULL	-50	-7196	NULL	-1031187250	-50.0	-7196.0	NULL	0	1969-12-31 15:59:54.733	NULL	NULL	NULL	-50.0	NULL	0.26237485370392877	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-59	-7196	NULL	-1604890000	-59	-7196	NULL	-28787	NULL	NULL	-59	-59	-59	-59.0	-7196.0	NULL	-1.60489E9	-59.0	-7196.0	NULL	-28786.85	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.941	1969-12-31 23:59:52.804	NULL	1969-12-13 10:11:50	1969-12-31 23:59:01	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:13.15	NULL	NULL	-59	-7196	NULL	-1604890000	-59.0	-7196.0	NULL	0	1969-12-31 16:00:13.15	NULL	NULL	NULL	-59.0	NULL	-0.6367380071391379	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-60	-7196	NULL	1516314750	-60	-7196	NULL	-28808	NULL	NULL	-60	-60	-60	-60.0	-7196.0	NULL	1.51631475E9	-60.0	-7196.0	NULL	-28807.592	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.94	1969-12-31 23:59:52.804	NULL	1970-01-18 13:11:54.75	1969-12-31 23:59:00	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:52.408	NULL	NULL	-60	-7196	NULL	1516314750	-60.0	-7196.0	NULL	0	1969-12-31 15:59:52.408	NULL	NULL	NULL	-60.0	NULL	0.3048106211022167	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-8	-7196	NULL	-1849991500	-8	-7196	NULL	-28797	NULL	NULL	-8	-8	-8	-8.0	-7196.0	NULL	-1.8499915E9	-8.0	-7196.0	NULL	-28796.864	NULL	NULL	NULL	-7196.0	1969-12-31 23:59:59.992	1969-12-31 23:59:52.804	NULL	1969-12-10 14:06:48.5	1969-12-31 23:59:52	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:03.136	NULL	NULL	-8	-7196	NULL	-1849991500	-8.0	-7196.0	NULL	0	1969-12-31 16:00:03.136	NULL	NULL	NULL	-8.0	NULL	-0.9893582466233818	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	20	15601	NULL	-362433250	20	15601	NULL	-28815	NULL	NULL	20	20	20	20.0	15601.0	NULL	-3.6243325E8	20.0	15601.0	NULL	-28814.871	NULL	NULL	NULL	15601.0	1970-01-01 00:00:00.02	1970-01-01 00:00:15.601	NULL	1969-12-27 19:19:26.75	1970-01-01 00:00:20	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:45.129	NULL	NULL	20	15601	NULL	-362433250	20.0	15601.0	NULL	0	1969-12-31 15:59:45.129	NULL	NULL	NULL	20.0	NULL	0.9129452507276277	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	48	15601	NULL	-795361000	48	15601	NULL	-28810	NULL	NULL	48	48	48	48.0	15601.0	NULL	-7.95361E8	48.0	15601.0	NULL	-28809.765	NULL	NULL	NULL	15601.0	1970-01-01 00:00:00.048	1970-01-01 00:00:15.601	NULL	1969-12-22 19:03:59	1970-01-01 00:00:48	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 15:59:50.235	NULL	NULL	48	15601	NULL	-795361000	48.0	15601.0	NULL	0	1969-12-31 15:59:50.235	NULL	NULL	NULL	48.0	NULL	-0.7682546613236668	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	5	-7196	NULL	-1015607500	5	-7196	NULL	-28790	NULL	NULL	5	5	5	5.0	-7196.0	NULL	-1.0156075E9	5.0	-7196.0	NULL	-28789.027	NULL	NULL	NULL	-7196.0	1970-01-01 00:00:00.005	1969-12-31 23:59:52.804	NULL	1969-12-20 05:53:12.5	1970-01-01 00:00:05	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:10.973	NULL	NULL	5	-7196	NULL	-1015607500	5.0	-7196.0	NULL	0	1969-12-31 16:00:10.973	NULL	NULL	NULL	5.0	NULL	-0.9589242746631385	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	59	-7196	NULL	-1137754500	59	-7196	NULL	-28790	NULL	NULL	59	59	59	59.0	-7196.0	NULL	-1.1377545E9	59.0	-7196.0	NULL	-28789.044	NULL	NULL	NULL	-7196.0	1970-01-01 00:00:00.059	1969-12-31 23:59:52.804	NULL	1969-12-18 19:57:25.5	1970-01-01 00:00:59	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 00:00:00	1969-12-31 16:00:10.956	NULL	NULL	59	-7196	NULL	-1137754500	59.0	-7196.0	NULL	0	1969-12-31 16:00:10.956	NULL	NULL	NULL	59.0	NULL	0.6367380071391379	NULL
+true	NULL	true	true	true	NULL	false	false	true	true	-51	NULL	773600971	1053923250	-51	NULL	0	8	NULL	2	-51	-51	-51	-51.0	NULL	7.73600971E8	1.05392325E9	-51.0	NULL	0.0	8.451	NULL	2.0	7.7360096E8	NULL	1969-12-31 15:59:59.949	NULL	1970-01-09 14:53:20.971	1970-01-12 20:45:23.25	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:08.451	NULL	NULL	-51	NULL	773600971	1053923250	-51.0	NULL	FALSE	0	1969-12-31 16:00:08.451	2yK4Bx76O	2yK4Bx76O	2yK4Bx76O	-51.0	1.547201942E9	-0.6702291758433747	7.7360096E8
+true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	-102936434	-1312782750	8	NULL	0	15	NULL	NULL	8	8	8	8.0	NULL	-1.02936434E8	-1.31278275E9	8.0	NULL	0.0	15.892	NULL	NULL	-1.02936432E8	NULL	1969-12-31 16:00:00.008	NULL	1969-12-30 11:24:23.566	1969-12-16 11:20:17.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	-102936434	-1312782750	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	eJROSNhugc3kQR7Pb	eJROSNhugc	eJROSNhugc	8.0	-2.05872868E8	0.9893582466233818	-1.02936432E8
+true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	-661621138	-931392750	8	NULL	0	15	NULL	NULL	8	8	8	8.0	NULL	-6.61621138E8	-9.3139275E8	8.0	NULL	0.0	15.892	NULL	NULL	-6.6162112E8	NULL	1969-12-31 16:00:00.008	NULL	1969-12-24 00:12:58.862	1969-12-20 21:16:47.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	-661621138	-931392750	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	L15l8i5k558tBcDV20	L15l8i5k55	L15l8i5k55	8.0	-1.323242276E9	0.9893582466233818	-6.6162112E8
+true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	-669632311	1588591250	8	NULL	0	15	NULL	3	8	8	8	8.0	NULL	-6.69632311E8	1.58859125E9	8.0	NULL	0.0	15.892	NULL	3.0	-6.6963232E8	NULL	1969-12-31 16:00:00.008	NULL	1969-12-23 21:59:27.689	1970-01-19 01:16:31.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	-669632311	1588591250	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	3r3sDvfUkG0yTP3LnX5mNQRr	3r3sDvfUkG	3r3sDvfUkG	8.0	-1.339264622E9	0.9893582466233818	-6.6963232E8
+true	NULL	true	true	true	NULL	false	false	true	true	8	NULL	805179664	868161500	8	NULL	0	15	NULL	NULL	8	8	8	8.0	NULL	8.05179664E8	8.681615E8	8.0	NULL	0.0	15.892	NULL	NULL	8.0517965E8	NULL	1969-12-31 16:00:00.008	NULL	1970-01-09 23:39:39.664	1970-01-10 17:09:21.5	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	805179664	868161500	8.0	NULL	FALSE	0	1969-12-31 16:00:15.892	e005B5q	e005B5q	e005B5q	8.0	1.610359328E9	0.9893582466233818	8.05179648E8
+true	NULL	true	true	true	NULL	true	false	true	true	-51	NULL	747553882	-1930467250	-51	NULL	1	8	NULL	NULL	-51	-51	-51	-51.0	NULL	7.47553882E8	-1.93046725E9	-51.0	NULL	1.0	8.451	NULL	NULL	7.4755386E8	NULL	1969-12-31 15:59:59.949	NULL	1970-01-09 07:39:13.882	1969-12-09 07:45:32.75	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:08.451	NULL	NULL	-51	NULL	747553882	-1930467250	-51.0	NULL	TRUE	0	1969-12-31 16:00:08.451	q8M86Fx0r	q8M86Fx0r	q8M86Fx0r	-51.0	1.495107764E9	-0.6702291758433747	7.47553857E8
+true	NULL	true	true	true	NULL	true	false	true	true	11	NULL	-335450417	1233327000	11	NULL	1	2	NULL	NULL	11	11	11	11.0	NULL	-3.35450417E8	1.233327E9	11.0	NULL	1.0	2.351	NULL	NULL	-3.35450432E8	NULL	1969-12-31 16:00:00.011	NULL	1969-12-27 18:49:09.583	1970-01-14 22:35:27	1969-12-31 16:00:11	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:02.351	NULL	NULL	11	NULL	-335450417	1233327000	11.0	NULL	TRUE	0	1969-12-31 16:00:02.351	dOYnqgaXoJ1P3ERwxe5N7	dOYnqgaXoJ	dOYnqgaXoJ	11.0	-6.70900834E8	-0.9999902065507035	-3.35450431E8
+true	NULL	true	true	true	NULL	true	false	true	true	11	NULL	-64615982	1803053750	11	NULL	1	2	NULL	8	11	11	11	11.0	NULL	-6.4615982E7	1.80305375E9	11.0	NULL	1.0	2.351	NULL	8.0	-6.4615984E7	NULL	1969-12-31 16:00:00.011	NULL	1969-12-30 22:03:04.018	1970-01-21 12:50:53.75	1969-12-31 16:00:11	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:02.351	NULL	NULL	11	NULL	-64615982	1803053750	11.0	NULL	TRUE	0	1969-12-31 16:00:02.351	8J5OB7K26PEV7kdbeHr3	8J5OB7K26P	8J5OB7K26P	11.0	-1.29231964E8	-0.9999902065507035	-6.4615983E7
+true	NULL	true	true	true	NULL	true	false	true	true	8	NULL	890988972	-1862301000	8	NULL	1	15	NULL	NULL	8	8	8	8.0	NULL	8.90988972E8	-1.862301E9	8.0	NULL	1.0	15.892	NULL	NULL	8.9098899E8	NULL	1969-12-31 16:00:00.008	NULL	1970-01-10 23:29:48.972	1969-12-10 02:41:39	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	890988972	-1862301000	8.0	NULL	TRUE	0	1969-12-31 16:00:15.892	XylAH4	XylAH4	XylAH4	8.0	1.781977944E9	0.9893582466233818	8.90988993E8
+true	NULL	true	true	true	NULL	true	false	true	true	8	NULL	930867246	1205399250	8	NULL	1	15	NULL	NULL	8	8	8	8.0	NULL	9.30867246E8	1.20539925E9	8.0	NULL	1.0	15.892	NULL	NULL	9.3086726E8	NULL	1969-12-31 16:00:00.008	NULL	1970-01-11 10:34:27.246	1970-01-14 14:49:59.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	930867246	1205399250	8.0	NULL	TRUE	0	1969-12-31 16:00:15.892	c1V8o1A	c1V8o1A	c1V8o1A	8.0	1.861734492E9	0.9893582466233818	9.30867265E8
+true	true	NULL	true	true	true	NULL	false	true	NULL	-14	-7196	NULL	-1552199500	-14	-7196	NULL	11	NULL	NULL	-14	-14	-14	-14.0	-7196.0	NULL	-1.5521995E9	-14.0	-7196.0	NULL	11.065	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.986	1969-12-31 15:59:52.804	NULL	1969-12-13 16:50:00.5	1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:11.065	NULL	NULL	-14	-7196	NULL	-1552199500	-14.0	-7196.0	NULL	0	1969-12-31 16:00:11.065	NULL	NULL	NULL	-14.0	NULL	-0.9906073556948704	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-21	-7196	NULL	1542429000	-21	-7196	NULL	-5	NULL	NULL	-21	-21	-21	-21.0	-7196.0	NULL	1.542429E9	-21.0	-7196.0	NULL	-4.1	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.979	1969-12-31 15:59:52.804	NULL	1970-01-18 12:27:09	1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:55.9	NULL	NULL	-21	-7196	NULL	1542429000	-21.0	-7196.0	NULL	0	1969-12-31 15:59:55.9	NULL	NULL	NULL	-21.0	NULL	-0.8366556385360561	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-24	-7196	NULL	829111000	-24	-7196	NULL	-7	NULL	NULL	-24	-24	-24	-24.0	-7196.0	NULL	8.29111E8	-24.0	-7196.0	NULL	-6.855	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.976	1969-12-31 15:59:52.804	NULL	1970-01-10 06:18:31	1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:53.145	NULL	NULL	-24	-7196	NULL	829111000	-24.0	-7196.0	NULL	0	1969-12-31 15:59:53.145	NULL	NULL	NULL	-24.0	NULL	0.9055783620066238	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-30	-200	NULL	1429852250	-30	-200	NULL	12	NULL	NULL	-30	-30	-30	-30.0	-200.0	NULL	1.42985225E9	-30.0	-200.0	NULL	12.935	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.97	1969-12-31 15:59:59.8	NULL	1970-01-17 05:10:52.25	1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:12.935	NULL	NULL	-30	-200	NULL	1429852250	-30.0	-200.0	NULL	0	1969-12-31 16:00:12.935	NULL	NULL	NULL	-30.0	NULL	0.9880316240928618	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	-2006216750	-36	-200	NULL	-15	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	-2.00621675E9	-36.0	-200.0	NULL	-14.252	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1969-12-08 10:43:03.25	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:45.748	NULL	NULL	-36	-200	NULL	-2006216750	-36.0	-200.0	NULL	0	1969-12-31 15:59:45.748	NULL	NULL	NULL	-36.0	NULL	0.9917788534431158	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	1599879000	-36	-200	NULL	-7	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	1.599879E9	-36.0	-200.0	NULL	-6.183	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1970-01-19 04:24:39	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:53.817	NULL	NULL	-36	-200	NULL	1599879000	-36.0	-200.0	NULL	0	1969-12-31 15:59:53.817	NULL	NULL	NULL	-36.0	NULL	0.9917788534431158	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-38	15601	NULL	-1858689000	-38	15601	NULL	-2	NULL	NULL	-38	-38	-38	-38.0	15601.0	NULL	-1.858689E9	-38.0	15601.0	NULL	-1.3860000000000001	NULL	NULL	NULL	15601.0	1969-12-31 15:59:59.962	1969-12-31 16:00:15.601	NULL	1969-12-10 03:41:51	1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:58.614	NULL	NULL	-38	15601	NULL	-1858689000	-38.0	15601.0	NULL	0	1969-12-31 15:59:58.614	NULL	NULL	NULL	-38.0	NULL	-0.2963685787093853	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-5	15601	NULL	612416000	-5	15601	NULL	4	NULL	NULL	-5	-5	-5	-5.0	15601.0	NULL	6.12416E8	-5.0	15601.0	NULL	4.679	NULL	NULL	NULL	15601.0	1969-12-31 15:59:59.995	1969-12-31 16:00:15.601	NULL	1970-01-07 18:06:56	1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:04.679	NULL	NULL	-5	15601	NULL	612416000	-5.0	15601.0	NULL	0	1969-12-31 16:00:04.679	NULL	NULL	NULL	-5.0	NULL	0.9589242746631385	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-50	-7196	NULL	-1031187250	-50	-7196	NULL	-6	NULL	NULL	-50	-50	-50	-50.0	-7196.0	NULL	-1.03118725E9	-50.0	-7196.0	NULL	-5.267	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.95	1969-12-31 15:59:52.804	NULL	1969-12-19 17:33:32.75	1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:54.733	NULL	NULL	-50	-7196	NULL	-1031187250	-50.0	-7196.0	NULL	0	1969-12-31 15:59:54.733	NULL	NULL	NULL	-50.0	NULL	0.26237485370392877	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-59	-7196	NULL	-1604890000	-59	-7196	NULL	13	NULL	NULL	-59	-59	-59	-59.0	-7196.0	NULL	-1.60489E9	-59.0	-7196.0	NULL	13.15	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.941	1969-12-31 15:59:52.804	NULL	1969-12-13 02:11:50	1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:13.15	NULL	NULL	-59	-7196	NULL	-1604890000	-59.0	-7196.0	NULL	0	1969-12-31 16:00:13.15	NULL	NULL	NULL	-59.0	NULL	-0.6367380071391379	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-60	-7196	NULL	1516314750	-60	-7196	NULL	-8	NULL	NULL	-60	-60	-60	-60.0	-7196.0	NULL	1.51631475E9	-60.0	-7196.0	NULL	-7.592	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.94	1969-12-31 15:59:52.804	NULL	1970-01-18 05:11:54.75	1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:52.408	NULL	NULL	-60	-7196	NULL	1516314750	-60.0	-7196.0	NULL	0	1969-12-31 15:59:52.408	NULL	NULL	NULL	-60.0	NULL	0.3048106211022167	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-8	-7196	NULL	-1849991500	-8	-7196	NULL	3	NULL	NULL	-8	-8	-8	-8.0	-7196.0	NULL	-1.8499915E9	-8.0	-7196.0	NULL	3.136	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.992	1969-12-31 15:59:52.804	NULL	1969-12-10 06:06:48.5	1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:03.136	NULL	NULL	-8	-7196	NULL	-1849991500	-8.0	-7196.0	NULL	0	1969-12-31 16:00:03.136	NULL	NULL	NULL	-8.0	NULL	-0.9893582466233818	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	20	15601	NULL	-362433250	20	15601	NULL	-15	NULL	NULL	20	20	20	20.0	15601.0	NULL	-3.6243325E8	20.0	15601.0	NULL	-14.871	NULL	NULL	NULL	15601.0	1969-12-31 16:00:00.02	1969-12-31 16:00:15.601	NULL	1969-12-27 11:19:26.75	1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:45.129	NULL	NULL	20	15601	NULL	-362433250	20.0	15601.0	NULL	0	1969-12-31 15:59:45.129	NULL	NULL	NULL	20.0	NULL	0.9129452507276277	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	48	15601	NULL	-795361000	48	15601	NULL	-10	NULL	NULL	48	48	48	48.0	15601.0	NULL	-7.95361E8	48.0	15601.0	NULL	-9.765	NULL	NULL	NULL	15601.0	1969-12-31 16:00:00.048	1969-12-31 16:00:15.601	NULL	1969-12-22 11:03:59	1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 15:59:50.235	NULL	NULL	48	15601	NULL	-795361000	48.0	15601.0	NULL	0	1969-12-31 15:59:50.235	NULL	NULL	NULL	48.0	NULL	-0.7682546613236668	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	5	-7196	NULL	-1015607500	5	-7196	NULL	10	NULL	NULL	5	5	5	5.0	-7196.0	NULL	-1.0156075E9	5.0	-7196.0	NULL	10.973	NULL	NULL	NULL	-7196.0	1969-12-31 16:00:00.005	1969-12-31 15:59:52.804	NULL	1969-12-19 21:53:12.5	1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:10.973	NULL	NULL	5	-7196	NULL	-1015607500	5.0	-7196.0	NULL	0	1969-12-31 16:00:10.973	NULL	NULL	NULL	5.0	NULL	-0.9589242746631385	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	59	-7196	NULL	-1137754500	59	-7196	NULL	10	NULL	NULL	59	59	59	59.0	-7196.0	NULL	-1.1377545E9	59.0	-7196.0	NULL	10.956	NULL	NULL	NULL	-7196.0	1969-12-31 16:00:00.059	1969-12-31 15:59:52.804	NULL	1969-12-18 11:57:25.5	1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 00:00:00	1969-12-31 16:00:10.956	NULL	NULL	59	-7196	NULL	-1137754500	59.0	-7196.0	NULL	0	1969-12-31 16:00:10.956	NULL	NULL	NULL	59.0	NULL	0.6367380071391379	NULL

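Two coordinated changes appear in vectorized_casts above: in the plan, the patch's native CastTimestampToString vector expression gives way to a row-mode VectorUDFAdaptor around UDFToString(ctimestamp1), and in the results every rendered timestamp moves by the same 8-hour Pacific offset. One row as a worked check; the query below is illustrative, and per the plan ctinyint is cast through CastMillisecondsLongToTimestamp, i.e. treated as milliseconds before the epoch:

-- first result row above has ctinyint = -51, i.e. epoch - 51 ms
select cast(ctinyint as timestamp) from alltypesorc where ctinyint = -51 limit 1;
-- patched (rendered in UTC):         1969-12-31 23:59:59.949
-- reverted (rendered in US/Pacific): 1969-12-31 15:59:59.949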

[08/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/timestamp_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp_2.q.out b/ql/src/test/results/clientpositive/timestamp_2.q.out
index ddd1ef1..9a05dfe 100644
--- a/ql/src/test/results/clientpositive/timestamp_2.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_2.q.out
@@ -64,7 +64,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -73,7 +73,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -82,7 +82,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -91,7 +91,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.293843661E9
+1.293872461E9
 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -147,7 +147,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -156,7 +156,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -165,7 +165,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -174,7 +174,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.293843661E9
+1.293872461E9
 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -230,7 +230,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -239,7 +239,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -248,7 +248,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -257,7 +257,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.2938436611E9
+1.2938724611E9
 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -313,7 +313,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -322,7 +322,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -331,7 +331,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -340,7 +340,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.2938436610001E9
+1.2938724610001E9
 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -396,7 +396,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -405,7 +405,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -414,7 +414,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -423,7 +423,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.2938436610001E9
+1.2938724610001E9
 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -479,7 +479,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -488,7 +488,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -497,7 +497,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -506,7 +506,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.293843661001E9
+1.293872461001E9
 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2

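Every numeric cast in the timestamp_2 hunks above shifts by the same amount: 1293872461 - 1293843661 = 28800 seconds, again the 8-hour Pacific offset. Those epoch values pin the stored wall clock down to 2011-01-01 01:01:01 (plus the sub-second digits visible in the double casts), read as UTC with the patch and as local time after the revert:

-- patched (UTC):         2011-01-01 01:01:01 UTC             -> 1293843661
-- reverted (US/Pacific): 2011-01-01 01:01:01 PST, i.e. UTC-8 -> 1293872461
select cast(t as bigint) from timestamp_2 limit 1;
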
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/timestamp_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp_3.q.out b/ql/src/test/results/clientpositive/timestamp_3.q.out
index 269ed86..6d59269 100644
--- a/ql/src/test/results/clientpositive/timestamp_3.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_3.q.out
@@ -100,7 +100,7 @@ POSTHOOK: query: select cast(t as string) from timestamp_3 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_3
 #### A masked pattern was here ####
-2011-04-30 03:46:56.4485
+2011-04-29 20:46:56.4485
 PREHOOK: query: select t, sum(t), count(*), sum(t)/count(*), avg(t) from timestamp_3 group by t
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_3
@@ -109,7 +109,7 @@ POSTHOOK: query: select t, sum(t), count(*), sum(t)/count(*), avg(t) from timest
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_3
 #### A masked pattern was here ####
-2011-04-30 03:46:56.4485	1.3041352164485E9	1	1.3041352164485E9	1.3041352164485E9
+2011-04-29 20:46:56.4485	1.3041352164485E9	1	1.3041352164485E9	1.3041352164485E9
 PREHOOK: query: drop table timestamp_3
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@timestamp_3

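In timestamp_3 the gap is seven hours rather than eight: 1.3041352164485E9 seconds after the epoch is 2011-04-30 03:46:56.4485 UTC, and late April falls under Pacific Daylight Time (UTC-7), so the reverted local rendering is 2011-04-29 20:46:56.4485. A quick illustrative check:

select cast(1.3041352164485E9 as timestamp);
-- patched (UTC):  2011-04-30 03:46:56.4485
-- reverted (PDT): 2011-04-29 20:46:56.4485
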
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/timestamp_comparison2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp_comparison2.q.out b/ql/src/test/results/clientpositive/timestamp_comparison2.q.out
index 490154e..8ef2552 100644
--- a/ql/src/test/results/clientpositive/timestamp_comparison2.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_comparison2.q.out
@@ -22,16 +22,16 @@ WHERE
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-3872
+1826
 PREHOOK: query: select count(*)
 FROM   alltypesorc
 WHERE  
 ((ctinyint != 0)
     AND 
-        (((ctimestamp1 <= timestamp('1970-01-01 00:00:00'))
+        (((ctimestamp1 <= timestamp('1969-12-31 16:00:00')) 
             OR ((ctinyint = cint) OR (cstring2 LIKE 'ss')))
          AND ((988888 < cdouble)
-             OR ((ctimestamp2 > timestamp('1969-12-31 15:55:29')) AND (3569 >= cdouble)))))
+             OR ((ctimestamp2 > timestamp('1969-12-31 07:55:29')) AND (3569 >= cdouble)))))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
@@ -40,11 +40,11 @@ FROM   alltypesorc
 WHERE  
 ((ctinyint != 0)
     AND 
-        (((ctimestamp1 <= timestamp('1970-01-01 00:00:00'))
+        (((ctimestamp1 <= timestamp('1969-12-31 16:00:00')) 
             OR ((ctinyint = cint) OR (cstring2 LIKE 'ss')))
          AND ((988888 < cdouble)
-             OR ((ctimestamp2 > timestamp('1969-12-31 15:55:29')) AND (3569 >= cdouble)))))
+             OR ((ctimestamp2 > timestamp('1969-12-31 07:55:29')) AND (3569 >= cdouble)))))
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-3872
+1826

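The timestamp_comparison2 revert rewrites the filter literals themselves by the same 8-hour offset, so the .q file and its expected count move together: 3872 matching rows under the patched UTC semantics, 1826 under the reverted local semantics. The literal arithmetic, for reference (illustrative):

-- '1970-01-01 00:00:00' - 8 h = '1969-12-31 16:00:00'
-- '1969-12-31 15:55:29' - 8 h = '1969-12-31 07:55:29'
select timestamp('1969-12-31 16:00:00'), timestamp('1969-12-31 07:55:29');
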
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/timestamp_dst.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp_dst.q.out b/ql/src/test/results/clientpositive/timestamp_dst.q.out
deleted file mode 100644
index 72a5ebf..0000000
--- a/ql/src/test/results/clientpositive/timestamp_dst.q.out
+++ /dev/null
@@ -1,9 +0,0 @@
-PREHOOK: query: select TIMESTAMP '2015-03-08 02:10:00.101'
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-#### A masked pattern was here ####
-POSTHOOK: query: select TIMESTAMP '2015-03-08 02:10:00.101'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-#### A masked pattern was here ####
-2015-03-08 02:10:00.101

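timestamp_dst.q.out disappears entirely, and the reason is worth spelling out: 2015-03-08 02:10:00.101 names a wall-clock time inside the US/Pacific spring-forward gap (02:00 to 03:00 did not exist on that date), so the literal only round-trips unchanged when timestamps are interpreted in UTC. A sketch of what the deleted test asserted:

select TIMESTAMP '2015-03-08 02:10:00.101';
-- patched (UTC interpretation): 2015-03-08 02:10:00.101, unchanged
-- reverted (US/Pacific): the literal falls in the DST gap and would be
-- adjusted, which is presumably why the test goes away with the revert
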
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/timestamp_formats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp_formats.q.out b/ql/src/test/results/clientpositive/timestamp_formats.q.out
index 4995c5f..98afa30 100644
--- a/ql/src/test/results/clientpositive/timestamp_formats.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_formats.q.out
@@ -36,27 +36,27 @@ POSTHOOK: query: SELECT * FROM timestamp_formats
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_formats
 #### A masked pattern was here ####
-2011-01-01 01:01:01.111111111	2011-01-01 01:01:01.111111111	2011-01-01T01:01:01.111111111	2011-01-01 01:01:01.111111111	2011-01-01T01:01:01	2011-01-01 01:01:01
-2012-02-02 02:02:02.222222222	2012-02-02 02:02:02.222222222	2012-02-02T02:02:02.222222222	2012-02-02 02:02:02.222222222	2012-02-02T02:02:02	2012-02-02 02:02:02
-2013-03-03 03:03:03.333333333	2013-03-03 03:03:03.333333333	2013-03-03T03:03:03.333333333	2013-03-03 03:03:03.333333333	2013-03-03T03:03:03	2013-03-03 03:03:03
-2014-04-04 04:04:04.444444444	2014-04-04 04:04:04.444444444	2014-04-04T04:04:04.444444444	2014-04-04 04:04:04.444444444	2014-04-04T04:04:04	2014-04-04 04:04:04
-2015-05-05 05:05:05.555555555	2015-05-05 05:05:05.555555555	2015-05-05T05:05:05.555555555	2015-05-05 05:05:05.555555555	2015-05-05T05:05:05	2015-05-05 05:05:05
-2016-06-06 06:06:06.666666666	2016-06-06 06:06:06.666666666	2016-06-06T06:06:06.666666666	2016-06-06 06:06:06.666666666	2016-06-06T06:06:06	2016-06-06 06:06:06
-2017-07-07 07:07:07.777777777	2017-07-07 07:07:07.777777777	2017-07-07T07:07:07.777777777	2017-07-07 07:07:07.777777777	2017-07-07T07:07:07	2017-07-07 07:07:07
-2018-08-08 08:08:08.888888888	2018-08-08 08:08:08.888888888	2018-08-08T08:08:08.888888888	2018-08-08 08:08:08.888888888	2018-08-08T08:08:08	2018-08-08 08:08:08
-2019-09-09 09:09:09.999999999	2019-09-09 09:09:09.999999999	2019-09-09T09:09:09.999999999	2019-09-09 09:09:09.999999999	2019-09-09T09:09:09	2019-09-09 09:09:09
-2020-10-10 10:10:10.101010101	2020-10-10 10:10:10.101010101	2020-10-10T10:10:10.101010101	2020-10-10 10:10:10.101010101	2020-10-10T10:10:10	2020-10-10 10:10:10
-2021-11-11 11:11:11.111111111	2021-11-11 11:11:11.111111111	2021-11-11T11:11:11.111111111	2021-11-11 11:11:11.111111111	2021-11-11T11:11:11	2021-11-11 11:11:11
-2022-12-12 12:12:12.121212121	2022-12-12 12:12:12.121212121	2022-12-12T12:12:12.121212121	2022-12-12 12:12:12.121212121	2022-12-12T12:12:12	2022-12-12 12:12:12
-2023-01-02 13:13:13.131313131	2023-01-02 13:13:13.131313131	2023-01-02T13:13:13.131313131	2023-01-02 13:13:13.131313131	2023-01-02T13:13:13	2023-01-02 13:13:13
-2024-02-02 14:14:14.141414141	2024-02-02 14:14:14.141414141	2024-02-02T14:14:14.141414141	2024-02-02 14:14:14.141414141	2024-02-02T14:14:14	2024-02-02 14:14:14
-2025-03-03 15:15:15.151515151	2025-03-03 15:15:15.151515151	2025-03-03T15:15:15.151515151	2025-03-03 15:15:15.151515151	2025-03-03T15:15:15	2025-03-03 15:15:15
-2026-04-04 16:16:16.161616161	2026-04-04 16:16:16.161616161	2026-04-04T16:16:16.161616161	2026-04-04 16:16:16.161616161	2026-04-04T16:16:16	2026-04-04 16:16:16
-2027-05-05 17:17:17.171717171	2027-05-05 17:17:17.171717171	2027-05-05T17:17:17.171717171	2027-05-05 17:17:17.171717171	2027-05-05T17:17:17	2027-05-05 17:17:17
-2028-06-06 18:18:18.181818181	2028-06-06 18:18:18.181818181	2028-06-06T18:18:18.181818181	2028-06-06 18:18:18.181818181	2028-06-06T18:18:18	2028-06-06 18:18:18
-2029-07-07 19:19:19.191919191	2029-07-07 19:19:19.191919191	2029-07-07T19:19:19.191919191	2029-07-07 19:19:19.191919191	2029-07-07T19:19:19	2029-07-07 19:19:19
-2030-08-08 20:20:20.202020202	2030-08-08 20:20:20.202020202	2030-08-08T20:20:20.202020202	2030-08-08 20:20:20.202020202	2030-08-08T20:20:20	2030-08-08 20:20:20
-2031-09-09 21:21:21.212121212	2031-09-09 21:21:21.212121212	2031-09-09T21:21:21.212121212	2031-09-09 21:21:21.212121212	2031-09-09T21:21:21	2031-09-09 21:21:21
+2011-01-01 01:01:01.111111111	2011-01-01 01:01:01.111111111	2011-01-01T01:01:01.111111111	NULL	2011-01-01T01:01:01	NULL
+2012-02-02 02:02:02.222222222	2012-02-02 02:02:02.222222222	2012-02-02T02:02:02.222222222	NULL	2012-02-02T02:02:02	NULL
+2013-03-03 03:03:03.333333333	2013-03-03 03:03:03.333333333	2013-03-03T03:03:03.333333333	NULL	2013-03-03T03:03:03	NULL
+2014-04-04 04:04:04.444444444	2014-04-04 04:04:04.444444444	2014-04-04T04:04:04.444444444	NULL	2014-04-04T04:04:04	NULL
+2015-05-05 05:05:05.555555555	2015-05-05 05:05:05.555555555	2015-05-05T05:05:05.555555555	NULL	2015-05-05T05:05:05	NULL
+2016-06-06 06:06:06.666666666	2016-06-06 06:06:06.666666666	2016-06-06T06:06:06.666666666	NULL	2016-06-06T06:06:06	NULL
+2017-07-07 07:07:07.777777777	2017-07-07 07:07:07.777777777	2017-07-07T07:07:07.777777777	NULL	2017-07-07T07:07:07	NULL
+2018-08-08 08:08:08.888888888	2018-08-08 08:08:08.888888888	2018-08-08T08:08:08.888888888	NULL	2018-08-08T08:08:08	NULL
+2019-09-09 09:09:09.999999999	2019-09-09 09:09:09.999999999	2019-09-09T09:09:09.999999999	NULL	2019-09-09T09:09:09	NULL
+2020-10-10 10:10:10.101010101	2020-10-10 10:10:10.101010101	2020-10-10T10:10:10.101010101	NULL	2020-10-10T10:10:10	NULL
+2021-11-11 11:11:11.111111111	2021-11-11 11:11:11.111111111	2021-11-11T11:11:11.111111111	NULL	2021-11-11T11:11:11	NULL
+2022-12-12 12:12:12.121212121	2022-12-12 12:12:12.121212121	2022-12-12T12:12:12.121212121	NULL	2022-12-12T12:12:12	NULL
+2023-01-02 13:13:13.131313131	2023-01-02 13:13:13.131313131	2023-01-02T13:13:13.131313131	NULL	2023-01-02T13:13:13	NULL
+2024-02-02 14:14:14.141414141	2024-02-02 14:14:14.141414141	2024-02-02T14:14:14.141414141	NULL	2024-02-02T14:14:14	NULL
+2025-03-03 15:15:15.151515151	2025-03-03 15:15:15.151515151	2025-03-03T15:15:15.151515151	NULL	2025-03-03T15:15:15	NULL
+2026-04-04 16:16:16.161616161	2026-04-04 16:16:16.161616161	2026-04-04T16:16:16.161616161	NULL	2026-04-04T16:16:16	NULL
+2027-05-05 17:17:17.171717171	2027-05-05 17:17:17.171717171	2027-05-05T17:17:17.171717171	NULL	2027-05-05T17:17:17	NULL
+2028-06-06 18:18:18.181818181	2028-06-06 18:18:18.181818181	2028-06-06T18:18:18.181818181	NULL	2028-06-06T18:18:18	NULL
+2029-07-07 19:19:19.191919191	2029-07-07 19:19:19.191919191	2029-07-07T19:19:19.191919191	NULL	2029-07-07T19:19:19	NULL
+2030-08-08 20:20:20.202020202	2030-08-08 20:20:20.202020202	2030-08-08T20:20:20.202020202	NULL	2030-08-08T20:20:20	NULL
+2031-09-09 21:21:21.212121212	2031-09-09 21:21:21.212121212	2031-09-09T21:21:21.212121212	NULL	2031-09-09T21:21:21	NULL
 PREHOOK: query: ALTER TABLE timestamp_formats SET SERDEPROPERTIES ("timestamp.formats"="yyyy-MM-dd'T'HH:mm:ss")
 PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
 PREHOOK: Input: default@timestamp_formats
@@ -73,27 +73,27 @@ POSTHOOK: query: SELECT * FROM timestamp_formats
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_formats
 #### A masked pattern was here ####
-2011-01-01 01:01:01.111111111	2011-01-01 01:01:01.111111111	2011-01-01T01:01:01.111111111	2011-01-01 01:01:01.111111111	2011-01-01T01:01:01	2011-01-01 01:01:01
-2012-02-02 02:02:02.222222222	2012-02-02 02:02:02.222222222	2012-02-02T02:02:02.222222222	2012-02-02 02:02:02.222222222	2012-02-02T02:02:02	2012-02-02 02:02:02
-2013-03-03 03:03:03.333333333	2013-03-03 03:03:03.333333333	2013-03-03T03:03:03.333333333	2013-03-03 03:03:03.333333333	2013-03-03T03:03:03	2013-03-03 03:03:03
-2014-04-04 04:04:04.444444444	2014-04-04 04:04:04.444444444	2014-04-04T04:04:04.444444444	2014-04-04 04:04:04.444444444	2014-04-04T04:04:04	2014-04-04 04:04:04
-2015-05-05 05:05:05.555555555	2015-05-05 05:05:05.555555555	2015-05-05T05:05:05.555555555	2015-05-05 05:05:05.555555555	2015-05-05T05:05:05	2015-05-05 05:05:05
-2016-06-06 06:06:06.666666666	2016-06-06 06:06:06.666666666	2016-06-06T06:06:06.666666666	2016-06-06 06:06:06.666666666	2016-06-06T06:06:06	2016-06-06 06:06:06
-2017-07-07 07:07:07.777777777	2017-07-07 07:07:07.777777777	2017-07-07T07:07:07.777777777	2017-07-07 07:07:07.777777777	2017-07-07T07:07:07	2017-07-07 07:07:07
-2018-08-08 08:08:08.888888888	2018-08-08 08:08:08.888888888	2018-08-08T08:08:08.888888888	2018-08-08 08:08:08.888888888	2018-08-08T08:08:08	2018-08-08 08:08:08
-2019-09-09 09:09:09.999999999	2019-09-09 09:09:09.999999999	2019-09-09T09:09:09.999999999	2019-09-09 09:09:09.999999999	2019-09-09T09:09:09	2019-09-09 09:09:09
-2020-10-10 10:10:10.101010101	2020-10-10 10:10:10.101010101	2020-10-10T10:10:10.101010101	2020-10-10 10:10:10.101010101	2020-10-10T10:10:10	2020-10-10 10:10:10
-2021-11-11 11:11:11.111111111	2021-11-11 11:11:11.111111111	2021-11-11T11:11:11.111111111	2021-11-11 11:11:11.111111111	2021-11-11T11:11:11	2021-11-11 11:11:11
-2022-12-12 12:12:12.121212121	2022-12-12 12:12:12.121212121	2022-12-12T12:12:12.121212121	2022-12-12 12:12:12.121212121	2022-12-12T12:12:12	2022-12-12 12:12:12
-2023-01-02 13:13:13.131313131	2023-01-02 13:13:13.131313131	2023-01-02T13:13:13.131313131	2023-01-02 13:13:13.131313131	2023-01-02T13:13:13	2023-01-02 13:13:13
-2024-02-02 14:14:14.141414141	2024-02-02 14:14:14.141414141	2024-02-02T14:14:14.141414141	2024-02-02 14:14:14.141414141	2024-02-02T14:14:14	2024-02-02 14:14:14
-2025-03-03 15:15:15.151515151	2025-03-03 15:15:15.151515151	2025-03-03T15:15:15.151515151	2025-03-03 15:15:15.151515151	2025-03-03T15:15:15	2025-03-03 15:15:15
-2026-04-04 16:16:16.161616161	2026-04-04 16:16:16.161616161	2026-04-04T16:16:16.161616161	2026-04-04 16:16:16.161616161	2026-04-04T16:16:16	2026-04-04 16:16:16
-2027-05-05 17:17:17.171717171	2027-05-05 17:17:17.171717171	2027-05-05T17:17:17.171717171	2027-05-05 17:17:17.171717171	2027-05-05T17:17:17	2027-05-05 17:17:17
-2028-06-06 18:18:18.181818181	2028-06-06 18:18:18.181818181	2028-06-06T18:18:18.181818181	2028-06-06 18:18:18.181818181	2028-06-06T18:18:18	2028-06-06 18:18:18
-2029-07-07 19:19:19.191919191	2029-07-07 19:19:19.191919191	2029-07-07T19:19:19.191919191	2029-07-07 19:19:19.191919191	2029-07-07T19:19:19	2029-07-07 19:19:19
-2030-08-08 20:20:20.202020202	2030-08-08 20:20:20.202020202	2030-08-08T20:20:20.202020202	2030-08-08 20:20:20.202020202	2030-08-08T20:20:20	2030-08-08 20:20:20
-2031-09-09 21:21:21.212121212	2031-09-09 21:21:21.212121212	2031-09-09T21:21:21.212121212	2031-09-09 21:21:21.212121212	2031-09-09T21:21:21	2031-09-09 21:21:21
+2011-01-01 01:01:01.111111111	2011-01-01 01:01:01.111111111	2011-01-01T01:01:01.111111111	NULL	2011-01-01T01:01:01	2011-01-01 01:01:01
+2012-02-02 02:02:02.222222222	2012-02-02 02:02:02.222222222	2012-02-02T02:02:02.222222222	NULL	2012-02-02T02:02:02	2012-02-02 02:02:02
+2013-03-03 03:03:03.333333333	2013-03-03 03:03:03.333333333	2013-03-03T03:03:03.333333333	NULL	2013-03-03T03:03:03	2013-03-03 03:03:03
+2014-04-04 04:04:04.444444444	2014-04-04 04:04:04.444444444	2014-04-04T04:04:04.444444444	NULL	2014-04-04T04:04:04	2014-04-04 04:04:04
+2015-05-05 05:05:05.555555555	2015-05-05 05:05:05.555555555	2015-05-05T05:05:05.555555555	NULL	2015-05-05T05:05:05	2015-05-05 05:05:05
+2016-06-06 06:06:06.666666666	2016-06-06 06:06:06.666666666	2016-06-06T06:06:06.666666666	NULL	2016-06-06T06:06:06	2016-06-06 06:06:06
+2017-07-07 07:07:07.777777777	2017-07-07 07:07:07.777777777	2017-07-07T07:07:07.777777777	NULL	2017-07-07T07:07:07	2017-07-07 07:07:07
+2018-08-08 08:08:08.888888888	2018-08-08 08:08:08.888888888	2018-08-08T08:08:08.888888888	NULL	2018-08-08T08:08:08	2018-08-08 08:08:08
+2019-09-09 09:09:09.999999999	2019-09-09 09:09:09.999999999	2019-09-09T09:09:09.999999999	NULL	2019-09-09T09:09:09	2019-09-09 09:09:09
+2020-10-10 10:10:10.101010101	2020-10-10 10:10:10.101010101	2020-10-10T10:10:10.101010101	NULL	2020-10-10T10:10:10	2020-10-10 10:10:10
+2021-11-11 11:11:11.111111111	2021-11-11 11:11:11.111111111	2021-11-11T11:11:11.111111111	NULL	2021-11-11T11:11:11	2021-11-11 11:11:11
+2022-12-12 12:12:12.121212121	2022-12-12 12:12:12.121212121	2022-12-12T12:12:12.121212121	NULL	2022-12-12T12:12:12	2022-12-12 12:12:12
+2023-01-02 13:13:13.131313131	2023-01-02 13:13:13.131313131	2023-01-02T13:13:13.131313131	NULL	2023-01-02T13:13:13	2023-01-02 13:13:13
+2024-02-02 14:14:14.141414141	2024-02-02 14:14:14.141414141	2024-02-02T14:14:14.141414141	NULL	2024-02-02T14:14:14	2024-02-02 14:14:14
+2025-03-03 15:15:15.151515151	2025-03-03 15:15:15.151515151	2025-03-03T15:15:15.151515151	NULL	2025-03-03T15:15:15	2025-03-03 15:15:15
+2026-04-04 16:16:16.161616161	2026-04-04 16:16:16.161616161	2026-04-04T16:16:16.161616161	NULL	2026-04-04T16:16:16	2026-04-04 16:16:16
+2027-05-05 17:17:17.171717171	2027-05-05 17:17:17.171717171	2027-05-05T17:17:17.171717171	NULL	2027-05-05T17:17:17	2027-05-05 17:17:17
+2028-06-06 18:18:18.181818181	2028-06-06 18:18:18.181818181	2028-06-06T18:18:18.181818181	NULL	2028-06-06T18:18:18	2028-06-06 18:18:18
+2029-07-07 19:19:19.191919191	2029-07-07 19:19:19.191919191	2029-07-07T19:19:19.191919191	NULL	2029-07-07T19:19:19	2029-07-07 19:19:19
+2030-08-08 20:20:20.202020202	2030-08-08 20:20:20.202020202	2030-08-08T20:20:20.202020202	NULL	2030-08-08T20:20:20	2030-08-08 20:20:20
+2031-09-09 21:21:21.212121212	2031-09-09 21:21:21.212121212	2031-09-09T21:21:21.212121212	NULL	2031-09-09T21:21:21	2031-09-09 21:21:21
 PREHOOK: query: ALTER TABLE timestamp_formats SET SERDEPROPERTIES ("timestamp.formats"="yyyy-MM-dd'T'HH:mm:ss,yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS")
 PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
 PREHOOK: Input: default@timestamp_formats
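
The NULLs above appear because the reverted parser accepts only the JDBC timestamp layout plus whatever patterns the timestamp.formats SerDe property registers, and returns NULL when nothing matches; that is why columns recover one by one as the ALTER TABLE statements add patterns. A sketch of that first-match-wins fallback; the helper and its name are hypothetical, not Hive's actual code:

    import java.sql.Timestamp;
    import java.text.ParseException;
    import java.text.SimpleDateFormat;

    public class FormatFallback {
      // Hypothetical: try the default layout, then each configured pattern, else null.
      static Timestamp parseOrNull(String s, String... patterns) {
        try {
          return Timestamp.valueOf(s);                 // yyyy-mm-dd hh:mm:ss[.f...]
        } catch (IllegalArgumentException ignored) { } // fall through to custom patterns
        for (String p : patterns) {
          try {
            return new Timestamp(new SimpleDateFormat(p).parse(s).getTime());
          } catch (ParseException ignored) { }         // try the next pattern
        }
        return null;                                   // surfaces as NULL in the rows above
      }

      public static void main(String[] args) {
        System.out.println(parseOrNull("2011-01-01T01:01:01"));                          // null
        System.out.println(parseOrNull("2011-01-01T01:01:01", "yyyy-MM-dd'T'HH:mm:ss")); // 2011-01-01 01:01:01.0
      }
    }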

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out b/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out
index 7c4108d..f2de761 100644
--- a/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out
@@ -105,32 +105,32 @@ where cbigint % 250 = 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-1969-12-31 23:59:59.964	1969-12-31 23:59:59.8	NULL	1969-12-08 18:43:03.25	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 15:59:45.748	NULL	NULL
-1969-12-31 23:59:59.964	1969-12-31 23:59:59.8	NULL	1970-01-19 12:24:39	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 15:59:53.817	NULL	NULL
-1969-12-31 23:59:59.97	1969-12-31 23:59:59.8	NULL	1970-01-17 13:10:52.25	1969-12-31 23:59:30	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 16:00:12.935	NULL	NULL
-1969-12-31 23:59:59.949	NULL	1970-01-09 22:53:20.971	1970-01-13 04:45:23.25	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:08.451	NULL	NULL
-1969-12-31 23:59:59.949	NULL	1970-01-09 15:39:13.882	1969-12-09 15:45:32.75	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:08.451	NULL	NULL
-1970-01-01 00:00:00.02	1970-01-01 00:00:15.601	NULL	1969-12-27 19:19:26.75	1970-01-01 00:00:20	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:45.129	NULL	NULL
-1969-12-31 23:59:59.962	1970-01-01 00:00:15.601	NULL	1969-12-10 11:41:51	1969-12-31 23:59:22	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:58.614	NULL	NULL
-1969-12-31 23:59:59.995	1970-01-01 00:00:15.601	NULL	1970-01-08 02:06:56	1969-12-31 23:59:55	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 16:00:04.679	NULL	NULL
-1970-01-01 00:00:00.048	1970-01-01 00:00:15.601	NULL	1969-12-22 19:03:59	1970-01-01 00:00:48	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:50.235	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1969-12-24 08:12:58.862	1969-12-21 05:16:47.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1969-12-30 19:24:23.566	1969-12-16 19:20:17.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1970-01-10 07:39:39.664	1970-01-11 01:09:21.5	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1969-12-24 05:59:27.689	1970-01-19 09:16:31.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1970-01-11 07:29:48.972	1969-12-10 10:41:39	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1970-01-11 18:34:27.246	1970-01-14 22:49:59.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1969-12-31 23:59:59.941	1969-12-31 23:59:52.804	NULL	1969-12-13 10:11:50	1969-12-31 23:59:01	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:13.15	NULL	NULL
-1969-12-31 23:59:59.979	1969-12-31 23:59:52.804	NULL	1970-01-18 20:27:09	1969-12-31 23:59:39	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:55.9	NULL	NULL
-1969-12-31 23:59:59.94	1969-12-31 23:59:52.804	NULL	1970-01-18 13:11:54.75	1969-12-31 23:59:00	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:52.408	NULL	NULL
-1969-12-31 23:59:59.986	1969-12-31 23:59:52.804	NULL	1969-12-14 00:50:00.5	1969-12-31 23:59:46	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:11.065	NULL	NULL
-1970-01-01 00:00:00.059	1969-12-31 23:59:52.804	NULL	1969-12-18 19:57:25.5	1970-01-01 00:00:59	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:10.956	NULL	NULL
-1969-12-31 23:59:59.992	1969-12-31 23:59:52.804	NULL	1969-12-10 14:06:48.5	1969-12-31 23:59:52	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:03.136	NULL	NULL
-1970-01-01 00:00:00.005	1969-12-31 23:59:52.804	NULL	1969-12-20 05:53:12.5	1970-01-01 00:00:05	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:10.973	NULL	NULL
-1969-12-31 23:59:59.976	1969-12-31 23:59:52.804	NULL	1970-01-10 14:18:31	1969-12-31 23:59:36	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:53.145	NULL	NULL
-1969-12-31 23:59:59.95	1969-12-31 23:59:52.804	NULL	1969-12-20 01:33:32.75	1969-12-31 23:59:10	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:54.733	NULL	NULL
-1970-01-01 00:00:00.011	NULL	1969-12-31 06:03:04.018	1970-01-21 20:50:53.75	1970-01-01 00:00:11	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:02.351	NULL	NULL
-1970-01-01 00:00:00.011	NULL	1969-12-28 02:49:09.583	1970-01-15 06:35:27	1970-01-01 00:00:11	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:02.351	NULL	NULL
+1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1969-12-08 10:43:03.25	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.748	NULL	NULL
+1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1970-01-19 04:24:39	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.817	NULL	NULL
+1969-12-31 15:59:59.97	1969-12-31 15:59:59.8	NULL	1970-01-17 05:10:52.25	1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 16:00:12.935	NULL	NULL
+1969-12-31 15:59:59.949	NULL	1970-01-09 14:53:20.971	1970-01-12 20:45:23.25	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:08.451	NULL	NULL
+1969-12-31 15:59:59.949	NULL	1970-01-09 07:39:13.882	1969-12-09 07:45:32.75	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:08.451	NULL	NULL
+1969-12-31 16:00:00.02	1969-12-31 16:00:15.601	NULL	1969-12-27 11:19:26.75	1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.129	NULL	NULL
+1969-12-31 15:59:59.962	1969-12-31 16:00:15.601	NULL	1969-12-10 03:41:51	1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:58.614	NULL	NULL
+1969-12-31 15:59:59.995	1969-12-31 16:00:15.601	NULL	1970-01-07 18:06:56	1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 16:00:04.679	NULL	NULL
+1969-12-31 16:00:00.048	1969-12-31 16:00:15.601	NULL	1969-12-22 11:03:59	1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:50.235	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1969-12-24 00:12:58.862	1969-12-20 21:16:47.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1969-12-30 11:24:23.566	1969-12-16 11:20:17.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1970-01-09 23:39:39.664	1970-01-10 17:09:21.5	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1969-12-23 21:59:27.689	1970-01-19 01:16:31.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1970-01-10 23:29:48.972	1969-12-10 02:41:39	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1970-01-11 10:34:27.246	1970-01-14 14:49:59.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 15:59:59.941	1969-12-31 15:59:52.804	NULL	1969-12-13 02:11:50	1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:13.15	NULL	NULL
+1969-12-31 15:59:59.979	1969-12-31 15:59:52.804	NULL	1970-01-18 12:27:09	1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:55.9	NULL	NULL
+1969-12-31 15:59:59.94	1969-12-31 15:59:52.804	NULL	1970-01-18 05:11:54.75	1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:52.408	NULL	NULL
+1969-12-31 15:59:59.986	1969-12-31 15:59:52.804	NULL	1969-12-13 16:50:00.5	1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:11.065	NULL	NULL
+1969-12-31 16:00:00.059	1969-12-31 15:59:52.804	NULL	1969-12-18 11:57:25.5	1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.956	NULL	NULL
+1969-12-31 15:59:59.992	1969-12-31 15:59:52.804	NULL	1969-12-10 06:06:48.5	1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:03.136	NULL	NULL
+1969-12-31 16:00:00.005	1969-12-31 15:59:52.804	NULL	1969-12-19 21:53:12.5	1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.973	NULL	NULL
+1969-12-31 15:59:59.976	1969-12-31 15:59:52.804	NULL	1970-01-10 06:18:31	1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.145	NULL	NULL
+1969-12-31 15:59:59.95	1969-12-31 15:59:52.804	NULL	1969-12-19 17:33:32.75	1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:54.733	NULL	NULL
+1969-12-31 16:00:00.011	NULL	1969-12-30 22:03:04.018	1970-01-21 12:50:53.75	1969-12-31 16:00:11	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:02.351	NULL	NULL
+1969-12-31 16:00:00.011	NULL	1969-12-27 18:49:09.583	1970-01-14 22:35:27	1969-12-31 16:00:11	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:02.351	NULL	NULL
 PREHOOK: query: explain
 select
 
@@ -238,29 +238,29 @@ where cbigint % 250 = 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1906-06-05 21:34:10	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 15:59:45.748	NULL	NULL
-1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	2020-09-12 02:50:00	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 15:59:53.817	NULL	NULL
-1969-12-31 23:59:30	1969-12-31 23:56:40	NULL	2015-04-24 05:10:50	1969-12-31 23:59:30	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 16:00:12.935	NULL	NULL
-1969-12-31 23:59:09	NULL	1994-07-07 17:09:31	2003-05-26 04:27:30	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:08.451	NULL	NULL
-1969-12-31 23:59:09	NULL	1993-09-09 05:51:22	1908-10-29 15:05:50	1969-12-31 23:59:09	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:08.451	NULL	NULL
-1970-01-01 00:00:20	1970-01-01 04:20:01	NULL	1958-07-08 04:05:50	1970-01-01 00:00:20	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:45.129	NULL	NULL
-1969-12-31 23:59:22	1970-01-01 04:20:01	NULL	1911-02-07 09:30:00	1969-12-31 23:59:22	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:58.614	NULL	NULL
-1969-12-31 23:59:55	1970-01-01 04:20:01	NULL	1989-05-29 03:33:20	1969-12-31 23:59:55	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 16:00:04.679	NULL	NULL
-1970-01-01 00:00:48	1970-01-01 04:20:01	NULL	1944-10-18 10:23:20	1970-01-01 00:00:48	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:50.235	NULL	NULL
-1970-01-01 00:00:08	NULL	1949-01-13 08:21:02	1940-06-26 23:47:30	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1966-09-27 14:32:46	1928-05-26 18:07:30	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1995-07-08 05:01:04	1997-07-06 03:58:20	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1948-10-12 15:01:29	2020-05-04 11:20:50	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1998-03-27 08:56:12	1910-12-27 14:10:00	1970-01-01 00:00:08	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1999-07-01 22:14:06	2008-03-13 09:07:30	1970-01-01 00:00:08	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1969-12-31 23:59:01	1969-12-31 22:00:04	NULL	1919-02-22 21:13:20	1969-12-31 23:59:01	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:13.15	NULL	NULL
-1969-12-31 23:59:39	1969-12-31 22:00:04	NULL	2018-11-17 04:30:00	1969-12-31 23:59:39	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:55.9	NULL	NULL
-1969-12-31 23:59:00	1969-12-31 22:00:04	NULL	2018-01-18 22:32:30	1969-12-31 23:59:00	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:52.408	NULL	NULL
-1969-12-31 23:59:46	1969-12-31 22:00:04	NULL	1920-10-24 17:28:20	1969-12-31 23:59:46	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:11.065	NULL	NULL
-1970-01-01 00:00:59	1969-12-31 22:00:04	NULL	1933-12-12 13:05:00	1970-01-01 00:00:59	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:10.956	NULL	NULL
-1969-12-31 23:59:52	1969-12-31 22:00:04	NULL	1911-05-19 01:28:20	1969-12-31 23:59:52	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:03.136	NULL	NULL
-1970-01-01 00:00:05	1969-12-31 22:00:04	NULL	1937-10-26 06:48:20	1970-01-01 00:00:05	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:10.973	NULL	NULL
-1969-12-31 23:59:36	1969-12-31 22:00:04	NULL	1996-04-10 04:36:40	1969-12-31 23:59:36	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:53.145	NULL	NULL
-1969-12-31 23:59:10	1969-12-31 22:00:04	NULL	1937-04-28 23:05:50	1969-12-31 23:59:10	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:54.733	NULL	NULL
-1970-01-01 00:00:11	NULL	1967-12-15 03:06:58	2027-02-19 16:15:50	1970-01-01 00:00:11	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:02.351	NULL	NULL
-1970-01-01 00:00:11	NULL	1959-05-16 11:19:43	2009-01-30 14:50:00	1970-01-01 00:00:11	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:02.351	NULL	NULL
+1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1906-06-05 13:34:10	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.748	NULL	NULL
+1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	2020-09-11 19:50:00	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.817	NULL	NULL
+1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	2015-04-23 22:10:50	1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 16:00:12.935	NULL	NULL
+1969-12-31 15:59:09	NULL	1994-07-07 10:09:31	2003-05-25 21:27:30	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:08.451	NULL	NULL
+1969-12-31 15:59:09	NULL	1993-09-08 22:51:22	1908-10-29 07:05:50	1969-12-31 15:59:09	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:08.451	NULL	NULL
+1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1958-07-07 21:05:50	1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.129	NULL	NULL
+1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1911-02-07 01:30:00	1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:58.614	NULL	NULL
+1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1989-05-28 20:33:20	1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 16:00:04.679	NULL	NULL
+1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1944-10-18 03:23:20	1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:50.235	NULL	NULL
+1969-12-31 16:00:08	NULL	1949-01-13 00:21:02	1940-06-26 15:47:30	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1966-09-27 07:32:46	1928-05-26 10:07:30	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1995-07-07 22:01:04	1997-07-05 20:58:20	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1948-10-12 08:01:29	2020-05-04 04:20:50	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1998-03-27 00:56:12	1910-12-27 06:10:00	1969-12-31 16:00:08	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1999-07-01 15:14:06	2008-03-13 02:07:30	1969-12-31 16:00:08	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1919-02-22 13:13:20	1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:13.15	NULL	NULL
+1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	2018-11-16 20:30:00	1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:55.9	NULL	NULL
+1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	2018-01-18 14:32:30	1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:52.408	NULL	NULL
+1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1920-10-24 09:28:20	1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:11.065	NULL	NULL
+1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1933-12-12 05:05:00	1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.956	NULL	NULL
+1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1911-05-18 17:28:20	1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:03.136	NULL	NULL
+1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1937-10-25 22:48:20	1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.973	NULL	NULL
+1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1996-04-09 21:36:40	1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.145	NULL	NULL
+1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1937-04-28 15:05:50	1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:54.733	NULL	NULL
+1969-12-31 16:00:11	NULL	1967-12-14 19:06:58	2027-02-19 08:15:50	1969-12-31 16:00:11	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:02.351	NULL	NULL
+1969-12-31 16:00:11	NULL	1959-05-16 04:19:43	2009-01-30 06:50:00	1969-12-31 16:00:11	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:02.351	NULL	NULL
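
Two things move in these rows at once: every wall clock is rendered in US/Pacific instead of UTC (the uniform eight-hour shift), and the numeric-to-timestamp casts keep the legacy convention in which integral types are read as milliseconds since the epoch while floating-point types are read as seconds. A small illustration of that split, assuming the same US/Pacific rendering zone:

    public class IntCastDemo {
      public static void main(String[] args) {
        // Integral cast: the value 8 is taken as 8 milliseconds.
        System.out.println(new java.sql.Timestamp(8L));                   // 1969-12-31 16:00:00.008
        // Floating-point cast: the value 8.0 is taken as 8 seconds.
        System.out.println(new java.sql.Timestamp((long) (8.0 * 1000))); // 1969-12-31 16:00:08.0
      }
    }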

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/timestamp_literal.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp_literal.q.out b/ql/src/test/results/clientpositive/timestamp_literal.q.out
index 55d7ad5..67750bb 100644
--- a/ql/src/test/results/clientpositive/timestamp_literal.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_literal.q.out
@@ -17,7 +17,7 @@ STAGE PLANS:
           Row Limit Per Split: 1
           Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: TIMESTAMP'2011-01-01 01:01:01' (type: timestamp)
+            expressions: TIMESTAMP'2011-01-01 01:01:01.0' (type: timestamp)
             outputColumnNames: _col0
             Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
             ListSink
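
The trailing .0 in the folded literal is a direct fingerprint of the revert: the literal is rendered through java.sql.Timestamp.toString(), which always emits at least one fractional digit.

    public class LiteralDemo {
      public static void main(String[] args) {
        System.out.println(java.sql.Timestamp.valueOf("2011-01-01 01:01:01")); // 2011-01-01 01:01:01.0
      }
    }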

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/timestamp_udf.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp_udf.q.out b/ql/src/test/results/clientpositive/timestamp_udf.q.out
index 201e4da..452f366 100644
--- a/ql/src/test/results/clientpositive/timestamp_udf.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_udf.q.out
@@ -54,7 +54,7 @@ POSTHOOK: query: select unix_timestamp(t), year(t), month(t), day(t), dayofmonth
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_udf_n0
 #### A masked pattern was here ####
-1304665689	2011	5	6	6	18	7	8	9	2011-05-06
+1304690889	2011	5	6	6	18	7	8	9	2011-05-06
 PREHOOK: query: select date_add(t, 5), date_sub(t, 10)
   from timestamp_udf_n0
 PREHOOK: type: QUERY
@@ -155,7 +155,7 @@ POSTHOOK: query: select unix_timestamp(t), year(t), month(t), day(t), dayofmonth
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_udf_string
 #### A masked pattern was here ####
-1304665689	2011	5	6	6	18	7	8	9	2011-05-06
+1304690889	2011	5	6	6	18	7	8	9	2011-05-06
 PREHOOK: query: select date_add(t, 5), date_sub(t, 10)  from timestamp_udf_string
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_udf_string
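
Both unix_timestamp goldens grow by exactly 25,200 seconds. The stored value t is 2011-05-06 07:08:09; in May US/Pacific observes daylight time (UTC-7), so reading that wall clock in the local zone instead of UTC pushes the epoch second forward by the seven-hour offset:

    public class OffsetArithmetic {
      public static void main(String[] args) {
        long asUtc = 1304665689L;  // 2011-05-06 07:08:09 read as UTC
        long pdtOffset = 7 * 3600; // US/Pacific is UTC-7 under daylight time
        System.out.println(asUtc + pdtOffset); // 1304690889, the reverted golden value
      }
    }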

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/timestamptz_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamptz_3.q.out b/ql/src/test/results/clientpositive/timestamptz_3.q.out
index f1d9379..3b5ea2a 100644
--- a/ql/src/test/results/clientpositive/timestamptz_3.q.out
+++ b/ql/src/test/results/clientpositive/timestamptz_3.q.out
@@ -36,7 +36,7 @@ POSTHOOK: query: select cast(to_epoch_milli(t) as timestamp) from tstz1_n1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tstz1_n1
 #### A masked pattern was here ####
-2016-01-03 20:26:34
+2016-01-03 12:26:34
 PREHOOK: query: select cast(t as timestamp) from tstz1_n1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tstz1_n1
@@ -54,4 +54,4 @@ POSTHOOK: query: select cast(to_epoch_milli(t) as timestamp) from tstz1_n1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tstz1_n1
 #### A masked pattern was here ####
-2016-01-03 20:26:34
+2016-01-03 12:26:34

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/typechangetest.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/typechangetest.q.out b/ql/src/test/results/clientpositive/typechangetest.q.out
index 49ec29e..5ca96a5 100644
--- a/ql/src/test/results/clientpositive/typechangetest.q.out
+++ b/ql/src/test/results/clientpositive/typechangetest.q.out
@@ -1258,10 +1258,10 @@ POSTHOOK: query: select cId, cTimeStamp from testAltColORC_n0 order by cId
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testaltcolorc_n0
 #### A masked pattern was here ####
-1	2017-11-07 01:02:49.999999999
-2	1399-12-23 17:01:01.000000001
-3	1399-12-23 17:01:01.000000001
-4	1399-12-23 17:01:01.000000001
+1	2017-11-07 09:02:49.999999999
+2	1400-01-01 01:01:01.000000001
+3	1400-01-01 01:01:01.000000001
+4	1400-01-01 01:01:01.000000001
 PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testaltcolorc_n0
@@ -1334,10 +1334,10 @@ POSTHOOK: query: select cId, cTimeStamp from testAltColORC_n0 order by cId
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testaltcolorc_n0
 #### A masked pattern was here ####
-1	2017-11-07 01:02:49.999999999
-2	1399-12-23 17:01:01.000000001
-3	1399-12-23 17:01:01.000000001
-4	1399-12-23 17:01:01.000000001
+1	2017-11-07 09:02:49.999999999
+2	1400-01-01 01:01:01.000000001
+3	1400-01-01 01:01:01.000000001
+4	1400-01-01 01:01:01.000000001
 PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testaltcolorc_n0
@@ -1410,10 +1410,10 @@ POSTHOOK: query: select cId, cTimeStamp from testAltColORC_n0 order by cId
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testaltcolorc_n0
 #### A masked pattern was here ####
-1	2017-11-07 01:02:49.999999999                                                                       
-2	1399-12-23 17:01:01.000000001                                                                       
-3	1399-12-23 17:01:01.000000001                                                                       
-4	1399-12-23 17:01:01.000000001                                                                       
+1	2017-11-07 09:02:49.999999999                                                                       
+2	1400-01-01 01:01:01.000000001                                                                       
+3	1400-01-01 01:01:01.000000001                                                                       
+4	1400-01-01 01:01:01.000000001                                                                       
 PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testaltcolorc_n0
@@ -1487,9 +1487,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testaltcolorc_n0
 #### A masked pattern was here ####
 1	2017
-2	1399
-3	1399
-4	1399
+2	1400
+3	1400
+4	1400
 PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testaltcolorc_n0
@@ -1563,9 +1563,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@testaltcolorc_n0
 #### A masked pattern was here ####
 1	2017
-2	1399
-3	1399
-4	1399
+2	1400
+3	1400
+4	1400
 PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testaltcolorc_n0
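
The year-1400 rows move by eight days plus eight hours. The eight hours is the usual UTC-to-Pacific rendering shift; the eight days is calendar drift: java.sql.Timestamp labels pre-1582 instants with the hybrid Julian/Gregorian calendar, while the implementation being reverted used the proleptic Gregorian calendar, and around 1400 the two reckonings disagree by eight days. A sketch of the drift, assuming both paths resolve zone offsets from the same tz database:

    public class CalendarDrift {
      public static void main(String[] args) {
        long hybrid = java.sql.Timestamp.valueOf("1400-01-01 01:01:01").getTime(); // Julian labels pre-1582
        long proleptic = java.time.LocalDateTime.of(1400, 1, 1, 1, 1, 1)
            .atZone(java.time.ZoneId.systemDefault()).toInstant().toEpochMilli();  // proleptic Gregorian
        System.out.println((hybrid - proleptic) / 86_400_000L); // 8 days of drift around 1400
      }
    }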

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/udf5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf5.q.out b/ql/src/test/results/clientpositive/udf5.q.out
index 04327e8..573eba5 100644
--- a/ql/src/test/results/clientpositive/udf5.q.out
+++ b/ql/src/test/results/clientpositive/udf5.q.out
@@ -33,7 +33,7 @@ STAGE PLANS:
           alias: dest1_n14
           Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: '2008-11-11 23:32:20' (type: string), DATE'2008-11-11' (type: date), 1 (type: int), 11 (type: int), 2008 (type: int), 1 (type: int), 11 (type: int), 2008 (type: int)
+            expressions: '2008-11-11 15:32:20' (type: string), DATE'2008-11-11' (type: date), 1 (type: int), 11 (type: int), 2008 (type: int), 1 (type: int), 11 (type: int), 2008 (type: int)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
             Statistics: Num rows: 1 Data size: 183 Basic stats: COMPLETE Column stats: COMPLETE
             ListSink
@@ -46,7 +46,7 @@ POSTHOOK: query: SELECT from_unixtime(1226446340), to_date(from_unixtime(1226446
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1_n14
 #### A masked pattern was here ####
-2008-11-11 23:32:20	2008-11-11	1	11	2008	1	11	2008
+2008-11-11 15:32:20	2008-11-11	1	11	2008	1	11	2008
 PREHOOK: query: EXPLAIN
 SELECT from_unixtime(unix_timestamp('2010-01-13 11:57:40', 'yyyy-MM-dd HH:mm:ss'), 'MM/dd/yy HH:mm:ss'), from_unixtime(unix_timestamp('2010-01-13 11:57:40')) from dest1_n14
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/udf_folder_constants.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_folder_constants.q.out b/ql/src/test/results/clientpositive/udf_folder_constants.q.out
index a33571c..33070c7 100644
--- a/ql/src/test/results/clientpositive/udf_folder_constants.q.out
+++ b/ql/src/test/results/clientpositive/udf_folder_constants.q.out
@@ -94,7 +94,7 @@ STAGE PLANS:
             1 _col0 (type: int)
           Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: 978307200L (type: bigint)
+            expressions: 978336000L (type: bigint)
             outputColumnNames: _col0
             Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
@@ -121,4 +121,4 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@udf_tb1
 POSTHOOK: Input: default@udf_tb2
 #### A masked pattern was here ####
-978307200
+978336000

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/udf_from_utc_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_from_utc_timestamp.q.out b/ql/src/test/results/clientpositive/udf_from_utc_timestamp.q.out
index 7e76090..d650e39 100644
--- a/ql/src/test/results/clientpositive/udf_from_utc_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/udf_from_utc_timestamp.q.out
@@ -27,7 +27,7 @@ STAGE PLANS:
           Row Limit Per Split: 1
           Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: TIMESTAMP'2012-02-11 02:30:00' (type: timestamp)
+            expressions: TIMESTAMP'2012-02-11 02:30:00.0' (type: timestamp)
             outputColumnNames: _col0
             Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
             ListSink
@@ -94,4 +94,4 @@ from_utc_timestamp('2012-02-11-04:30:00', 'PST')
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
-2012-02-11 00:00:00	2012-02-10 16:00:00
+NULL	NULL
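
The last row collapsing to a pair of NULLs is a parsing change rather than a zone change: '2012-02-11-04:30:00' carries a dash where the space should be, and the reverted string-to-timestamp path (apparently following java.sql.Timestamp.valueOf's strict yyyy-mm-dd hh:mm:ss[.f...] layout) rejects it, which Hive surfaces as NULL. A plausible reproduction under that assumption:

    public class StrictParseDemo {
      public static void main(String[] args) {
        try {
          java.sql.Timestamp.valueOf("2012-02-11-04:30:00"); // dash instead of space before the time
        } catch (IllegalArgumentException e) {
          System.out.println("unparseable -> NULL in the golden row above");
        }
      }
    }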

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/udf_mask.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_mask.q.out b/ql/src/test/results/clientpositive/udf_mask.q.out
index ca201fb..e5e2344 100644
--- a/ql/src/test/results/clientpositive/udf_mask.q.out
+++ b/ql/src/test/results/clientpositive/udf_mask.q.out
@@ -84,4 +84,4 @@ POSTHOOK: query: select mask('TestString-123', 'X', 'x', '0', ':'),
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
-XxxxXxxxxx:000	XxxxXxxxxx:000	XxxxXxxxxx:000::::::::::                                                                                                                                                                                                                                       	43	-9981	55555	55555	0001-01-01	0001-01-20	0001-04-01	2016-01-01	2016-04-01	2016-01-20	0001-04-20	2016-04-20
+XxxxXxxxxx:000	XxxxXxxxxx:000	XxxxXxxxxx:000::::::::::                                                                                                                                                                                                                                       	43	-9981	55555	55555	1900-01-01	1900-01-20	1900-04-01	2016-01-01	2016-04-01	2016-01-20	1900-04-20	2016-04-20
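
The masked date columns in this and the following mask_* files now bottom out at 1900 rather than year 1: the reverted mask UDFs manipulate dates through java.sql.Date and its deprecated 1900-based year setter, so masking the year down to its default apparently lands on 1900 instead of 0001. The 1900 bias itself is easy to demonstrate:

    public class MaskYearDemo {
      @SuppressWarnings("deprecation")
      public static void main(String[] args) {
        java.sql.Date d = java.sql.Date.valueOf("2016-01-20");
        d.setYear(0);           // deprecated java.util.Date setter: stored as (year - 1900)
        System.out.println(d);  // 1900-01-20
      }
    }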

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/udf_mask_first_n.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_mask_first_n.q.out b/ql/src/test/results/clientpositive/udf_mask_first_n.q.out
index 2d79ebe..823aca7 100644
--- a/ql/src/test/results/clientpositive/udf_mask_first_n.q.out
+++ b/ql/src/test/results/clientpositive/udf_mask_first_n.q.out
@@ -67,4 +67,4 @@ POSTHOOK: query: select mask_first_n('TestString-123', 4, 'X', 'x', '0', ':'),
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
-XxxxString-123	XxxxString-123	XxxxString-123                                                                                                                                                                                                                                                 	43	-9981	55555	55555	0001-01-01
+XxxxString-123	XxxxString-123	XxxxString-123                                                                                                                                                                                                                                                 	43	-9981	55555	55555	1900-01-01

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/udf_mask_last_n.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_mask_last_n.q.out b/ql/src/test/results/clientpositive/udf_mask_last_n.q.out
index 73e9498..57e74e3 100644
--- a/ql/src/test/results/clientpositive/udf_mask_last_n.q.out
+++ b/ql/src/test/results/clientpositive/udf_mask_last_n.q.out
@@ -67,4 +67,4 @@ POSTHOOK: query: select mask_last_n('TestString-123', 4, 'X', 'x', '0', ':'),
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
-TestString:000	TestString:000	TestString-123      ::::                                                                                                                                                                                                                                       	43	15555	15555	15555	0001-01-01
+TestString:000	TestString:000	TestString-123      ::::                                                                                                                                                                                                                                       	43	15555	15555	15555	1900-01-01

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/udf_mask_show_first_n.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_mask_show_first_n.q.out b/ql/src/test/results/clientpositive/udf_mask_show_first_n.q.out
index 5e7a382..7d92d63 100644
--- a/ql/src/test/results/clientpositive/udf_mask_show_first_n.q.out
+++ b/ql/src/test/results/clientpositive/udf_mask_show_first_n.q.out
@@ -67,4 +67,4 @@ POSTHOOK: query: select mask_show_first_n('TestString-123', 4, 'X', 'x', '0', ':
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
-TestXxxxxx:000	TestXxxxxx:000	TestXxxxxx:000::::::::::                                                                                                                                                                                                                                       	123	12345	12345	12345	0001-01-01
+TestXxxxxx:000	TestXxxxxx:000	TestXxxxxx:000::::::::::                                                                                                                                                                                                                                       	123	12345	12345	12345	1900-01-01

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/udf_mask_show_last_n.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_mask_show_last_n.q.out b/ql/src/test/results/clientpositive/udf_mask_show_last_n.q.out
index c293fa4..2f91394 100644
--- a/ql/src/test/results/clientpositive/udf_mask_show_last_n.q.out
+++ b/ql/src/test/results/clientpositive/udf_mask_show_last_n.q.out
@@ -67,4 +67,4 @@ POSTHOOK: query: select mask_show_last_n('TestString-123', 4, 'X', 'x', '0', ':'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
-XxxxXxxxxx-123	XxxxXxxxxx-123	XxxxXxxxxx:000::::::                                                                                                                                                                                                                                           	123	-13191	52345	52345	0001-01-01
+XxxxXxxxxx-123	XxxxXxxxxx-123	XxxxXxxxxx:000::::::                                                                                                                                                                                                                                           	123	-13191	52345	52345	1900-01-01

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/udf_reflect2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_reflect2.q.out b/ql/src/test/results/clientpositive/udf_reflect2.q.out
index af9015a..4834cd6 100644
--- a/ql/src/test/results/clientpositive/udf_reflect2.q.out
+++ b/ql/src/test/results/clientpositive/udf_reflect2.q.out
@@ -43,7 +43,7 @@ SELECT key,
        reflect2(ts, "getHours"),
        reflect2(ts, "getMinutes"),
        reflect2(ts, "getSeconds"),
-       reflect2(ts, "toEpochMilli")
+       reflect2(ts, "getTime")
 FROM (select cast(key as int) key, value, cast('2013-02-15 19:41:20' as timestamp) ts from src) a LIMIT 5
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN EXTENDED
@@ -77,7 +77,7 @@ SELECT key,
        reflect2(ts, "getHours"),
        reflect2(ts, "getMinutes"),
        reflect2(ts, "getSeconds"),
-       reflect2(ts, "toEpochMilli")
+       reflect2(ts, "getTime")
 FROM (select cast(key as int) key, value, cast('2013-02-15 19:41:20' as timestamp) ts from src) a LIMIT 5
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
@@ -93,7 +93,7 @@ STAGE PLANS:
           Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
           GatherStats: false
           Select Operator
-            expressions: UDFToInteger(key) (type: int), reflect2(UDFToInteger(key),'byteValue') (type: tinyint), reflect2(UDFToInteger(key),'shortValue') (type: smallint), reflect2(UDFToInteger(key),'intValue') (type: int), reflect2(UDFToInteger(key),'longValue') (type: bigint), reflect2(UDFToInteger(key),'floatValue') (type: float), reflect2(UDFToInteger(key),'doubleValue') (type: double), reflect2(UDFToInteger(key),'toString') (type: string), value (type: string), reflect2(value,'concat','_concat') (type: string), reflect2(value,'contains','86') (type: boolean), reflect2(value,'startsWith','v') (type: boolean), reflect2(value,'endsWith','6') (type: boolean), reflect2(value,'equals','val_86') (type: boolean), reflect2(value,'equalsIgnoreCase','VAL_86') (type: boolean), reflect2(value,'getBytes') (type: binary), reflect2(value,'indexOf','1') (type: int), reflect2(value,'lastIndexOf','1') (type: int), reflect2(value,'replace','val','VALUE') (type: string), reflect2(value,'substring',1) (type: string), reflect2(value,'substring',1,5) (type: string), reflect2(value,'toUpperCase') (type: string), reflect2(value,'trim') (type: string), TIMESTAMP'2013-02-15 19:41:20' (type: timestamp), 2013 (type: int), 2 (type: int), 15 (type: int), 19 (type: int), 41 (type: int), 20 (type: int), 1360957280000L (type: bigint)
+            expressions: UDFToInteger(key) (type: int), reflect2(UDFToInteger(key),'byteValue') (type: tinyint), reflect2(UDFToInteger(key),'shortValue') (type: smallint), reflect2(UDFToInteger(key),'intValue') (type: int), reflect2(UDFToInteger(key),'longValue') (type: bigint), reflect2(UDFToInteger(key),'floatValue') (type: float), reflect2(UDFToInteger(key),'doubleValue') (type: double), reflect2(UDFToInteger(key),'toString') (type: string), value (type: string), reflect2(value,'concat','_concat') (type: string), reflect2(value,'contains','86') (type: boolean), reflect2(value,'startsWith','v') (type: boolean), reflect2(value,'endsWith','6') (type: boolean), reflect2(value,'equals','val_86') (type: boolean), reflect2(value,'equalsIgnoreCase','VAL_86') (type: boolean), reflect2(value,'getBytes') (type: binary), reflect2(value,'indexOf','1') (type: int), reflect2(value,'lastIndexOf','1') (type: int), reflect2(value,'replace','val','VALUE') (type: string), reflect2(value,'substring',1) (type: string), reflect2(value,'substring',1,5) (type: string), reflect2(value,'toUpperCase') (type: string), reflect2(value,'trim') (type: string), TIMESTAMP'2013-02-15 19:41:20.0' (type: timestamp), 113 (type: int), 1 (type: int), 5 (type: int), 19 (type: int), 41 (type: int), 20 (type: int), 1360986080000L (type: bigint)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Limit
@@ -131,7 +131,7 @@ PREHOOK: query: SELECT key,
        reflect2(ts, "getHours"),
        reflect2(ts, "getMinutes"),
        reflect2(ts, "getSeconds"),
-       reflect2(ts, "toEpochMilli")
+       reflect2(ts, "getTime")
 FROM (select cast(key as int) key, value, cast('2013-02-15 19:41:20' as timestamp) ts from src) a LIMIT 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -166,13 +166,13 @@ POSTHOOK: query: SELECT key,
        reflect2(ts, "getHours"),
        reflect2(ts, "getMinutes"),
        reflect2(ts, "getSeconds"),
-       reflect2(ts, "toEpochMilli")
+       reflect2(ts, "getTime")
 FROM (select cast(key as int) key, value, cast('2013-02-15 19:41:20' as timestamp) ts from src) a LIMIT 5
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-238	-18	238	238	238	238.0	238.0	238	val_238	val_238_concat	false	true	false	false	false	val_238	-1	-1	VALUE_238	al_238	al_2	VAL_238	val_238	2013-02-15 19:41:20	2013	2	15	19	41	20	1360957280000
-86	86	86	86	86	86.0	86.0	86	val_86	val_86_concat	true	true	true	true	true	val_86	-1	-1	VALUE_86	al_86	al_8	VAL_86	val_86	2013-02-15 19:41:20	2013	2	15	19	41	20	1360957280000
-311	55	311	311	311	311.0	311.0	311	val_311	val_311_concat	false	true	false	false	false	val_311	5	6	VALUE_311	al_311	al_3	VAL_311	val_311	2013-02-15 19:41:20	2013	2	15	19	41	20	1360957280000
-27	27	27	27	27	27.0	27.0	27	val_27	val_27_concat	false	true	false	false	false	val_27	-1	-1	VALUE_27	al_27	al_2	VAL_27	val_27	2013-02-15 19:41:20	2013	2	15	19	41	20	1360957280000
-165	-91	165	165	165	165.0	165.0	165	val_165	val_165_concat	false	true	false	false	false	val_165	4	4	VALUE_165	al_165	al_1	VAL_165	val_165	2013-02-15 19:41:20	2013	2	15	19	41	20	1360957280000
+238	-18	238	238	238	238.0	238.0	238	val_238	val_238_concat	false	true	false	false	false	val_238	-1	-1	VALUE_238	al_238	al_2	VAL_238	val_238	2013-02-15 19:41:20	113	1	5	19	41	20	1360986080000
+86	86	86	86	86	86.0	86.0	86	val_86	val_86_concat	true	true	true	true	true	val_86	-1	-1	VALUE_86	al_86	al_8	VAL_86	val_86	2013-02-15 19:41:20	113	1	5	19	41	20	1360986080000
+311	55	311	311	311	311.0	311.0	311	val_311	val_311_concat	false	true	false	false	false	val_311	5	6	VALUE_311	al_311	al_3	VAL_311	val_311	2013-02-15 19:41:20	113	1	5	19	41	20	1360986080000
+27	27	27	27	27	27.0	27.0	27	val_27	val_27_concat	false	true	false	false	false	val_27	-1	-1	VALUE_27	al_27	al_2	VAL_27	val_27	2013-02-15 19:41:20	113	1	5	19	41	20	1360986080000
+165	-91	165	165	165	165.0	165.0	165	val_165	val_165_concat	false	true	false	false	false	val_165	4	4	VALUE_165	al_165	al_1	VAL_165	val_165	2013-02-15 19:41:20	113	1	5	19	41	20	1360986080000
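
The reflect2 columns revert to java.sql.Timestamp's accessors, which carry java.util.Date's legacy conventions: the values that flip to 113, 1 and 5 match getYear() being 1900-based (2013 becomes 113), getMonth() being zero-based (February becomes 1), and getDay() returning the day of the week (2013-02-15 was a Friday, hence 5), while getTime() reads the wall clock in the local zone, yielding an epoch value 28,800,000 ms larger under US/Pacific than the UTC reading. A quick check, assuming that zone:

    public class ReflectGetters {
      @SuppressWarnings("deprecation")
      public static void main(String[] args) {
        java.sql.Timestamp ts = java.sql.Timestamp.valueOf("2013-02-15 19:41:20");
        System.out.println(ts.getYear());  // 113  (years since 1900)
        System.out.println(ts.getMonth()); // 1    (zero-based month)
        System.out.println(ts.getDay());   // 5    (day of week, Friday)
        System.out.println(ts.getTime());  // 1360986080000 with a US/Pacific default zone
      }
    }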

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out b/ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out
index 215968b..071a456 100644
--- a/ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/udf_to_unix_timestamp.q.out
@@ -41,7 +41,7 @@ FROM oneline_n0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@oneline_n0
 #### A masked pattern was here ####
-2009-03-20 11:30:01	1237548601
+2009-03-20 11:30:01	1237573801
 PREHOOK: query: SELECT
   '2009-03-20',
   to_unix_timestamp('2009-03-20', 'yyyy-MM-dd')
@@ -56,7 +56,7 @@ FROM oneline_n0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@oneline_n0
 #### A masked pattern was here ####
-2009-03-20	1237507200
+2009-03-20	1237532400
 PREHOOK: query: SELECT
   '2009 Mar 20 11:30:01 am',
   to_unix_timestamp('2009 Mar 20 11:30:01 am', 'yyyy MMM dd h:mm:ss a')
@@ -71,7 +71,7 @@ FROM oneline_n0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@oneline_n0
 #### A masked pattern was here ####
-2009 Mar 20 11:30:01 am	1237548601
+2009 Mar 20 11:30:01 am	1237573801
 PREHOOK: query: SELECT
   'random_string',
   to_unix_timestamp('random_string')

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/udf_to_utc_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_to_utc_timestamp.q.out b/ql/src/test/results/clientpositive/udf_to_utc_timestamp.q.out
index 7264a94..4abf0ed 100644
--- a/ql/src/test/results/clientpositive/udf_to_utc_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/udf_to_utc_timestamp.q.out
@@ -27,7 +27,7 @@ STAGE PLANS:
           Row Limit Per Split: 1
           Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: TIMESTAMP'2012-02-11 18:30:00' (type: timestamp)
+            expressions: TIMESTAMP'2012-02-11 18:30:00.0' (type: timestamp)
             outputColumnNames: _col0
             Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
             ListSink

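The only change in this plan is cosmetic: java.sql.Timestamp.toString() always prints at least one fractional digit, so the reverted code renders the literal as 18:30:00.0 where the HIVE-12192 Timestamp type dropped an all-zero fraction. For illustration:

    import java.sql.Timestamp;

    public class TimestampToStringDemo {
      public static void main(String[] args) {
        // a trailing ".0" is kept even when nanos == 0
        System.out.println(Timestamp.valueOf("2012-02-11 18:30:00")); // 2012-02-11 18:30:00.0
      }
    }
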
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out b/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out
index e768eca..c86a85c 100644
--- a/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/udf_unix_timestamp.q.out
@@ -41,7 +41,7 @@ FROM oneline
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@oneline
 #### A masked pattern was here ####
-2009-03-20 11:30:01	1237548601
+2009-03-20 11:30:01	1237573801
 PREHOOK: query: SELECT
   '2009-03-20',
   unix_timestamp('2009-03-20', 'yyyy-MM-dd')
@@ -56,7 +56,7 @@ FROM oneline
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@oneline
 #### A masked pattern was here ####
-2009-03-20	1237507200
+2009-03-20	1237532400
 PREHOOK: query: SELECT
   '2009 Mar 20 11:30:01 am',
   unix_timestamp('2009 Mar 20 11:30:01 am', 'yyyy MMM dd h:mm:ss a')
@@ -71,7 +71,7 @@ FROM oneline
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@oneline
 #### A masked pattern was here ####
-2009 Mar 20 11:30:01 am	1237548601
+2009 Mar 20 11:30:01 am	1237573801
 unix_timestamp(void) is deprecated. Use current_timestamp instead.
 unix_timestamp(void) is deprecated. Use current_timestamp instead.
 PREHOOK: query: create table foo_n3 as SELECT

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/update_all_types.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/update_all_types.q.out b/ql/src/test/results/clientpositive/update_all_types.q.out
index 9fb8d3f..db9f166 100644
--- a/ql/src/test/results/clientpositive/update_all_types.q.out
+++ b/ql/src/test/results/clientpositive/update_all_types.q.out
@@ -147,7 +147,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@acid_uat
 #### A masked pattern was here ####
 -51	NULL	-1071480828	-1071480828	-1401575336	-51.0	NULL	-51.00	1969-12-31 16:00:08.451	NULL	aw724t8c5558x2xneC624	aw724t8c5558x2xneC624	4uE7l74tESBiKfu7c8wM7GA             	true
-1	2	-1070883071	3	4	3.14	6.28	5.99	2014-09-01 00:00:00	2014-09-01	its a beautiful day in the neighbhorhood	a beautiful day for a neighbor	wont you be mine                    	true
+1	2	-1070883071	3	4	3.14	6.28	5.99	NULL	2014-09-01	its a beautiful day in the neighbhorhood	a beautiful day for a neighbor	wont you be mine                    	true
 11	NULL	-1069736047	-1069736047	-453772520	11.0	NULL	11.00	1969-12-31 16:00:02.351	NULL	k17Am8uPHWk02cEf1jet	k17Am8uPHWk02cEf1jet	qrXLLNX1                            	true
 11	NULL	-1072910839	-1072910839	2048385991	11.0	NULL	11.00	1969-12-31 16:00:02.351	NULL	0iqrc5	0iqrc5	KbaDXiN85adbHRx58v                  	false
 11	NULL	-1073279343	-1073279343	-1595604468	11.0	NULL	11.00	1969-12-31 16:00:02.351	NULL	oj1YrV5Wa	oj1YrV5Wa	P76636jJ6qM17d7DIy                  	true
@@ -181,7 +181,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@acid_uat
 #### A masked pattern was here ####
 -102	-51	-1071480828	-1071480828	-1401575336	-51.0	-51.0	-51.00	1969-12-31 16:00:08.451	NULL	aw724t8c5558x2xneC624	aw724t8c5558x2xneC624	4uE7l74tESBiKfu7c8wM7GA             	true
-1	2	-1070883071	3	4	3.14	6.28	5.99	2014-09-01 00:00:00	2014-09-01	its a beautiful day in the neighbhorhood	a beautiful day for a neighbor	wont you be mine                    	true
+1	2	-1070883071	3	4	3.14	6.28	5.99	NULL	2014-09-01	its a beautiful day in the neighbhorhood	a beautiful day for a neighbor	wont you be mine                    	true
 11	NULL	-1069736047	-1069736047	-453772520	11.0	NULL	11.00	1969-12-31 16:00:02.351	NULL	k17Am8uPHWk02cEf1jet	k17Am8uPHWk02cEf1jet	qrXLLNX1                            	true
 11	NULL	-1072910839	-1072910839	2048385991	11.0	NULL	11.00	1969-12-31 16:00:02.351	NULL	0iqrc5	0iqrc5	KbaDXiN85adbHRx58v                  	false
 11	NULL	-1073279343	-1073279343	-1595604468	11.0	NULL	11.00	1969-12-31 16:00:02.351	NULL	oj1YrV5Wa	oj1YrV5Wa	P76636jJ6qM17d7DIy                  	true

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_aggregate_9.q.out b/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
index 198f688..9487881 100644
--- a/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
+++ b/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
@@ -418,4 +418,4 @@ POSTHOOK: query: select min(ts), max(ts), sum(ts), avg(ts) from vectortab2korc_n
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@vectortab2korc_n4
 #### A masked pattern was here ####
-2013-02-18 21:06:48	2081-02-22 01:21:53	4.591334884281E12	2.4254278311045957E9
+2013-02-18 21:06:48	2081-02-22 01:21:53	4.591384881081E12	2.4254542425150557E9

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
index 6c3c3d3..ac9ef5c 100644
--- a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
+++ b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
@@ -296,7 +296,7 @@ order by k
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hundredorc
 #### A masked pattern was here ####
--8303557760
+-27832781952
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION
 SELECT count(*), bin
 FROM hundredorc


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/orc_merge6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge6.q.out b/ql/src/test/results/clientpositive/spark/orc_merge6.q.out
index 717337c..e50ab30 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge6.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge6.q.out
@@ -41,17 +41,17 @@ STAGE PLANS:
                 TableScan
                   alias: orc_merge5_n4
                   filterExpr: (userid <= 13L) (type: boolean)
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (userid <= 13L) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                      Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                             output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -121,10 +121,14 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
-Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+Found 3 items
+-rw-r--r--   3 ### USER ### ### GROUP ###        667 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###          0 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        623 ### HDFS DATE ### hdfs://### HDFS PATH ###
+Found 3 items
+-rw-r--r--   3 ### USER ### ### GROUP ###        667 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###          0 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        623 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a_n1
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a_n1
@@ -175,17 +179,17 @@ STAGE PLANS:
                 TableScan
                   alias: orc_merge5_n4
                   filterExpr: (userid <= 13L) (type: boolean)
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (userid <= 13L) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                      Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                             output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -293,9 +297,9 @@ POSTHOOK: Input: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###       1054 ### HDFS DATE ### hdfs://### HDFS PATH ###
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###       1054 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a_n1
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a_n1
@@ -368,10 +372,14 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
-Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+Found 3 items
+-rw-r--r--   3 ### USER ### ### GROUP ###        667 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###          0 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        623 ### HDFS DATE ### hdfs://### HDFS PATH ###
+Found 3 items
+-rw-r--r--   3 ### USER ### ### GROUP ###        667 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###          0 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        623 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a_n1
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a_n1
@@ -464,9 +472,9 @@ POSTHOOK: Input: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###       1054 ### HDFS DATE ### hdfs://### HDFS PATH ###
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###       1054 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a_n1
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a_n1

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/orc_merge7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge7.q.out b/ql/src/test/results/clientpositive/spark/orc_merge7.q.out
index 5f7f8c1..aa2f8bd 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge7.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge7.q.out
@@ -40,14 +40,14 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orc_merge5_n2
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp), subtype (type: double)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
-                      Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                       table:
                           input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                           output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -153,9 +153,10 @@ POSTHOOK: Input: default@orc_merge5a_n0
 POSTHOOK: Output: default@orc_merge5a_n0
 POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        603 ### HDFS DATE ### hdfs://### HDFS PATH ###
-Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        645 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        614 ### HDFS DATE ### hdfs://### HDFS PATH ###
+Found 2 items
+-rw-r--r--   3 ### USER ### ### GROUP ###        623 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        623 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a_n0
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a_n0
@@ -208,14 +209,14 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orc_merge5_n2
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp), subtype (type: double)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
-                      Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                       table:
                           input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                           output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -358,9 +359,9 @@ POSTHOOK: Input: default@orc_merge5a_n0
 POSTHOOK: Output: default@orc_merge5a_n0
 POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        603 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        614 ### HDFS DATE ### hdfs://### HDFS PATH ###
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        645 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        971 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a_n0
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a_n0
@@ -473,9 +474,10 @@ POSTHOOK: Input: default@orc_merge5a_n0
 POSTHOOK: Output: default@orc_merge5a_n0
 POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        603 ### HDFS DATE ### hdfs://### HDFS PATH ###
-Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        645 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        614 ### HDFS DATE ### hdfs://### HDFS PATH ###
+Found 2 items
+-rw-r--r--   3 ### USER ### ### GROUP ###        623 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        623 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a_n0
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a_n0
@@ -570,9 +572,9 @@ POSTHOOK: Input: default@orc_merge5a_n0
 POSTHOOK: Output: default@orc_merge5a_n0
 POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        603 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        614 ### HDFS DATE ### hdfs://### HDFS PATH ###
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        645 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        971 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a_n0
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a_n0

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/orc_merge8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge8.q.out b/ql/src/test/results/clientpositive/spark/orc_merge8.q.out
index 5361ba9..26b0155 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge8.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge8.q.out
@@ -117,10 +117,10 @@ POSTHOOK: Lineage: alltypes_orc_n1.ti SIMPLE [(alltypes_n1)alltypes_n1.FieldSche
 POSTHOOK: Lineage: alltypes_orc_n1.ts SIMPLE [(alltypes_n1)alltypes_n1.FieldSchema(name:ts, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc_n1.vc SIMPLE [(alltypes_n1)alltypes_n1.FieldSchema(name:vc, type:varchar(5), comment:null), ]
 Found 4 items
--rw-r--r--   3 ### USER ### ### GROUP ###       1637 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###       1637 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###       1652 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###       1652 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###       1647 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###       1647 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###       1665 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###       1665 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: alter table alltypes_orc_n1 concatenate
 PREHOOK: type: ALTER_TABLE_MERGE
 PREHOOK: Input: default@alltypes_orc_n1
@@ -130,4 +130,4 @@ POSTHOOK: type: ALTER_TABLE_MERGE
 POSTHOOK: Input: default@alltypes_orc_n1
 POSTHOOK: Output: default@alltypes_orc_n1
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###       4648 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###       4695 ### HDFS DATE ### hdfs://### HDFS PATH ###

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/orc_merge9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge9.q.out b/ql/src/test/results/clientpositive/spark/orc_merge9.q.out
index 6a3bb8b..4206f75 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge9.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge9.q.out
@@ -35,8 +35,8 @@ POSTHOOK: type: LOAD
 #### A masked pattern was here ####
 POSTHOOK: Output: default@ts_merge
 Found 2 items
--rw-r--r--   3 ### USER ### ### GROUP ###       2298 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###       2298 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###     246402 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###     246402 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select count(*) from ts_merge
 PREHOOK: type: QUERY
 PREHOOK: Input: default@ts_merge
@@ -64,7 +64,7 @@ POSTHOOK: Input: default@ts_merge
 POSTHOOK: Output: hdfs://### HDFS PATH ###
 50000
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###       4260 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###     457164 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: create table a_merge like alltypesorc
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
@@ -102,7 +102,7 @@ POSTHOOK: type: LOAD
 #### A masked pattern was here ####
 POSTHOOK: Output: default@a_merge
 Found 2 items
--rw-r--r--   3 ### USER ### ### GROUP ###     295765 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###     295711 ### HDFS DATE ### hdfs://### HDFS PATH ###
 -rw-r--r--   3 ### USER ### ### GROUP ###     295616 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select count(*) from a_merge
 PREHOOK: type: QUERY
@@ -131,7 +131,7 @@ POSTHOOK: Input: default@a_merge
 POSTHOOK: Output: hdfs://### HDFS PATH ###
 24576
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###     590716 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###     590654 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: insert into table a_merge select * from alltypesorc
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alltypesorc
@@ -153,8 +153,8 @@ POSTHOOK: Lineage: a_merge.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSch
 POSTHOOK: Lineage: a_merge.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ]
 POSTHOOK: Lineage: a_merge.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
 Found 2 items
--rw-r--r--   3 ### USER ### ### GROUP ###     590716 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###     295765 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###     590654 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###     295711 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select count(*) from a_merge
 PREHOOK: type: QUERY
 PREHOOK: Input: default@a_merge
@@ -182,4 +182,4 @@ POSTHOOK: Input: default@a_merge
 POSTHOOK: Output: hdfs://### HDFS PATH ###
 36864
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###     885665 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###     885549 ### HDFS DATE ### hdfs://### HDFS PATH ###

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out b/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out
index 1b28976..f01c368 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out
@@ -40,17 +40,17 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orc_merge5_n3
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (userid <= 13L) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                      Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                             output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -158,12 +158,12 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5b
 POSTHOOK: Output: default@orc_merge5b
 Found 6 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
 -rw-r--r--   3 ### USER ### ### GROUP ###        679 ### HDFS DATE ### hdfs://### HDFS PATH ###
 -rw-r--r--   3 ### USER ### ### GROUP ###        679 ### HDFS DATE ### hdfs://### HDFS PATH ###
 -rw-r--r--   3 ### USER ### ### GROUP ###        679 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        690 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        690 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        690 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select * from orc_merge5b
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_merge5b
@@ -207,10 +207,10 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5b
 POSTHOOK: Output: default@orc_merge5b
 Found 4 items
--rw-r--r--   3 ### USER ### ### GROUP ###       1320 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        679 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        679 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        679 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###       1353 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        690 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        690 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        690 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select * from orc_merge5b
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_merge5b

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out b/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out
index 03e420c..617b873 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out
@@ -40,14 +40,14 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orc_merge5
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp), subtype (type: double)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
-                      Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                       table:
                           input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                           output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -215,15 +215,15 @@ POSTHOOK: Input: default@orc_merge5a
 POSTHOOK: Output: default@orc_merge5a
 POSTHOOK: Output: default@orc_merge5a@st=0.8
 Found 4 items
--rw-r--r--   3 ### USER ### ### GROUP ###        602 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        602 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        602 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        602 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        614 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        614 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        614 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        614 ### HDFS DATE ### hdfs://### HDFS PATH ###
 Found 4 items
--rw-r--r--   3 ### USER ### ### GROUP ###        645 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        645 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        645 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        645 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        656 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        656 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        656 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        656 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a
@@ -327,13 +327,13 @@ POSTHOOK: Input: default@orc_merge5a
 POSTHOOK: Output: default@orc_merge5a
 POSTHOOK: Output: default@orc_merge5a@st=0.8
 Found 3 items
--rw-r--r--   3 ### USER ### ### GROUP ###        898 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        602 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        602 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        920 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        614 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        614 ### HDFS DATE ### hdfs://### HDFS PATH ###
 Found 3 items
--rw-r--r--   3 ### USER ### ### GROUP ###        956 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        645 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r--   3 ### USER ### ### GROUP ###        645 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        978 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        656 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        656 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: show partitions orc_merge5a
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@orc_merge5a

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/parquet_vectorization_13.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/parquet_vectorization_13.q.out b/ql/src/test/results/clientpositive/spark/parquet_vectorization_13.q.out
index 15eb73b..bce1f8a 100644
--- a/ql/src/test/results/clientpositive/spark/parquet_vectorization_13.q.out
+++ b/ql/src/test/results/clientpositive/spark/parquet_vectorization_13.q.out
@@ -24,8 +24,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -57,8 +57,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -92,8 +92,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28789.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28788.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-                    predicate: (((UDFToDouble(ctimestamp1) > -28789.0D) and (UDFToDouble(ctimestamp2) <> -28788.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 11.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val 12.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+                    predicate: (((UDFToDouble(ctimestamp1) > 11.0D) and (UDFToDouble(ctimestamp2) <> 12.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
                     Statistics: Num rows: 2730 Data size: 32760 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -268,8 +268,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -302,8 +302,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -377,8 +377,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -410,8 +410,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -444,8 +444,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28801.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28801.336)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-                    predicate: (((UDFToDouble(ctimestamp1) > -28801.388D) and (UDFToDouble(ctimestamp2) <> -28801.336D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -1.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -1.3359999999999999)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+                    predicate: (((UDFToDouble(ctimestamp1) > -1.388D) and (UDFToDouble(ctimestamp2) <> -1.3359999999999999D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
                     Statistics: Num rows: 2730 Data size: 32760 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -596,8 +596,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -630,8 +630,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16

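Every literal rewritten in this file differs from its replacement by exactly 28,800, the 8-hour Pacific standard offset in seconds. These queries compare timestamps cast to double, i.e. seconds since the epoch, and the HIVE-12192 versions expressed the constants against UTC while the restored ones express them against local time. The arithmetic, with the fractional literals shifting the same way:

    import java.math.BigDecimal;

    public class FilterLiteralShiftDemo {
      public static void main(String[] args) {
        long offset = 8 * 3600;          // 28,800 s, UTC-8
        System.out.println(11 - offset); // -28789
        System.out.println(12 - offset); // -28788
        System.out.println(new BigDecimal("-1.388")
            .subtract(BigDecimal.valueOf(offset))); // -28801.388
      }
    }
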
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/parquet_vectorization_7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/parquet_vectorization_7.q.out b/ql/src/test/results/clientpositive/spark/parquet_vectorization_7.q.out
index 7a41e77..52b8126 100644
--- a/ql/src/test/results/clientpositive/spark/parquet_vectorization_7.q.out
+++ b/ql/src/test/results/clientpositive/spark/parquet_vectorization_7.q.out
@@ -16,11 +16,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -43,11 +43,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -79,8 +79,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28815.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
-                    predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28815.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+                        predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -15.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+                    predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -15.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
                     Statistics: Num rows: 5461 Data size: 65532 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -182,11 +182,11 @@ PREHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -210,11 +210,11 @@ POSTHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -264,11 +264,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -291,11 +291,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -326,8 +326,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28792.315)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
-                    predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28792.315D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+                        predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 7.6850000000000005)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+                    predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > 7.6850000000000005D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
                     Statistics: Num rows: 5461 Data size: 65532 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -414,11 +414,11 @@ PREHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -442,11 +442,11 @@ POSTHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25

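The query-literal changes above all shift by exactly 28800 seconds: -28800.0 becomes 0.0, and -28792.315 becomes 7.685 (-28792.315 - (-28800.0) = 7.685). A plausible reading of the numbers is that 28800 s = 8 h is the UTC offset of the zone the test harness runs in (America/Los_Angeles in winter), so reverting the UTC-based timestamp arithmetic of HIVE-12192 moves every timestamp-to-double constant by that offset.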
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/parquet_vectorization_decimal_date.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/parquet_vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/spark/parquet_vectorization_decimal_date.q.out
index 0395b69..2cbc466 100644
--- a/ql/src/test/results/clientpositive/spark/parquet_vectorization_decimal_date.q.out
+++ b/ql/src/test/results/clientpositive/spark/parquet_vectorization_decimal_date.q.out
@@ -92,13 +92,13 @@ POSTHOOK: query: SELECT cdate, cdecimal from date_decimal_test_parquet where cin
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_decimal_test_parquet
 #### A masked pattern was here ####
-1970-01-07	-7959.5837837838
-1970-01-07	-2516.4135135135
-1970-01-07	-9445.0621621622
-1970-01-07	-5713.7459459459
-1970-01-07	8963.6405405405
-1970-01-07	4193.6243243243
-1970-01-07	2964.3864864865
-1970-01-07	-4673.2540540541
-1970-01-07	-9216.8945945946
-1970-01-07	-9287.3756756757
+1970-01-06	-7959.5837837838
+1970-01-06	-2516.4135135135
+1970-01-06	-9445.0621621622
+1970-01-06	-5713.7459459459
+1970-01-06	8963.6405405405
+1970-01-06	4193.6243243243
+1970-01-06	2964.3864864865
+1970-01-06	-4673.2540540541
+1970-01-06	-9216.8945945946
+1970-01-06	-9287.3756756757

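The expected dates above move back one calendar day (1970-01-07 to 1970-01-06), which is consistent with the same 8-hour offset: an instant early on 1970-01-07 UTC falls on the previous calendar day when rendered in a UTC-8 zone, e.g. 1970-01-07 04:00:00 UTC reads as 1970-01-06 20:00:00 PST.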
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/timestamp_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/timestamp_1.q.out b/ql/src/test/results/clientpositive/spark/timestamp_1.q.out
index 8221d1b..fab69ec 100644
--- a/ql/src/test/results/clientpositive/spark/timestamp_1.q.out
+++ b/ql/src/test/results/clientpositive/spark/timestamp_1.q.out
@@ -64,7 +64,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -73,7 +73,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -82,7 +82,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -91,7 +91,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.293843661E9
+1.293872461E9
 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -147,7 +147,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -156,7 +156,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -165,7 +165,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -174,7 +174,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.293843661E9
+1.293872461E9
 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -230,7 +230,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -239,7 +239,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -248,7 +248,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -257,7 +257,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.2938436611E9
+1.2938724611E9
 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -313,7 +313,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -322,7 +322,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -331,7 +331,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -340,7 +340,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.2938436610001E9
+1.2938724610001E9
 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -396,7 +396,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -405,7 +405,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -414,7 +414,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -423,7 +423,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.2938436610001E9
+1.2938724610001E9
 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -479,7 +479,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -488,7 +488,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -497,7 +497,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -506,7 +506,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.293843661001E9
+1.293872461001E9
 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1

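Every cast-to-numeric result in timestamp_1.q.out grows by the same 28800 seconds (1293872461 - 1293843661 = 28800). The value under test appears to be the wall-clock timestamp 2011-01-01 01:01:01, whose epoch value is 1293843661 when the wall clock is interpreted in UTC and 1293872461 when interpreted in a UTC-8 zone. A minimal sketch of that arithmetic (plain java.time, not Hive code; the literal and the America/Los_Angeles zone are assumptions inferred from the numbers):

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class EpochShift {
      public static void main(String[] args) {
        // The wall-clock value the test presumably stores.
        LocalDateTime ts = LocalDateTime.of(2011, 1, 1, 1, 1, 1);
        // HIVE-12192 semantics: interpret the wall clock in UTC.
        long utcSeconds = ts.toEpochSecond(ZoneOffset.UTC);            // 1293843661
        // Reverted semantics: interpret it in the session/JVM zone.
        long localSeconds = ts.atZone(ZoneId.of("America/Los_Angeles"))
                              .toEpochSecond();                        // 1293872461
        System.out.println(localSeconds - utcSeconds);                 // 28800 = 8 hours
      }
    }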
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/timestamp_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/timestamp_2.q.out b/ql/src/test/results/clientpositive/spark/timestamp_2.q.out
index ddd1ef1..9a05dfe 100644
--- a/ql/src/test/results/clientpositive/spark/timestamp_2.q.out
+++ b/ql/src/test/results/clientpositive/spark/timestamp_2.q.out
@@ -64,7 +64,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -73,7 +73,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -82,7 +82,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -91,7 +91,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.293843661E9
+1.293872461E9
 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -147,7 +147,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -156,7 +156,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -165,7 +165,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -174,7 +174,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.293843661E9
+1.293872461E9
 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -230,7 +230,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -239,7 +239,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -248,7 +248,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -257,7 +257,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.2938436611E9
+1.2938724611E9
 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -313,7 +313,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -322,7 +322,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -331,7 +331,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -340,7 +340,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.2938436610001E9
+1.2938724610001E9
 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -396,7 +396,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -405,7 +405,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -414,7 +414,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -423,7 +423,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.2938436610001E9
+1.2938724610001E9
 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -479,7 +479,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -488,7 +488,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -497,7 +497,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -506,7 +506,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-1.293843661001E9
+1.293872461001E9
 PREHOOK: query: select cast(t as string) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/timestamp_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/timestamp_3.q.out b/ql/src/test/results/clientpositive/spark/timestamp_3.q.out
index 269ed86..6d59269 100644
--- a/ql/src/test/results/clientpositive/spark/timestamp_3.q.out
+++ b/ql/src/test/results/clientpositive/spark/timestamp_3.q.out
@@ -100,7 +100,7 @@ POSTHOOK: query: select cast(t as string) from timestamp_3 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_3
 #### A masked pattern was here ####
-2011-04-30 03:46:56.4485
+2011-04-29 20:46:56.4485
 PREHOOK: query: select t, sum(t), count(*), sum(t)/count(*), avg(t) from timestamp_3 group by t
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_3
@@ -109,7 +109,7 @@ POSTHOOK: query: select t, sum(t), count(*), sum(t)/count(*), avg(t) from timest
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_3
 #### A masked pattern was here ####
-2011-04-30 03:46:56.4485	1.3041352164485E9	1	1.3041352164485E9	1.3041352164485E9
+2011-04-29 20:46:56.4485	1.3041352164485E9	1	1.3041352164485E9	1.3041352164485E9
 PREHOOK: query: drop table timestamp_3
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@timestamp_3

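In timestamp_3.q.out the stored double 1.3041352164485E9 is unchanged; only its rendering as a string moves, from 2011-04-30 03:46:56.4485 (the UTC reading of that epoch value) to 2011-04-29 20:46:56.4485, seven hours earlier. The 7-hour rather than 8-hour gap is consistent with daylight saving time (America/Los_Angeles is UTC-7 in late April), and the unix_timestamp changes in timestamp_udf.q.out below show the same 25200-second shift: 1304690889 - 1304665689 = 25200 s = 7 h.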
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/timestamp_udf.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/timestamp_udf.q.out b/ql/src/test/results/clientpositive/spark/timestamp_udf.q.out
index 201e4da..452f366 100644
--- a/ql/src/test/results/clientpositive/spark/timestamp_udf.q.out
+++ b/ql/src/test/results/clientpositive/spark/timestamp_udf.q.out
@@ -54,7 +54,7 @@ POSTHOOK: query: select unix_timestamp(t), year(t), month(t), day(t), dayofmonth
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_udf_n0
 #### A masked pattern was here ####
-1304665689	2011	5	6	6	18	7	8	9	2011-05-06
+1304690889	2011	5	6	6	18	7	8	9	2011-05-06
 PREHOOK: query: select date_add(t, 5), date_sub(t, 10)
   from timestamp_udf_n0
 PREHOOK: type: QUERY
@@ -155,7 +155,7 @@ POSTHOOK: query: select unix_timestamp(t), year(t), month(t), day(t), dayofmonth
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_udf_string
 #### A masked pattern was here ####
-1304665689	2011	5	6	6	18	7	8	9	2011-05-06
+1304690889	2011	5	6	6	18	7	8	9	2011-05-06
 PREHOOK: query: select date_add(t, 5), date_sub(t, 10)  from timestamp_udf_string
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_udf_string

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/vector_between_in.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vector_between_in.q.out b/ql/src/test/results/clientpositive/spark/vector_between_in.q.out
index 5909342..8390a6a 100644
--- a/ql/src/test/results/clientpositive/spark/vector_between_in.q.out
+++ b/ql/src/test/results/clientpositive/spark/vector_between_in.q.out
@@ -843,7 +843,7 @@ POSTHOOK: query: SELECT COUNT(*) FROM decimal_date_test WHERE cdate NOT IN (CAST
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_date_test
 #### A masked pattern was here ####
-6022
+6026
 PREHOOK: query: SELECT cdecimal1 FROM decimal_date_test WHERE cdecimal1 IN (2365.8945945946, 881.0135135135, -3367.6517567568) ORDER BY cdecimal1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_date_test
@@ -887,7 +887,36 @@ POSTHOOK: Input: default@decimal_date_test
 1969-12-30
 1969-12-30
 1969-12-30
-1969-12-30
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
 1969-12-31
 1969-12-31
 1969-12-31
@@ -916,37 +945,8 @@ POSTHOOK: Input: default@decimal_date_test
 1970-01-01
 1970-01-01
 1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
+1970-01-02
+1970-01-02
 1970-01-02
 1970-01-02
 1970-01-02
@@ -970,40 +970,40 @@ POSTHOOK: query: SELECT cdate FROM decimal_date_test WHERE cdate NOT BETWEEN CAS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_date_test
 #### A masked pattern was here ####
-1968-04-07
+1968-04-06
 1968-04-08
 1968-04-09
 1968-04-13
-1968-04-16
-1968-04-16
+1968-04-15
+1968-04-15
 1968-04-18
 1968-04-22
+1968-04-24
 1968-04-25
-1968-04-25
-1968-04-27
-1968-04-27
-1968-04-27
+1968-04-26
+1968-04-26
+1968-04-26
+1968-04-28
 1968-04-28
 1968-04-28
 1968-04-28
-1968-04-29
 1968-04-29
 1968-04-30
 1971-09-02
-1971-09-05
+1971-09-04
 1971-09-06
 1971-09-06
 1971-09-06
 1971-09-09
 1971-09-09
 1971-09-15
+1971-09-17
 1971-09-18
-1971-09-19
+1971-09-21
 1971-09-21
 1971-09-21
 1971-09-22
 1971-09-22
-1971-09-23
 1971-09-25
 PREHOOK: query: SELECT cdecimal1 FROM decimal_date_test WHERE cdecimal1 BETWEEN -20 AND 45.9918918919 ORDER BY cdecimal1
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/vector_data_types.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vector_data_types.q.out b/ql/src/test/results/clientpositive/spark/vector_data_types.q.out
index 6eacd69..fc44d8b 100644
--- a/ql/src/test/results/clientpositive/spark/vector_data_types.q.out
+++ b/ql/src/test/results/clientpositive/spark/vector_data_types.q.out
@@ -205,7 +205,7 @@ FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1korc_n1
 #### A masked pattern was here ####
--25838728092
+-17045922556
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION select t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i LIMIT 20
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION select t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i LIMIT 20
@@ -445,4 +445,4 @@ FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1korc_n1
 #### A masked pattern was here ####
--25838728092
+-17045922556

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/vectorization_13.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vectorization_13.q.out b/ql/src/test/results/clientpositive/spark/vectorization_13.q.out
index b32b533..241098e 100644
--- a/ql/src/test/results/clientpositive/spark/vectorization_13.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorization_13.q.out
@@ -24,8 +24,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -57,8 +57,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -92,8 +92,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28789.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28788.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-                    predicate: (((UDFToDouble(ctimestamp1) > -28789.0D) and (UDFToDouble(ctimestamp2) <> -28788.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 11.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val 12.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+                    predicate: (((UDFToDouble(ctimestamp1) > 11.0D) and (UDFToDouble(ctimestamp2) <> 12.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
                     Statistics: Num rows: 2730 Data size: 646063 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -268,8 +268,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -302,8 +302,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -377,8 +377,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -410,8 +410,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -444,8 +444,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28801.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28801.336)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-                    predicate: (((UDFToDouble(ctimestamp1) > -28801.388D) and (UDFToDouble(ctimestamp2) <> -28801.336D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -1.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -1.3359999999999999)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+                    predicate: (((UDFToDouble(ctimestamp1) > -1.388D) and (UDFToDouble(ctimestamp2) <> -1.3359999999999999D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
                     Statistics: Num rows: 2730 Data size: 646063 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -596,8 +596,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -630,8 +630,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16


[33/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

This reverts commit b8fda81c44dd5950c09f277672691025af2369b0.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/33088de0
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/33088de0
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/33088de0

Branch: refs/heads/master
Commit: 33088de0efd40a456eb4b5a327d57620c11128ff
Parents: 2277661
Author: Matt McCline <mm...@hortonworks.com>
Authored: Mon Jun 25 05:37:43 2018 -0500
Committer: Matt McCline <mm...@hortonworks.com>
Committed: Mon Jun 25 05:37:43 2018 -0500

----------------------------------------------------------------------
 .../hive/accumulo/mr/TestHiveAccumuloTypes.java |  23 +-
 .../predicate/TestAccumuloRangeGenerator.java   |   2 +-
 .../apache/hadoop/hive/common/type/Date.java    | 181 -----
 .../hadoop/hive/common/type/Timestamp.java      | 235 ------
 .../hive/common/type/TimestampTZUtil.java       |  16 +-
 .../hadoop/hive/common/type/TimestampUtils.java | 171 -----
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   2 +-
 .../org/apache/hive/common/util/DateParser.java |  24 +-
 .../org/apache/hive/common/util/DateUtils.java  |   2 -
 .../hive/common/util/TimestampParser.java       |  36 +-
 .../TestHiveDecimalOrcSerializationUtils.java   |   8 +
 .../hive/common/type/TestTimestampTZ.java       |   1 +
 .../apache/hive/common/util/TestDateParser.java |   7 +-
 .../hive/common/util/TestTimestampParser.java   |  27 +-
 .../hadoop/hive/druid/serde/DruidSerDe.java     |  77 +-
 .../hadoop/hive/druid/serde/TestDruidSerDe.java |  27 +-
 .../hadoop/hive/hbase/HBaseRowSerializer.java   |   2 +-
 .../hive/hbase/HiveHBaseInputFormatUtil.java    |   3 +-
 .../apache/hadoop/hive/hbase/LazyHBaseRow.java  |   4 +-
 .../test/results/positive/hbase_timestamp.q.out |  64 +-
 .../org/apache/hive/hcatalog/data/DataType.java |   4 +-
 .../apache/hive/hcatalog/data/HCatRecord.java   |   4 +-
 .../apache/hive/hcatalog/data/JsonSerDe.java    |   4 +-
 .../apache/hive/hcatalog/data/ReaderWriter.java |  15 +-
 .../hcatalog/data/TestDefaultHCatRecord.java    |   8 +-
 .../hive/hcatalog/data/TestJsonSerDe.java       |   6 +-
 .../hive/hcatalog/pig/HCatBaseStorer.java       |   8 +-
 .../apache/hive/hcatalog/pig/HCatLoader.java    |   1 -
 .../apache/hive/hcatalog/pig/PigHCatUtil.java   |   9 +-
 .../hcatalog/pig/AbstractHCatLoaderTest.java    |  20 +-
 .../hcatalog/pig/AbstractHCatStorerTest.java    |  16 +-
 .../vectorization/ColumnVectorGenUtil.java      |  11 +-
 .../apache/hive/jdbc/BaseJdbcWithMiniLlap.java  |   4 +-
 .../hive/jdbc/TestJdbcWithMiniLlapArrow.java    |   4 +-
 .../test/resources/testconfiguration.properties |   1 -
 .../hive/llap/io/TestChunkedInputStream.java    |   4 +-
 .../io/decode/GenericColumnVectorProducer.java  |   2 +-
 .../llap/io/decode/OrcEncodedDataConsumer.java  |   3 +-
 .../DTIColumnCompareScalar.txt                  |   4 +-
 .../DTIScalarCompareColumn.txt                  |   4 +-
 ...eColumnArithmeticIntervalYearMonthColumn.txt |  28 +-
 ...eColumnArithmeticIntervalYearMonthScalar.txt |  26 +-
 .../DateColumnArithmeticTimestampColumn.txt     |  14 +-
 .../DateColumnArithmeticTimestampScalar.txt     |  16 +-
 ...eScalarArithmeticIntervalYearMonthColumn.txt |  16 +-
 .../DateScalarArithmeticTimestampColumn.txt     |   6 +-
 .../FilterColumnBetweenDynamicValue.txt         |   2 +-
 ...terTimestampColumnCompareTimestampScalar.txt |   2 +-
 ...terTimestampScalarCompareTimestampColumn.txt |   2 +-
 ...ervalYearMonthColumnArithmeticDateColumn.txt |  28 +-
 ...ervalYearMonthColumnArithmeticDateScalar.txt |  16 +-
 ...YearMonthColumnArithmeticTimestampScalar.txt |   2 +-
 ...ervalYearMonthScalarArithmeticDateColumn.txt |  26 +-
 .../LongDoubleColumnCompareTimestampScalar.txt  |   2 +-
 .../TimestampColumnArithmeticDateColumn.txt     |  14 +-
 .../TimestampColumnArithmeticDateScalar.txt     |   6 +-
 ...TimestampColumnArithmeticTimestampScalar.txt |   2 +-
 .../TimestampColumnCompareTimestampScalar.txt   |   2 +-
 .../TimestampScalarArithmeticDateColumn.txt     |  16 +-
 ...pScalarArithmeticIntervalYearMonthColumn.txt |   2 +-
 ...TimestampScalarArithmeticTimestampColumn.txt |   2 +-
 .../TimestampScalarCompareTimestampColumn.txt   |   2 +-
 .../UDAFTemplates/VectorUDAFAvgTimestamp.txt    |   2 +-
 .../UDAFTemplates/VectorUDAFMinMaxTimestamp.txt |   2 +-
 .../hive/ql/exec/ColumnStatsUpdateTask.java     |   4 +-
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |  14 +-
 .../hive/ql/exec/vector/TimestampUtils.java     |  29 +-
 .../hive/ql/exec/vector/VectorAssignRow.java    |  24 +-
 .../hive/ql/exec/vector/VectorBatchDebug.java   |   7 +-
 .../exec/vector/VectorColumnAssignFactory.java  |  19 +-
 .../ql/exec/vector/VectorDeserializeRow.java    |  14 +-
 .../hive/ql/exec/vector/VectorExtractRow.java   |  15 +-
 .../ql/exec/vector/VectorHashKeyWrapper.java    |   5 +-
 .../hive/ql/exec/vector/VectorSerializeRow.java |   6 +-
 .../ql/exec/vector/VectorizationContext.java    |  27 +-
 .../ql/exec/vector/VectorizedBatchUtil.java     |  41 +-
 .../ql/exec/vector/VectorizedRowBatchCtx.java   |  25 +-
 .../vector/expressions/CastDateToString.java    |  13 +-
 .../vector/expressions/CastDateToTimestamp.java |   4 +-
 .../exec/vector/expressions/CastLongToDate.java |   1 +
 .../vector/expressions/CastLongToTimestamp.java |   2 +
 .../CastMillisecondsLongToTimestamp.java        |   2 +
 .../vector/expressions/CastStringToDate.java    |  10 +-
 .../vector/expressions/CastTimestampToChar.java |  54 --
 .../vector/expressions/CastTimestampToDate.java |   4 +-
 .../expressions/CastTimestampToDecimal.java     |   7 +-
 .../expressions/CastTimestampToDouble.java      |   2 +
 .../expressions/CastTimestampToString.java      |  68 --
 .../expressions/CastTimestampToVarChar.java     |  55 --
 .../expressions/ConstantVectorExpression.java   |   3 +-
 .../expressions/DateColSubtractDateColumn.java  |  29 +-
 .../expressions/DateColSubtractDateScalar.java  |  22 +-
 .../DateScalarSubtractDateColumn.java           |  18 +-
 .../vector/expressions/DecimalColumnInList.java |   1 +
 .../DynamicValueVectorExpression.java           |   6 +-
 .../expressions/TimestampToStringUnaryUDF.java  |   2 +-
 .../expressions/VectorExpressionWriter.java     |   4 +-
 .../VectorExpressionWriterFactory.java          |  23 +-
 .../expressions/VectorUDFDateAddColCol.java     |  19 +-
 .../expressions/VectorUDFDateAddColScalar.java  |  11 +-
 .../expressions/VectorUDFDateAddScalarCol.java  |  19 +-
 .../expressions/VectorUDFDateDiffColCol.java    |  19 +-
 .../expressions/VectorUDFDateDiffColScalar.java |  12 +-
 .../expressions/VectorUDFDateDiffScalarCol.java |  10 +-
 .../vector/expressions/VectorUDFDateLong.java   |   8 +
 .../vector/expressions/VectorUDFDateString.java |  11 +
 .../expressions/VectorUDFDateTimestamp.java     |   9 +
 .../expressions/VectorUDFDayOfWeekString.java   |   4 +-
 .../VectorUDFTimestampFieldDate.java            |   8 +-
 .../VectorUDFTimestampFieldString.java          |   4 +-
 .../VectorUDFTimestampFieldTimestamp.java       |   4 +-
 .../expressions/VectorUDFUnixTimeStampDate.java |   6 +-
 .../VectorUDFUnixTimeStampString.java           |  12 +-
 .../ql/exec/vector/udf/VectorUDFAdaptor.java    |  20 +
 .../hadoop/hive/ql/io/BatchToRowReader.java     |  34 +-
 .../apache/hadoop/hive/ql/io/orc/OrcFile.java   |  12 -
 .../hadoop/hive/ql/io/orc/RecordReaderImpl.java |  33 +-
 .../hadoop/hive/ql/io/orc/WriterImpl.java       |  15 +-
 .../ql/io/parquet/convert/ETypeConverter.java   |  18 +-
 .../serde/primitive/ParquetStringInspector.java |   8 +-
 .../ql/io/parquet/timestamp/NanoTimeUtils.java  |   8 +-
 .../parquet/vector/ParquetDataColumnReader.java |   2 +-
 .../vector/ParquetDataColumnReaderFactory.java  |   2 +-
 .../vector/VectorizedPrimitiveColumnReader.java |   5 +-
 .../ql/io/parquet/write/DataWritableWriter.java |  12 +-
 .../formatting/MetaDataFormatUtils.java         |   4 +-
 .../hive/ql/optimizer/StatsOptimizer.java       |   4 +-
 .../calcite/translator/ExprNodeConverter.java   |   4 +-
 .../calcite/translator/RexNodeConverter.java    |  17 +-
 .../hive/ql/parse/BaseSemanticAnalyzer.java     |  20 +-
 .../hive/ql/parse/TypeCheckProcFactory.java     |   5 +-
 .../ql/stats/ColumnStatisticsObjTranslator.java |   6 +-
 .../apache/hadoop/hive/ql/udf/UDFDateFloor.java |  19 +-
 .../hadoop/hive/ql/udf/UDFDayOfMonth.java       | 136 ++--
 .../apache/hadoop/hive/ql/udf/UDFDayOfWeek.java |  29 +-
 .../hadoop/hive/ql/udf/UDFFromUnixTime.java     |   2 -
 .../org/apache/hadoop/hive/ql/udf/UDFHour.java  | 132 ++--
 .../apache/hadoop/hive/ql/udf/UDFMinute.java    | 132 ++--
 .../org/apache/hadoop/hive/ql/udf/UDFMonth.java | 135 ++--
 .../apache/hadoop/hive/ql/udf/UDFSecond.java    | 136 ++--
 .../apache/hadoop/hive/ql/udf/UDFToBoolean.java |   8 +-
 .../apache/hadoop/hive/ql/udf/UDFToByte.java    |   4 +-
 .../apache/hadoop/hive/ql/udf/UDFToDouble.java  |   4 +-
 .../apache/hadoop/hive/ql/udf/UDFToFloat.java   |   4 +-
 .../apache/hadoop/hive/ql/udf/UDFToInteger.java |   4 +-
 .../apache/hadoop/hive/ql/udf/UDFToLong.java    |   4 +-
 .../apache/hadoop/hive/ql/udf/UDFToShort.java   |   5 +-
 .../apache/hadoop/hive/ql/udf/UDFToString.java  |   8 +-
 .../hadoop/hive/ql/udf/UDFWeekOfYear.java       |  32 +-
 .../org/apache/hadoop/hive/ql/udf/UDFYear.java  | 135 ++--
 .../hadoop/hive/ql/udf/generic/BaseMaskUDF.java |   9 +-
 .../ql/udf/generic/GenericUDAFBloomFilter.java  |   9 +-
 .../ql/udf/generic/GenericUDAFComputeStats.java |  16 +-
 .../hadoop/hive/ql/udf/generic/GenericUDF.java  |  79 +-
 .../ql/udf/generic/GenericUDFAddMonths.java     |  39 +-
 .../ql/udf/generic/GenericUDFCurrentDate.java   |  15 +-
 .../udf/generic/GenericUDFCurrentTimestamp.java |  15 +-
 .../hive/ql/udf/generic/GenericUDFDate.java     |  19 +-
 .../hive/ql/udf/generic/GenericUDFDateAdd.java  |  21 +-
 .../hive/ql/udf/generic/GenericUDFDateDiff.java |  31 +-
 .../ql/udf/generic/GenericUDFDateFormat.java    |  24 +-
 .../hive/ql/udf/generic/GenericUDFDateSub.java  |   4 +-
 .../udf/generic/GenericUDFFromUtcTimestamp.java |  47 +-
 .../ql/udf/generic/GenericUDFInBloomFilter.java |   9 +-
 .../hive/ql/udf/generic/GenericUDFLastDay.java  |  24 +-
 .../hive/ql/udf/generic/GenericUDFMask.java     |  10 +-
 .../hive/ql/udf/generic/GenericUDFMaskHash.java |   3 +-
 .../ql/udf/generic/GenericUDFMonthsBetween.java |  29 +-
 .../hive/ql/udf/generic/GenericUDFNextDay.java  |  31 +-
 .../ql/udf/generic/GenericUDFOPDTIMinus.java    |  16 +-
 .../ql/udf/generic/GenericUDFOPDTIPlus.java     |  16 +-
 .../hive/ql/udf/generic/GenericUDFQuarter.java  |   8 +-
 .../hive/ql/udf/generic/GenericUDFReflect2.java |   6 +-
 .../ql/udf/generic/GenericUDFTimestamp.java     |   2 +-
 .../udf/generic/GenericUDFToUnixTimeStamp.java  |  10 +-
 .../hive/ql/udf/generic/GenericUDFTrunc.java    |  59 +-
 .../ql/udf/generic/GenericUDFUnixTimeStamp.java |   3 +-
 .../hive/ql/udf/ptf/ValueBoundaryScanner.java   |  11 +-
 .../hadoop/hive/ql/util/DateTimeMath.java       | 304 ++------
 .../hive/ql/exec/TestFunctionRegistry.java      |  16 +-
 .../hadoop/hive/ql/exec/TestUtilities.java      |   5 +-
 .../ql/exec/persistence/TestRowContainer.java   |   6 +-
 .../TestTimestampWritableAndColumnVector.java   |   4 +-
 .../exec/vector/TestVectorGroupByOperator.java  |  12 +-
 .../exec/vector/TestVectorizationContext.java   |   2 +-
 .../ql/exec/vector/VectorRandomRowSource.java   |  15 +-
 .../hive/ql/exec/vector/VectorVerifyFast.java   |  20 +-
 .../expressions/TestVectorDateExpressions.java  |  58 +-
 .../TestVectorExpressionWriters.java            |  16 +-
 .../TestVectorGenericDateExpressions.java       |  17 +-
 .../expressions/TestVectorMathFunctions.java    |  10 +-
 .../TestVectorTimestampExpressions.java         |  96 +--
 .../expressions/TestVectorTimestampExtract.java |  76 +-
 .../vector/expressions/TestVectorTypeCasts.java |  23 +-
 .../exec/vector/mapjoin/fast/VerifyFastRow.java |  21 +-
 .../vector/util/VectorizedRowGroupGenUtil.java  |   6 +-
 .../batchgen/VectorColumnGroupGenerator.java    |   6 +-
 .../io/arrow/TestArrowColumnarBatchSerDe.java   |  26 +-
 .../hive/ql/io/orc/TestInputOutputFormat.java   |   8 +-
 .../hadoop/hive/ql/io/orc/TestOrcFile.java      |  37 +-
 .../hive/ql/io/orc/TestOrcSerDeStats.java       |  16 +-
 .../hive/ql/io/orc/TestVectorizedORCReader.java |  29 +-
 .../parquet/VectorizedColumnReaderTestBase.java |  23 +-
 .../serde/TestParquetTimestampUtils.java        |  42 +-
 .../hive/ql/parse/TestSemanticAnalyzer.java     |   6 +-
 .../ql/udf/TestUDFDateFormatGranularity.java    |  39 +-
 .../ql/udf/generic/TestGenericUDFAddMonths.java |  52 +-
 .../hive/ql/udf/generic/TestGenericUDFDate.java |  38 +-
 .../ql/udf/generic/TestGenericUDFDateAdd.java   |  42 +-
 .../ql/udf/generic/TestGenericUDFDateDiff.java  |  26 +-
 .../udf/generic/TestGenericUDFDateFormat.java   |  44 +-
 .../ql/udf/generic/TestGenericUDFDateSub.java   |  42 +-
 .../generic/TestGenericUDFFromUtcTimestamp.java |   7 +-
 .../ql/udf/generic/TestGenericUDFGreatest.java  |  11 +-
 .../ql/udf/generic/TestGenericUDFLastDay.java   |  63 +-
 .../ql/udf/generic/TestGenericUDFLeast.java     |  11 +-
 .../generic/TestGenericUDFMonthsBetween.java    |  41 +-
 .../ql/udf/generic/TestGenericUDFNextDay.java   |  32 +-
 .../ql/udf/generic/TestGenericUDFNullif.java    |   8 +-
 .../ql/udf/generic/TestGenericUDFOPMinus.java   |  33 +-
 .../ql/udf/generic/TestGenericUDFOPPlus.java    |  57 +-
 .../ql/udf/generic/TestGenericUDFQuarter.java   |  39 +-
 .../ql/udf/generic/TestGenericUDFSortArray.java |  12 +-
 .../generic/TestGenericUDFToUnixTimestamp.java  |  25 +-
 .../ql/udf/generic/TestGenericUDFTrunc.java     | 171 ++---
 .../hadoop/hive/ql/util/TestDateTimeMath.java   |  26 +-
 .../test/queries/clientnegative/date_literal3.q |   3 +
 ql/src/test/queries/clientpositive/date_udf.q   |   2 +-
 .../queries/clientpositive/druid_timestamptz2.q |  60 --
 .../test/queries/clientpositive/localtimezone.q |  42 +-
 .../queries/clientpositive/localtimezone2.q     |  55 --
 .../queries/clientpositive/parquet_ppd_char.q   |   1 -
 .../clientpositive/parquet_vectorization_13.q   |  16 +-
 .../clientpositive/parquet_vectorization_7.q    |  16 +-
 .../queries/clientpositive/singletsinsertorc.q  |   6 -
 .../clientpositive/timestamp_comparison2.q      |   4 +-
 .../test/queries/clientpositive/timestamp_dst.q |   2 -
 .../test/queries/clientpositive/udf_reflect2.q  |   4 +-
 .../queries/clientpositive/vectorization_13.q   |  16 +-
 .../queries/clientpositive/vectorization_7.q    |  16 +-
 .../clientpositive/vectorization_decimal_date.q |   5 +-
 .../results/clientnegative/date_literal3.q.out  |   1 +
 .../beeline/udf_unix_timestamp.q.out            |   6 +-
 .../clientpositive/cast_on_constant.q.out       |   8 +-
 .../confirm_initial_tbl_stats.q.out             |   6 +-
 .../results/clientpositive/constprog_type.q.out |   4 +-
 .../test/results/clientpositive/date_udf.q.out  |   6 +-
 .../test/results/clientpositive/decimal_1.q.out |   2 +-
 .../test/results/clientpositive/decimal_2.q.out |   2 +-
 .../druid/druid_timestamptz2.q.out              | 308 --------
 ql/src/test/results/clientpositive/foldts.q.out |   2 +-
 .../clientpositive/infer_join_preds.q.out       |   2 +-
 .../results/clientpositive/interval_alt.q.out   |   2 +-
 .../clientpositive/interval_arithmetic.q.out    |   8 +-
 .../llap/acid_vectorization_original.q.out      |  14 +-
 .../llap/current_date_timestamp.q.out           |  34 +-
 .../llap/default_constraint.q.out               |   2 +-
 .../insert_values_orig_table_use_metadata.q.out |  24 +-
 .../clientpositive/llap/llap_uncompressed.q.out |   2 +-
 .../clientpositive/llap/orc_analyze.q.out       |  34 +-
 .../results/clientpositive/llap/orc_llap.q.out  |   4 +-
 .../clientpositive/llap/orc_merge11.q.out       |  44 +-
 .../clientpositive/llap/orc_merge5.q.out        |   4 +-
 .../clientpositive/llap/orc_merge6.q.out        |   8 +-
 .../clientpositive/llap/orc_merge7.q.out        |   4 +-
 .../clientpositive/llap/orc_ppd_date.q.out      |  50 +-
 .../clientpositive/llap/orc_ppd_decimal.q.out   | 100 +--
 .../clientpositive/llap/orc_ppd_timestamp.q.out |  48 +-
 .../clientpositive/llap/orc_ppd_varchar.q.out   |  32 +-
 .../clientpositive/llap/results_cache_2.q.out   |   6 +-
 .../llap/schema_evol_orc_acid_part.q.out        |   8 +-
 .../schema_evol_orc_acid_part_llap_io.q.out     |   8 +-
 .../llap/schema_evol_orc_acid_table.q.out       |   8 +-
 .../schema_evol_orc_acid_table_llap_io.q.out    |   8 +-
 .../schema_evol_orc_acidvec_part_llap_io.q.out  |   8 +-
 .../llap/schema_evol_orc_acidvec_table.q.out    |   8 +-
 .../schema_evol_orc_acidvec_table_llap_io.q.out |   8 +-
 .../llap/schema_evol_orc_nonvec_part.q.out      |   8 +-
 ...chema_evol_orc_nonvec_part_all_complex.q.out |  16 +-
 ...ol_orc_nonvec_part_all_complex_llap_io.q.out |  16 +-
 ...ema_evol_orc_nonvec_part_all_primitive.q.out |  30 +-
 ..._orc_nonvec_part_all_primitive_llap_io.q.out |  30 +-
 .../schema_evol_orc_nonvec_part_llap_io.q.out   |   8 +-
 .../llap/schema_evol_orc_nonvec_table.q.out     |   8 +-
 .../schema_evol_orc_nonvec_table_llap_io.q.out  |   8 +-
 .../llap/schema_evol_orc_vec_part.q.out         |   8 +-
 .../schema_evol_orc_vec_part_all_complex.q.out  |  16 +-
 ..._evol_orc_vec_part_all_complex_llap_io.q.out |  16 +-
 ...schema_evol_orc_vec_part_all_primitive.q.out |  30 +-
 ...vol_orc_vec_part_all_primitive_llap_io.q.out |  30 +-
 .../llap/schema_evol_orc_vec_table.q.out        |   8 +-
 .../schema_evol_orc_vec_table_llap_io.q.out     |   8 +-
 ...ma_evol_text_nonvec_part_all_primitive.q.out |  24 +-
 ...text_nonvec_part_all_primitive_llap_io.q.out |  24 +-
 ...chema_evol_text_vec_part_all_primitive.q.out |  24 +-
 ...ol_text_vec_part_all_primitive_llap_io.q.out |  24 +-
 ...ma_evol_text_vecrow_part_all_primitive.q.out |  24 +-
 .../clientpositive/llap/singletsinsertorc.q.out |  28 -
 .../clientpositive/llap/update_all_types.q.out  |   4 +-
 .../llap/vector_aggregate_9.q.out               |   2 +-
 .../clientpositive/llap/vector_between_in.q.out |  88 +--
 .../llap/vector_binary_join_groupby.q.out       |   2 +-
 .../llap/vector_case_when_2.q.out               | 118 +--
 .../clientpositive/llap/vector_data_types.q.out |   4 +-
 .../clientpositive/llap/vector_decimal_1.q.out  |   2 +-
 .../clientpositive/llap/vector_decimal_2.q.out  |   6 +-
 .../llap/vector_decimal_cast.q.out              |  40 +-
 .../llap/vector_decimal_expressions.q.out       |  40 +-
 .../clientpositive/llap/vector_interval_1.q.out |  10 +-
 .../clientpositive/llap/vector_interval_2.q.out |  12 +-
 .../llap/vector_interval_arithmetic.q.out       | 272 +++----
 .../llap/vector_partitioned_date_time.q.out     |  56 +-
 .../llap/vector_ptf_part_simple.q.out           | 108 +--
 .../llap/vector_udf_adaptor_1.q.out             |  20 +-
 .../clientpositive/llap/vectorization_13.q.out  |  40 +-
 .../clientpositive/llap/vectorization_7.q.out   |  40 +-
 .../llap/vectorization_decimal_date.q.out       |  34 +-
 .../llap/vectorization_short_regress.q.out      |   8 +-
 .../clientpositive/llap/vectorized_casts.q.out  |  54 +-
 .../llap/vectorized_date_funcs.q.out            | 554 +++++++-------
 .../llap/vectorized_timestamp.q.out             |  12 +-
 .../llap/vectorized_timestamp_funcs.q.out       |  58 +-
 .../llap/vectorized_timestamp_ints_casts.q.out  | 104 +--
 .../clientpositive/llap_uncompressed.q.out      |   2 +-
 .../results/clientpositive/localtimezone.q.out  |  84 +--
 .../results/clientpositive/localtimezone2.q.out | 148 ----
 .../results/clientpositive/orc_file_dump.q.out  |  12 +-
 .../results/clientpositive/orc_merge11.q.out    |  44 +-
 .../results/clientpositive/orc_merge12.q.out    | 644 ++++++++--------
 .../results/clientpositive/orc_merge5.q.out     |  24 +-
 .../results/clientpositive/orc_merge6.q.out     |  48 +-
 .../clientpositive/orc_merge_incompat1.q.out    |  10 +-
 .../clientpositive/orc_merge_incompat2.q.out    |  18 +-
 .../results/clientpositive/orc_ppd_char.q.out   |  36 +-
 .../parquet_vectorization_13.q.out              |  40 +-
 .../parquet_vectorization_7.q.out               |  40 +-
 .../parquet_vectorization_decimal_date.q.out    |  20 +-
 .../clientpositive/partition_timestamp.q.out    |  60 +-
 .../clientpositive/partition_timestamp2.q.out   | 250 +++----
 .../clientpositive/perf/spark/query12.q.out     |   2 +-
 .../clientpositive/perf/spark/query16.q.out     |   2 +-
 .../clientpositive/perf/spark/query20.q.out     |   2 +-
 .../clientpositive/perf/spark/query21.q.out     |   2 +-
 .../clientpositive/perf/spark/query32.q.out     |   4 +-
 .../clientpositive/perf/spark/query37.q.out     |   2 +-
 .../clientpositive/perf/spark/query40.q.out     |   2 +-
 .../clientpositive/perf/spark/query5.q.out      |   4 +-
 .../clientpositive/perf/spark/query77.q.out     |  10 +-
 .../clientpositive/perf/spark/query80.q.out     |   6 +-
 .../clientpositive/perf/spark/query82.q.out     |   2 +-
 .../clientpositive/perf/spark/query92.q.out     |   4 +-
 .../clientpositive/perf/spark/query94.q.out     |   2 +-
 .../clientpositive/perf/spark/query95.q.out     |   2 +-
 .../clientpositive/perf/spark/query98.q.out     |   2 +-
 .../clientpositive/perf/tez/query12.q.out       |   2 +-
 .../clientpositive/perf/tez/query16.q.out       |   2 +-
 .../clientpositive/perf/tez/query20.q.out       |   2 +-
 .../clientpositive/perf/tez/query21.q.out       |   2 +-
 .../clientpositive/perf/tez/query32.q.out       |   2 +-
 .../clientpositive/perf/tez/query37.q.out       |   2 +-
 .../clientpositive/perf/tez/query40.q.out       |   2 +-
 .../clientpositive/perf/tez/query5.q.out        |   2 +-
 .../clientpositive/perf/tez/query77.q.out       |   2 +-
 .../clientpositive/perf/tez/query80.q.out       |   2 +-
 .../clientpositive/perf/tez/query82.q.out       |   2 +-
 .../clientpositive/perf/tez/query92.q.out       |   2 +-
 .../clientpositive/perf/tez/query94.q.out       |   2 +-
 .../clientpositive/perf/tez/query95.q.out       |   2 +-
 .../clientpositive/perf/tez/query98.q.out       |   2 +-
 .../clientpositive/singletsinsertorc.q.out      |  28 -
 .../results/clientpositive/spark/date_udf.q.out |   6 +-
 .../clientpositive/spark/orc_merge5.q.out       |  32 +-
 .../clientpositive/spark/orc_merge6.q.out       |  48 +-
 .../clientpositive/spark/orc_merge7.q.out       |  34 +-
 .../clientpositive/spark/orc_merge8.q.out       |  10 +-
 .../clientpositive/spark/orc_merge9.q.out       |  16 +-
 .../spark/orc_merge_incompat1.q.out             |  22 +-
 .../spark/orc_merge_incompat2.q.out             |  34 +-
 .../spark/parquet_vectorization_13.q.out        |  40 +-
 .../spark/parquet_vectorization_7.q.out         |  40 +-
 .../parquet_vectorization_decimal_date.q.out    |  20 +-
 .../clientpositive/spark/timestamp_1.q.out      |  48 +-
 .../clientpositive/spark/timestamp_2.q.out      |  48 +-
 .../clientpositive/spark/timestamp_3.q.out      |   4 +-
 .../clientpositive/spark/timestamp_udf.q.out    |   4 +-
 .../spark/vector_between_in.q.out               |  88 +--
 .../spark/vector_data_types.q.out               |   4 +-
 .../clientpositive/spark/vectorization_13.q.out |  40 +-
 .../spark/vectorization_decimal_date.q.out      |  34 +-
 .../spark/vectorization_short_regress.q.out     |  28 +-
 .../spark/vectorized_timestamp_funcs.q.out      |  58 +-
 .../tez/acid_vectorization_original.q.out       | 740 -------------------
 .../tez/acid_vectorization_original_tez.q.out   |  38 +-
 .../clientpositive/tez/orc_merge12.q.out        | 644 ++++++++--------
 .../test/results/clientpositive/timestamp.q.out |   6 +-
 .../results/clientpositive/timestamp_1.q.out    |  48 +-
 .../results/clientpositive/timestamp_2.q.out    |  48 +-
 .../results/clientpositive/timestamp_3.q.out    |   4 +-
 .../clientpositive/timestamp_comparison2.q.out  |  12 +-
 .../results/clientpositive/timestamp_dst.q.out  |   9 -
 .../clientpositive/timestamp_formats.q.out      |  84 +--
 .../clientpositive/timestamp_ints_casts.q.out   | 104 +--
 .../clientpositive/timestamp_literal.q.out      |   2 +-
 .../results/clientpositive/timestamp_udf.q.out  |   4 +-
 .../results/clientpositive/timestamptz_3.q.out  |   4 +-
 .../results/clientpositive/typechangetest.q.out |  36 +-
 ql/src/test/results/clientpositive/udf5.q.out   |   4 +-
 .../clientpositive/udf_folder_constants.q.out   |   4 +-
 .../clientpositive/udf_from_utc_timestamp.q.out |   4 +-
 .../test/results/clientpositive/udf_mask.q.out  |   2 +-
 .../clientpositive/udf_mask_first_n.q.out       |   2 +-
 .../clientpositive/udf_mask_last_n.q.out        |   2 +-
 .../clientpositive/udf_mask_show_first_n.q.out  |   2 +-
 .../clientpositive/udf_mask_show_last_n.q.out   |   2 +-
 .../results/clientpositive/udf_reflect2.q.out   |  20 +-
 .../clientpositive/udf_to_unix_timestamp.q.out  |   6 +-
 .../clientpositive/udf_to_utc_timestamp.q.out   |   2 +-
 .../clientpositive/udf_unix_timestamp.q.out     |   6 +-
 .../clientpositive/update_all_types.q.out       |   4 +-
 .../clientpositive/vector_aggregate_9.q.out     |   2 +-
 .../vector_binary_join_groupby.q.out            |   2 +-
 .../clientpositive/vector_case_when_2.q.out     | 118 +--
 .../clientpositive/vector_data_types.q.out      |   4 +-
 .../clientpositive/vector_decimal_1.q.out       |   2 +-
 .../clientpositive/vector_decimal_cast.q.out    |  40 +-
 .../vector_decimal_expressions.q.out            |  40 +-
 .../clientpositive/vector_interval_1.q.out      |  10 +-
 .../vector_interval_arithmetic.q.out            | 272 +++----
 .../clientpositive/vectorization_13.q.out       |  40 +-
 .../clientpositive/vectorization_7.q.out        |  40 +-
 .../vectorization_decimal_date.q.out            |  34 +-
 .../clientpositive/vectorized_casts.q.out       |  54 +-
 .../clientpositive/vectorized_date_funcs.q.out  | 554 +++++++-------
 .../clientpositive/vectorized_timestamp.q.out   |  12 +-
 .../vectorized_timestamp_funcs.q.out            |  56 +-
 .../vectorized_timestamp_ints_casts.q.out       | 104 +--
 .../clientpositive/windowing_distinct.q.out     |  24 +-
 .../apache/hadoop/hive/serde2/JsonSerDe.java    |   7 +-
 .../hadoop/hive/serde2/RandomTypeUtil.java      | 189 -----
 .../apache/hadoop/hive/serde2/RegexSerDe.java   |   4 +-
 .../hive/serde2/avro/AvroDeserializer.java      |  10 +-
 .../hadoop/hive/serde2/avro/AvroSerializer.java |  13 +-
 .../binarysortable/BinarySortableSerDe.java     |  18 +-
 .../fast/BinarySortableDeserializeRead.java     |   4 +-
 .../fast/BinarySortableSerializeWrite.java      |  14 +-
 .../hive/serde2/fast/DeserializeRead.java       |  12 +-
 .../hadoop/hive/serde2/fast/SerializeWrite.java |   4 +-
 .../hadoop/hive/serde2/io/DateWritableV2.java   | 154 ----
 .../serde2/io/TimestampLocalTZWritable.java     |   4 +-
 .../hive/serde2/io/TimestampWritableV2.java     | 625 ----------------
 .../hadoop/hive/serde2/lazy/LazyDate.java       |  12 +-
 .../hadoop/hive/serde2/lazy/LazyTimestamp.java  |  16 +-
 .../hadoop/hive/serde2/lazy/VerifyLazy.java     |  14 +-
 .../lazy/fast/LazySimpleDeserializeRead.java    |   2 +-
 .../lazy/fast/LazySimpleSerializeWrite.java     |  18 +-
 .../primitive/LazyDateObjectInspector.java      |   9 +-
 .../primitive/LazyTimestampObjectInspector.java |   6 +-
 .../hive/serde2/lazybinary/LazyBinaryDate.java  |   8 +-
 .../hive/serde2/lazybinary/LazyBinarySerDe.java |  10 +-
 .../serde2/lazybinary/LazyBinarySerDe2.java     |   8 +-
 .../serde2/lazybinary/LazyBinaryTimestamp.java  |   8 +-
 .../hive/serde2/lazybinary/LazyBinaryUtils.java |   4 +-
 .../fast/LazyBinaryDeserializeRead.java         |   4 +-
 .../fast/LazyBinarySerializeWrite.java          |  14 +-
 .../objectinspector/ObjectInspectorUtils.java   |  18 +-
 .../primitive/DateObjectInspector.java          |   7 +-
 .../JavaConstantDateObjectInspector.java        |   7 +-
 .../JavaConstantTimestampObjectInspector.java   |   7 +-
 .../primitive/JavaDateObjectInspector.java      |  31 +-
 .../primitive/JavaTimestampObjectInspector.java |  40 +-
 .../PrimitiveObjectInspectorConverter.java      |   8 +-
 .../PrimitiveObjectInspectorFactory.java        |   8 +-
 .../PrimitiveObjectInspectorUtils.java          |  62 +-
 .../primitive/SettableDateObjectInspector.java  |  13 +-
 .../SettableTimestampObjectInspector.java       |  13 +-
 .../primitive/TimestampObjectInspector.java     |   7 +-
 .../WritableConstantDateObjectInspector.java    |   9 +-
 ...ritableConstantTimestampObjectInspector.java |   9 +-
 .../primitive/WritableDateObjectInspector.java  |  37 +-
 .../WritableHiveCharObjectInspector.java        |  10 +-
 .../WritableHiveVarcharObjectInspector.java     |   6 +-
 .../WritableTimestampObjectInspector.java       |  38 +-
 .../hive/serde2/SerdeRandomRowSource.java       |   5 +-
 .../apache/hadoop/hive/serde2/VerifyFast.java   |  20 +-
 .../hive/serde2/binarysortable/MyTestClass.java |   6 +-
 .../binarysortable/MyTestPrimitiveClass.java    |  17 +-
 .../hadoop/hive/serde2/io/TestDateWritable.java |  80 +-
 .../hive/serde2/io/TestTimestampWritable.java   |  84 +--
 .../hive/serde2/lazy/TestLazyPrimitive.java     |   5 +-
 .../serde2/lazybinary/MyTestClassBigger.java    |   7 +-
 .../serde2/lazybinary/MyTestClassSmaller.java   |   7 +-
 .../TestStandardObjectInspectors.java           |   6 +-
 .../TestPrimitiveObjectInspectorUtils.java      |  96 +--
 .../hive/metastore/MetaStoreDirectSql.java      |   9 +-
 .../hadoop/hive/metastore/parser/Filter.g       |   2 -
 .../hive/metastore/utils/MetaStoreUtils.java    |   2 -
 .../ql/exec/vector/TimestampColumnVector.java   |   2 +-
 .../hadoop/hive/serde2/io/DateWritable.java     |   1 -
 .../ql/exec/vector/TestStructColumnVector.java  |  22 +-
 .../apache/hadoop/hive/tools/GenVectorCode.java |  14 +-
 500 files changed, 6284 insertions(+), 9702 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java
index 2eeb7de..926f572 100644
--- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java
+++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java
@@ -22,7 +22,8 @@ import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
-import java.time.LocalDateTime;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.Map.Entry;
 
 import org.apache.accumulo.core.client.BatchWriter;
@@ -38,16 +39,14 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.accumulo.AccumuloHiveConstants;
 import org.apache.hadoop.hive.accumulo.AccumuloHiveRow;
 import org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
 import org.apache.hadoop.hive.serde2.lazy.LazyBoolean;
 import org.apache.hadoop.hive.serde2.lazy.LazyByte;
@@ -235,17 +234,17 @@ public class TestHiveAccumuloTypes {
 
     // date
     baos.reset();
-    Date now = Date.ofEpochMilli(System.currentTimeMillis());
-    DateWritableV2 dateWritable = new DateWritableV2(now);
+    Date now = new Date(System.currentTimeMillis());
+    DateWritable dateWritable = new DateWritable(now);
     Date dateValue = dateWritable.get();
     dateWritable.write(out);
     m.put(cfBytes, "date".getBytes(), baos.toByteArray());
 
     // timestamp
     baos.reset();
-    Timestamp timestampValue = Timestamp.ofEpochMilli(System.currentTimeMillis());
+    Timestamp timestampValue = new Timestamp(now.getTime());
     ByteStream.Output output = new ByteStream.Output();
-    TimestampWritableV2 timestampWritable = new TimestampWritableV2(timestampValue);
+    TimestampWritable timestampWritable = new TimestampWritable(new Timestamp(now.getTime()));
     timestampWritable.write(new DataOutputStream(output));
     output.close();
     m.put(cfBytes, "timestamp".getBytes(), output.toByteArray());
@@ -588,8 +587,8 @@ public class TestHiveAccumuloTypes {
     m.put(cfBytes, "decimal".getBytes(), baos.toByteArray());
 
     // date
-    Date now = Date.ofEpochMilli(System.currentTimeMillis());
-    DateWritableV2 dateWritable = new DateWritableV2(now);
+    Date now = new Date(System.currentTimeMillis());
+    DateWritable dateWritable = new DateWritable(now);
     Date dateValue = dateWritable.get();
     baos.reset();
     JavaDateObjectInspector dateOI = (JavaDateObjectInspector) PrimitiveObjectInspectorFactory
@@ -599,7 +598,7 @@ public class TestHiveAccumuloTypes {
     m.put(cfBytes, "date".getBytes(), baos.toByteArray());
 
     // timestamp
-    Timestamp timestampValue = Timestamp.valueOf(LocalDateTime.now().toString());
+    Timestamp timestampValue = new Timestamp(now.getTime());
     baos.reset();
     JavaTimestampObjectInspector timestampOI = (JavaTimestampObjectInspector) PrimitiveObjectInspectorFactory
         .getPrimitiveJavaObjectInspector(TypeInfoFactory

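The hunks above trade Hive's UTC-pinned Date/Timestamp types for java.sql.Date and java.sql.Timestamp, whose calendar fields follow the JVM's default time zone. A minimal sketch of the semantic difference for one epoch-millis value (my own illustration, not part of the patch; the class name is invented):

import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneOffset;

public class DateSemanticsDemo {
  public static void main(String[] args) {
    long epochMilli = 0L; // 1970-01-01T00:00:00Z
    // java.sql.Date renders the instant in the JVM's default time zone:
    System.out.println(new java.sql.Date(epochMilli)); // 1969-12-31 when run in UTC-8
    // the reverted Hive Date pinned its calendar math to UTC instead:
    LocalDate utcDate = Instant.ofEpochMilli(epochMilli).atZone(ZoneOffset.UTC).toLocalDate();
    System.out.println(utcDate); // 1970-01-01 in every zone
  }
}

Run with -Duser.timezone=America/Los_Angeles, the two printed dates differ by a day; run in UTC, they agree.
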
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java
index 9df2aad..5f3baab 100644
--- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java
+++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/predicate/TestAccumuloRangeGenerator.java
@@ -25,7 +25,6 @@ import org.apache.hadoop.hive.accumulo.TestAccumuloDefaultIndexScanner;
 import org.apache.hadoop.hive.accumulo.columns.ColumnEncoding;
 import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloRowIdColumnMapping;
 import org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
@@ -53,6 +52,7 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.sql.Date;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/common/src/java/org/apache/hadoop/hive/common/type/Date.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/Date.java b/common/src/java/org/apache/hadoop/hive/common/type/Date.java
deleted file mode 100644
index 6ecfcf6..0000000
--- a/common/src/java/org/apache/hadoop/hive/common/type/Date.java
+++ /dev/null
@@ -1,181 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.common.type;
-
-import java.time.Instant;
-import java.time.LocalDate;
-import java.time.LocalDateTime;
-import java.time.ZoneOffset;
-import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeFormatterBuilder;
-import java.time.format.DateTimeParseException;
-import java.time.format.ResolverStyle;
-import java.time.format.SignStyle;
-
-import static java.time.temporal.ChronoField.DAY_OF_MONTH;
-import static java.time.temporal.ChronoField.MONTH_OF_YEAR;
-import static java.time.temporal.ChronoField.YEAR;
-
-/**
- * This is the internal type for Date.
- * The fully qualified input format of Date is "yyyy-MM-dd".
- */
-public class Date implements Comparable<Date> {
-
-  private static final LocalDate EPOCH = LocalDate.of(1970, 1, 1);
-  private static final DateTimeFormatter PARSE_FORMATTER;
-  private static final DateTimeFormatter PRINT_FORMATTER;
-  static {
-    DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
-    builder.appendValue(YEAR, 1, 10, SignStyle.NORMAL)
-        .appendLiteral('-')
-        .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NORMAL)
-        .appendLiteral('-')
-        .appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NORMAL);
-    PARSE_FORMATTER = builder.toFormatter().withResolverStyle(ResolverStyle.LENIENT);
-    builder = new DateTimeFormatterBuilder();
-    builder.append(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
-    PRINT_FORMATTER = builder.toFormatter();
-  }
-
-  private LocalDate localDate;
-
-  private Date(LocalDate localDate) {
-    this.localDate = localDate != null ? localDate : EPOCH;
-  }
-
-  public Date() {
-    this(EPOCH);
-  }
-
-  public Date(Date d) {
-    this(d.localDate);
-  }
-
-  @Override
-  public String toString() {
-    return localDate.format(PRINT_FORMATTER);
-  }
-
-  public int hashCode() {
-    return localDate.hashCode();
-  }
-
-  @Override
-  public boolean equals(Object other) {
-    if (other instanceof Date) {
-      return compareTo((Date) other) == 0;
-    }
-    return false;
-  }
-
-  @Override
-  public int compareTo(Date o) {
-    return localDate.compareTo(o.localDate);
-  }
-
-  public int toEpochDay() {
-    return (int) localDate.toEpochDay();
-  }
-
-  public long toEpochSecond() {
-    return localDate.atStartOfDay().toEpochSecond(ZoneOffset.UTC);
-  }
-
-  public long toEpochMilli() {
-    return localDate.atStartOfDay().toInstant(ZoneOffset.UTC).toEpochMilli();
-  }
-
-  public void setYear(int year) {
-    localDate = localDate.withYear(year);
-  }
-
-  public void setMonth(int month) {
-    localDate = localDate.withMonth(month);
-  }
-
-  public void setDayOfMonth(int dayOfMonth) {
-    localDate = localDate.withDayOfMonth(dayOfMonth);
-  }
-
-  public void setTimeInDays(int epochDay) {
-    localDate = LocalDate.ofEpochDay(epochDay);
-  }
-
-  public void setTimeInMillis(long epochMilli) {
-    localDate = LocalDateTime.ofInstant(
-        Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC).toLocalDate();
-  }
-
-  public static Date valueOf(String s) {
-    s = s.trim();
-    int idx = s.indexOf(" ");
-    if (idx != -1) {
-      s = s.substring(0, idx);
-    }
-    LocalDate localDate;
-    try {
-      localDate = LocalDate.parse(s, PARSE_FORMATTER);
-    } catch (DateTimeParseException e) {
-      throw new IllegalArgumentException("Cannot create date, parsing error");
-    }
-    return new Date(localDate);
-  }
-
-  public static Date ofEpochDay(int epochDay) {
-    return new Date(LocalDate.ofEpochDay(epochDay));
-  }
-
-  public static Date ofEpochMilli(long epochMilli) {
-    return new Date(LocalDateTime.ofInstant(
-        Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC).toLocalDate());
-  }
-
-  public static Date of(int year, int month, int dayOfMonth) {
-    return new Date(LocalDate.of(year, month, dayOfMonth));
-  }
-
-  public int getYear() {
-    return localDate.getYear();
-  }
-
-  public int getMonth() {
-    return localDate.getMonthValue();
-  }
-
-  public int getDay() {
-    return localDate.getDayOfMonth();
-  }
-
-  public int lengthOfMonth() {
-    return localDate.lengthOfMonth();
-  }
-
-  public int getDayOfWeek() {
-    return localDate.getDayOfWeek().plus(1).getValue();
-  }
-
-  /**
-   * Return a copy of this object.
-   */
-  public Object clone() {
-    // LocalDateTime is immutable.
-    return new Date(this.localDate);
-  }
-
-}

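The deleted Date class above parses with ResolverStyle.LENIENT, so out-of-range month or day values roll over instead of failing (the TestDateParser cases further down in this patch rely on the same behavior). A standalone sketch of that resolver behavior using only java.time; the class name is mine:

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.ResolverStyle;
import java.time.format.SignStyle;

import static java.time.temporal.ChronoField.DAY_OF_MONTH;
import static java.time.temporal.ChronoField.MONTH_OF_YEAR;
import static java.time.temporal.ChronoField.YEAR;

public class LenientDateParseDemo {
  public static void main(String[] args) {
    // Same field layout as the deleted PARSE_FORMATTER above.
    DateTimeFormatter f = new DateTimeFormatterBuilder()
        .appendValue(YEAR, 1, 10, SignStyle.NORMAL).appendLiteral('-')
        .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NORMAL).appendLiteral('-')
        .appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NORMAL)
        .toFormatter().withResolverStyle(ResolverStyle.LENIENT);
    // Month 13 rolls over into January of the following year:
    System.out.println(LocalDate.parse("2001-13-12", f)); // 2002-01-12
  }
}
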
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/common/src/java/org/apache/hadoop/hive/common/type/Timestamp.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/Timestamp.java b/common/src/java/org/apache/hadoop/hive/common/type/Timestamp.java
deleted file mode 100644
index a8b7b6d..0000000
--- a/common/src/java/org/apache/hadoop/hive/common/type/Timestamp.java
+++ /dev/null
@@ -1,235 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.common.type;
-
-import java.time.Instant;
-import java.time.LocalDateTime;
-import java.time.ZoneOffset;
-import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeFormatterBuilder;
-import java.time.format.DateTimeParseException;
-import java.time.format.ResolverStyle;
-import java.time.format.SignStyle;
-import java.time.temporal.ChronoField;
-
-import static java.time.temporal.ChronoField.DAY_OF_MONTH;
-import static java.time.temporal.ChronoField.HOUR_OF_DAY;
-import static java.time.temporal.ChronoField.MINUTE_OF_HOUR;
-import static java.time.temporal.ChronoField.MONTH_OF_YEAR;
-import static java.time.temporal.ChronoField.SECOND_OF_MINUTE;
-import static java.time.temporal.ChronoField.YEAR;
-
-/**
- * This is the internal type for Timestamp.
- * The fully qualified input format of Timestamp is
- * "yyyy-MM-dd HH:mm:ss[.SSS...]", where the time part is optional.
- * If the time part is absent, a default of '00:00:00.0' will be used.
- */
-public class Timestamp implements Comparable<Timestamp> {
-  
-  private static final LocalDateTime EPOCH = LocalDateTime.of(1970, 1, 1, 0, 0, 0);
-  private static final DateTimeFormatter PARSE_FORMATTER;
-  private static final DateTimeFormatter PRINT_FORMATTER;
-
-  static {
-    DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
-    // Date part
-    builder.appendValue(YEAR, 1, 10, SignStyle.NORMAL)
-        .appendLiteral('-')
-        .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NORMAL)
-        .appendLiteral('-')
-        .appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NORMAL);
-    // Time part
-    builder
-        .optionalStart().appendLiteral(" ")
-        .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NORMAL)
-        .appendLiteral(':')
-        .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NORMAL)
-        .appendLiteral(':')
-        .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NORMAL)
-        .optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 1, 9, true).optionalEnd()
-        .optionalEnd();
-    PARSE_FORMATTER = builder.toFormatter().withResolverStyle(ResolverStyle.LENIENT);
-    builder = new DateTimeFormatterBuilder();
-    // Date and time parts
-    builder.append(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
-    // Fractional part
-    builder.optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true).optionalEnd();
-    PRINT_FORMATTER = builder.toFormatter();
-  }
-
-  private LocalDateTime localDateTime;
-
-  /* Private constructor */
-  private Timestamp(LocalDateTime localDateTime) {
-    this.localDateTime = localDateTime != null ? localDateTime : EPOCH;
-  }
-
-  public Timestamp() {
-    this(EPOCH);
-  }
-
-  public Timestamp(Timestamp t) {
-    this(t.localDateTime);
-  }
-
-  public void set(Timestamp t) {
-    this.localDateTime = t != null ? t.localDateTime : EPOCH;
-  }
-
-  public String format(DateTimeFormatter formatter) {
-    return localDateTime.format(formatter);
-  }
-
-  @Override
-  public String toString() {
-    return localDateTime.format(PRINT_FORMATTER);
-  }
-
-  public int hashCode() {
-    return localDateTime.hashCode();
-  }
-
-  @Override
-  public boolean equals(Object other) {
-    if (other instanceof Timestamp) {
-      return compareTo((Timestamp) other) == 0;
-    }
-    return false;
-  }
-
-  @Override
-  public int compareTo(Timestamp o) {
-    return localDateTime.compareTo(o.localDateTime);
-  }
-
-  public long toEpochSecond() {
-    return localDateTime.toEpochSecond(ZoneOffset.UTC);
-  }
-
-  public void setTimeInSeconds(long epochSecond) {
-    setTimeInSeconds(epochSecond, 0);
-  }
-
-  public void setTimeInSeconds(long epochSecond, int nanos) {
-    localDateTime = LocalDateTime.ofEpochSecond(
-        epochSecond, nanos, ZoneOffset.UTC);
-  }
-
-  public long toEpochMilli() {
-    return localDateTime.toInstant(ZoneOffset.UTC).toEpochMilli();
-  }
-
-  public void setTimeInMillis(long epochMilli) {
-    localDateTime = LocalDateTime.ofInstant(
-        Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC);
-  }
-
-  public void setTimeInMillis(long epochMilli, int nanos) {
-    localDateTime = LocalDateTime
-        .ofInstant(Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC)
-        .withNano(nanos);
-  }
-
-  public int getNanos() {
-    return localDateTime.getNano();
-  }
-
-  public static Timestamp valueOf(String s) {
-    s = s.trim();
-    LocalDateTime localDateTime;
-    try {
-      localDateTime = LocalDateTime.parse(s, PARSE_FORMATTER);
-    } catch (DateTimeParseException e) {
-      // Try ISO-8601 format
-      try {
-        localDateTime = LocalDateTime.parse(s);
-      } catch (DateTimeParseException e2) {
-        throw new IllegalArgumentException("Cannot create timestamp, parsing error");
-      }
-    }
-    return new Timestamp(localDateTime);
-  }
-
-  public static Timestamp ofEpochSecond(long epochSecond) {
-    return ofEpochSecond(epochSecond, 0);
-  }
-
-  public static Timestamp ofEpochSecond(long epochSecond, int nanos) {
-    return new Timestamp(
-        LocalDateTime.ofEpochSecond(epochSecond, nanos, ZoneOffset.UTC));
-  }
-
-  public static Timestamp ofEpochMilli(long epochMilli) {
-    return new Timestamp(LocalDateTime
-        .ofInstant(Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC));
-  }
-
-  public static Timestamp ofEpochMilli(long epochMilli, int nanos) {
-    return new Timestamp(LocalDateTime
-        .ofInstant(Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC)
-        .withNano(nanos));
-  }
-
-  public void setNanos(int nanos) {
-    localDateTime = localDateTime.withNano(nanos);
-  }
-
-  public int getYear() {
-    return localDateTime.getYear();
-  }
-
-  public int getMonth() {
-    return localDateTime.getMonthValue();
-  }
-
-  public int getDay() {
-    return localDateTime.getDayOfMonth();
-  }
-
-  public int getHours() {
-    return localDateTime.getHour();
-  }
-
-  public int getMinutes() {
-    return localDateTime.getMinute();
-  }
-
-  public int getSeconds() {
-    return localDateTime.getSecond();
-  }
-
-  public int getDayOfWeek() {
-    return localDateTime.getDayOfWeek().plus(1).getValue();
-  }
-
-  /**
-   * Return a copy of this object.
-   */
-  public Object clone() {
-    // LocalDateTime is immutable.
-    return new Timestamp(this.localDateTime);
-  }
-
-  public java.sql.Timestamp toSqlTimestamp() {
-    java.sql.Timestamp ts = new java.sql.Timestamp(toEpochMilli());
-    ts.setNanos(getNanos());
-    return ts;
-  }
-
-}

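The deleted Timestamp above wraps a LocalDateTime and converts to epoch values through ZoneOffset.UTC, while the java.sql.Timestamp it reverts to binds the same wall-clock text to the JVM's default zone. A small sketch of the difference (my own, not from the patch):

import java.time.LocalDateTime;
import java.time.ZoneOffset;

public class TimestampEpochDemo {
  public static void main(String[] args) {
    LocalDateTime ldt = LocalDateTime.parse("2015-01-01T00:00:00");
    // UTC-pinned conversion: the same number on every host.
    long utcSeconds = ldt.toEpochSecond(ZoneOffset.UTC); // 1420070400
    // java.sql.Timestamp interprets the same text in the default zone.
    long localSeconds = java.sql.Timestamp.valueOf("2015-01-01 00:00:00").getTime() / 1000L;
    System.out.println(utcSeconds + " vs " + localSeconds); // equal only when the JVM zone is UTC
  }
}
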
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java b/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java
index 213650c..90ffddb 100644
--- a/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java
+++ b/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.common.type;
 
+import java.sql.Timestamp;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.time.DateTimeException;
@@ -30,6 +31,7 @@ import java.time.format.DateTimeParseException;
 import java.time.format.TextStyle;
 import java.time.temporal.ChronoField;
 import java.time.temporal.TemporalAccessor;
+import java.util.Date;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -43,6 +45,9 @@ public class TimestampTZUtil {
   private static final LocalTime DEFAULT_LOCAL_TIME = LocalTime.of(0, 0);
   private static final Pattern SINGLE_DIGIT_PATTERN = Pattern.compile("[\\+-]\\d:\\d\\d");
 
+  private static final ThreadLocal<DateFormat> CONVERT_FORMATTER =
+      ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
+
   static final DateTimeFormatter FORMATTER;
   static {
     DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
@@ -121,14 +126,11 @@ public class TimestampTZUtil {
     }
   }
 
-  // Converts Date to TimestampTZ.
+  // Converts Date to TimestampTZ. The conversion is done text-wise since
+  // Date/Timestamp should be treated as a description of date/time.
   public static TimestampTZ convert(Date date, ZoneId defaultTimeZone) {
-    return parse(date.toString(), defaultTimeZone);
-  }
-
-  // Converts Timestamp to TimestampTZ.
-  public static TimestampTZ convert(Timestamp ts, ZoneId defaultTimeZone) {
-    return parse(ts.toString(), defaultTimeZone);
+    String s = date instanceof Timestamp ? date.toString() : CONVERT_FORMATTER.get().format(date);
+    return parse(s, defaultTimeZone);
   }
 
   public static ZoneId parseTimeZone(String timeZoneStr) {

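The restored convert() above goes through the printed string rather than the underlying instant, so the wall-clock reading is kept and merely stamped with the target zone. A rough illustration of that text-wise idea in plain java.time (my own sketch, not the method's actual code path):

import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;

public class TextWiseConvertDemo {
  public static void main(String[] args) {
    java.sql.Timestamp ts = java.sql.Timestamp.valueOf("2018-06-25 10:00:00");
    ZoneId target = ZoneId.of("America/Los_Angeles");
    // Text-wise: re-read the printed wall clock in the target zone;
    // the instant changes, the digits do not.
    ZonedDateTime textWise = LocalDateTime.parse(ts.toString().replace(' ', 'T')).atZone(target);
    System.out.println(textWise); // 2018-06-25T10:00-07:00[America/Los_Angeles]
  }
}
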
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/common/src/java/org/apache/hadoop/hive/common/type/TimestampUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/TimestampUtils.java b/common/src/java/org/apache/hadoop/hive/common/type/TimestampUtils.java
deleted file mode 100644
index ab60db1..0000000
--- a/common/src/java/org/apache/hadoop/hive/common/type/TimestampUtils.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.common.type;
-
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-
-import java.math.BigDecimal;
-import java.time.DateTimeException;
-
-/**
- * Utilities for Timestamps and the relevant conversions.
- */
-public class TimestampUtils {
-  public static final BigDecimal BILLION_BIG_DECIMAL = BigDecimal.valueOf(1000000000);
-
-  /**
-   * Convert the timestamp to a double measured in seconds.
-   * @return double representation of the timestamp, accurate to nanoseconds
-   */
-  public static double getDouble(Timestamp ts) {
-    long seconds = ts.toEpochSecond();
-    return seconds + ((double) ts.getNanos()) / 1000000000;
-  }
-
-  public static Timestamp doubleToTimestamp(double f) {
-    try {
-      long seconds = (long) f;
-
-      // We must ensure the exactness of the double's fractional portion.
-      // 0.6 as the fraction part will be converted to 0.59999... and
-      // significantly reduce the savings from binary serialization
-      BigDecimal bd = new BigDecimal(String.valueOf(f));
-
-      bd = bd.subtract(new BigDecimal(seconds)).multiply(new BigDecimal(1000000000));
-      int nanos = bd.intValue();
-
-      // Convert to millis
-      long millis = seconds * 1000;
-      if (nanos < 0) {
-        millis -= 1000;
-        nanos += 1000000000;
-      }
-
-      return Timestamp.ofEpochMilli(millis, nanos);
-    } catch (IllegalArgumentException | DateTimeException nfe) {
-      return null;
-    }
-  }
-
-  /**
-   * Take a HiveDecimal and return the timestamp representation where the fraction part is the
-   * nanoseconds and integer part is the number of seconds.
-   * @param dec
-   * @return
-   */
-  public static Timestamp decimalToTimestamp(HiveDecimal dec) {
-    try {
-      HiveDecimalWritable nanosWritable = new HiveDecimalWritable(dec);
-      nanosWritable.mutateFractionPortion();               // Clip off seconds portion.
-      nanosWritable.mutateScaleByPowerOfTen(9);            // Bring nanoseconds into integer portion.
-      if (!nanosWritable.isSet() || !nanosWritable.isInt()) {
-        return null;
-      }
-      int nanos = nanosWritable.intValue();
-      if (nanos < 0) {
-        nanos += 1000000000;
-      }
-      nanosWritable.setFromLong(nanos);
-
-      HiveDecimalWritable nanoInstant = new HiveDecimalWritable(dec);
-      nanoInstant.mutateScaleByPowerOfTen(9);
-
-      nanoInstant.mutateSubtract(nanosWritable);
-      nanoInstant.mutateScaleByPowerOfTen(-9);              // Back to seconds.
-      if (!nanoInstant.isSet() || !nanoInstant.isLong()) {
-        return null;
-      }
-      long millis = nanoInstant.longValue() * 1000;
-      return Timestamp.ofEpochMilli(millis, nanos);
-    } catch (IllegalArgumentException | DateTimeException nfe) {
-      // E.g. DateTimeException: Invalid value for Year (valid values -999999999 - 999999999)
-      return null;
-    }
-  }
-
-  /**
-   * Take a HiveDecimalWritable and return the timestamp representation where the fraction part
-   * is the nanoseconds and integer part is the number of seconds.
-   *
-   * This is a HiveDecimalWritable variation with supplied scratch objects.
-   * @param decWritable
-   * @param scratchDecWritable1
-   * @param scratchDecWritable2
-   * @return
-   */
-  public static Timestamp decimalToTimestamp(
-      HiveDecimalWritable decWritable,
-      HiveDecimalWritable scratchDecWritable1, HiveDecimalWritable scratchDecWritable2) {
-
-    HiveDecimalWritable nanosWritable = scratchDecWritable1;
-    nanosWritable.set(decWritable);
-    nanosWritable.mutateFractionPortion();               // Clip off seconds portion.
-    nanosWritable.mutateScaleByPowerOfTen(9);            // Bring nanoseconds into integer portion.
-    if (!nanosWritable.isSet() || !nanosWritable.isInt()) {
-      return null;
-    }
-    int nanos = nanosWritable.intValue();
-    if (nanos < 0) {
-      nanos += 1000000000;
-    }
-    nanosWritable.setFromLong(nanos);
-
-    HiveDecimalWritable nanoInstant = scratchDecWritable2;
-    nanoInstant.set(decWritable);
-    nanoInstant.mutateScaleByPowerOfTen(9);
-
-    nanoInstant.mutateSubtract(nanosWritable);
-    nanoInstant.mutateScaleByPowerOfTen(-9);              // Back to seconds.
-    if (!nanoInstant.isSet() || !nanoInstant.isLong()) {
-      return null;
-    }
-    long seconds = nanoInstant.longValue();
-
-    return Timestamp.ofEpochSecond(seconds, nanos);
-  }
-
-  public static Timestamp decimalToTimestamp(HiveDecimalV1 dec) {
-    try {
-      BigDecimal nanoInstant = dec.bigDecimalValue().multiply(BILLION_BIG_DECIMAL);
-      int nanos = nanoInstant.remainder(BILLION_BIG_DECIMAL).intValue();
-      if (nanos < 0) {
-        nanos += 1000000000;
-      }
-      long seconds =
-          nanoInstant.subtract(new BigDecimal(nanos)).divide(BILLION_BIG_DECIMAL).longValue();
-
-      return Timestamp.ofEpochSecond(seconds, nanos);
-    } catch (IllegalArgumentException | DateTimeException nfe) {
-      return null;
-    }
-  }
-
-  /**
-   * Rounds the number of milliseconds relative to the epoch down to the nearest whole number of
-   * seconds. 500 would round to 0, -500 would round to -1.
-   */
-  public static long millisToSeconds(long millis) {
-    if (millis >= 0) {
-      return millis / 1000;
-    } else {
-      return (millis - 999) / 1000;
-    }
-  }
-
-}

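One subtlety worth noting in the deleted utilities is millisToSeconds(), which floors toward negative infinity rather than truncating toward zero, so pre-epoch values land in the correct second. A tiny self-contained check (the class name is mine; the helper restates the deleted method's arithmetic):

public class MillisToSecondsDemo {
  // Same arithmetic as the deleted TimestampUtils.millisToSeconds().
  static long millisToSeconds(long millis) {
    return millis >= 0 ? millis / 1000 : (millis - 999) / 1000;
  }

  public static void main(String[] args) {
    System.out.println(millisToSeconds(500));  // 0
    System.out.println(millisToSeconds(-500)); // -1, not 0
    System.out.println(millisToSeconds(-500) == Math.floorDiv(-500, 1000)); // true
  }
}
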
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 18f30ad..3691786 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1893,7 +1893,7 @@ public class HiveConf extends Configuration {
         "Maximum fraction of heap that can be used by Parquet file writers in one task.\n" +
         "It is for avoiding OutOfMemory error in tasks. Work with Parquet 1.6.0 and above.\n" +
         "This config parameter is defined in Parquet, so that it does not start with 'hive.'."),
-    HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION("hive.parquet.timestamp.skip.conversion", false,
+    HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION("hive.parquet.timestamp.skip.conversion", true,
       "Current Hive implementation of parquet stores timestamps to UTC, this flag allows skipping of the conversion" +
       "on reading parquet files from other tools"),
     HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS("hive.int.timestamp.conversion.in.seconds", false,

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/common/src/java/org/apache/hive/common/util/DateParser.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/common/util/DateParser.java b/common/src/java/org/apache/hive/common/util/DateParser.java
index 5db14f1..949fdba 100644
--- a/common/src/java/org/apache/hive/common/util/DateParser.java
+++ b/common/src/java/org/apache/hive/common/util/DateParser.java
@@ -17,18 +17,24 @@
  */
 package org.apache.hive.common.util;
 
-import org.apache.hadoop.hive.common.type.Date;
+import java.sql.Date;
+import java.text.ParsePosition;
+import java.text.SimpleDateFormat;
 
 /**
  * Date parser class for Hive.
  */
 public class DateParser {
-
+  private final SimpleDateFormat formatter;
+  private final ParsePosition pos;
   public DateParser() {
- }
+    formatter = new SimpleDateFormat("yyyy-MM-dd");
+    // TODO: ideally, we should set formatter.setLenient(false);
+    pos = new ParsePosition(0);
+  }
 
   public Date parseDate(String strValue) {
-    Date result = new Date();
+    Date result = new Date(0);
     if (parseDate(strValue, result)) {
       return result;
     }
@@ -36,16 +42,12 @@ public class DateParser {
   }
 
   public boolean parseDate(String strValue, Date result) {
-    Date parsedVal;
-    try {
-      parsedVal = Date.valueOf(strValue);
-    } catch (IllegalArgumentException e) {
-      parsedVal = null;
-    }
+    pos.setIndex(0);
+    java.util.Date parsedVal = formatter.parse(strValue, pos);
     if (parsedVal == null) {
       return false;
     }
-    result.setTimeInMillis(parsedVal.toEpochMilli());
+    result.setTime(parsedVal.getTime());
     return true;
   }
 }

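The restored DateParser leans on two SimpleDateFormat behaviors: it is lenient unless setLenient(false) is called (hence the TODO above), and parse(String, ParsePosition) signals failure with a null return instead of an exception. A standalone sketch of both (hypothetical class name):

import java.text.ParsePosition;
import java.text.SimpleDateFormat;

public class LenientSimpleDateFormatDemo {
  public static void main(String[] args) {
    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
    ParsePosition pos = new ParsePosition(0);
    // Lenient by default: November 31 rolls forward to December 1.
    java.util.Date d = fmt.parse("2001-11-31", pos);
    System.out.println(new java.sql.Date(d.getTime())); // 2001-12-01
    // Failure is reported as null, with no exception thrown.
    pos.setIndex(0);
    System.out.println(fmt.parse("not-a-date", pos)); // null
  }
}
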
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/common/src/java/org/apache/hive/common/util/DateUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/common/util/DateUtils.java b/common/src/java/org/apache/hive/common/util/DateUtils.java
index e70de28..65f3b94 100644
--- a/common/src/java/org/apache/hive/common/util/DateUtils.java
+++ b/common/src/java/org/apache/hive/common/util/DateUtils.java
@@ -20,7 +20,6 @@ package org.apache.hive.common.util;
 
 import java.math.BigDecimal;
 import java.text.SimpleDateFormat;
-import java.util.TimeZone;
 
 /**
  * DateUtils. Thread-safe class
@@ -33,7 +32,6 @@ public class DateUtils {
     protected SimpleDateFormat initialValue() {
       SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
       simpleDateFormat.setLenient(false);
-      simpleDateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
       return simpleDateFormat;
     }
   };

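For context, the ThreadLocal wrapper in DateUtils exists because SimpleDateFormat keeps mutable state and is not thread-safe; with the setTimeZone(UTC) call removed, each per-thread instance now formats and parses in the JVM's default zone. A minimal sketch of the pattern (illustration only, names are mine):

import java.text.SimpleDateFormat;

public class ThreadLocalFormatterDemo {
  private static final ThreadLocal<SimpleDateFormat> FMT =
      ThreadLocal.withInitial(() -> {
        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
        f.setLenient(false);
        return f; // one instance per thread; the zone follows the JVM default
      });

  public static void main(String[] args) {
    // Each thread gets its own formatter, so no external locking is needed.
    System.out.println(FMT.get().format(new java.util.Date(0L)));
  }
}
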
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/common/src/java/org/apache/hive/common/util/TimestampParser.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/common/util/TimestampParser.java b/common/src/java/org/apache/hive/common/util/TimestampParser.java
index d30ab88..f674b5d 100644
--- a/common/src/java/org/apache/hive/common/util/TimestampParser.java
+++ b/common/src/java/org/apache/hive/common/util/TimestampParser.java
@@ -18,18 +18,19 @@
 
 package org.apache.hive.common.util;
 
+import java.math.BigDecimal;
+import java.sql.Timestamp;
 import java.util.Arrays;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Optional;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.joda.time.DateTime;
-import org.joda.time.LocalDateTime;
+import org.joda.time.IllegalInstantException;
 import org.joda.time.MutableDateTime;
 import org.joda.time.DateTimeFieldType;
-import org.joda.time.chrono.ISOChronology;
 import org.joda.time.format.DateTimeFormat;
 import org.joda.time.format.DateTimeFormatter;
 import org.joda.time.format.DateTimeFormatterBuilder;
@@ -51,8 +52,19 @@ public class TimestampParser {
 
   protected final static String[] stringArray = new String[] {};
   protected final static String millisFormatString = "millis";
-  protected final static DateTime startingDateValue =
-      new DateTime(1970, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
+  @Nullable
+  private final static DateTime startingDateValue = makeStartingDateValue();
+
+  @Nullable
+  private static DateTime makeStartingDateValue() {
+    try {
+      return new DateTime(1970, 1, 1, 0, 0, 0, 0);
+    } catch (IllegalInstantException e) {
+      // 1970-01-01 00:00:00 did not exist in some zones. In these zones, we need to take a different,
+      // less optimal parsing route.
+      return null;
+    }
+  }
 
   protected String[] formatStrings = null;
   protected DateTimeFormatter fmt = null;
@@ -114,24 +126,21 @@ public class TimestampParser {
 
     if (startingDateValue != null) {
       // reset value in case any date fields are missing from the date pattern
-      MutableDateTime mdt = new MutableDateTime(
-          startingDateValue, ISOChronology.getInstanceUTC());
+      MutableDateTime mdt = new MutableDateTime(startingDateValue);
 
       // Using parseInto() avoids throwing exception when parsing,
       // allowing fallback to default timestamp parsing if custom patterns fail.
       int ret = fmt.parseInto(mdt, strValue, 0);
       // Only accept parse results if we parsed the entire string
       if (ret == strValue.length()) {
-        return Optional.of(Timestamp.ofEpochMilli(mdt.getMillis()));
+        return Optional.of(new Timestamp(mdt.getMillis()));
       }
       return Optional.empty();
     }
 
     try {
-      LocalDateTime dt = fmt.parseLocalDateTime(strValue);
-      return Optional.of(
-          Timestamp.ofEpochMilli(
-              dt.toDateTime(ISOChronology.getInstanceUTC().getZone()).getMillis()));
+      DateTime dt = fmt.parseDateTime(strValue);
+      return Optional.of(new Timestamp(dt.getMillis()));
     } catch (IllegalArgumentException e) {
       return Optional.empty();
     }
@@ -172,8 +181,7 @@ public class TimestampParser {
 
       // Joda DateTime only has precision to millis, cut off any fractional portion
       long millis = Long.parseLong(matcher.group(1));
-      DateTime dt =
-          new DateTime(millis, ISOChronology.getInstanceUTC());
+      DateTime dt = new DateTime(millis);
       for (DateTimeFieldType field : dateTimeFields) {
         bucket.saveField(field, dt.get(field));
       }

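With the "millis" pseudo-pattern, the parser reads epoch milliseconds directly and, as the comment in the hunk says, cuts off fractional digits because Joda's DateTime only carries millisecond precision. A rough sketch of that path; the regex here is my approximation, not the parser's actual pattern:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class MillisPatternDemo {
  // Approximation: integer millis with an optional fractional tail.
  private static final Pattern MILLIS = Pattern.compile("(-?\\d+)(\\.\\d+)?");

  public static void main(String[] args) {
    Matcher m = MILLIS.matcher("1420509274123.456789");
    if (m.matches()) {
      long millis = Long.parseLong(m.group(1)); // ".456789" is discarded
      System.out.println(new java.sql.Timestamp(millis));
    }
  }
}
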
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
index 1435339..72dce4d 100644
--- a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
@@ -17,18 +17,26 @@
  */
 package org.apache.hadoop.hive.common.type;
 
+import java.sql.Timestamp;
 import java.util.Random;
+import java.util.Arrays;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritableV1;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.orc.impl.SerializationUtils;
 import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.hive.ql.util.TimestampUtils;
 
 import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
 
 import org.junit.*;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java b/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
index cd23abe..5a3f048 100644
--- a/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.common.type;
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.sql.Timestamp;
 import java.time.ZoneId;
 import java.time.format.DateTimeParseException;
 import java.util.TimeZone;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/common/src/test/org/apache/hive/common/util/TestDateParser.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hive/common/util/TestDateParser.java b/common/src/test/org/apache/hive/common/util/TestDateParser.java
index 8c3a7a4..0553b3d 100644
--- a/common/src/test/org/apache/hive/common/util/TestDateParser.java
+++ b/common/src/test/org/apache/hive/common/util/TestDateParser.java
@@ -18,13 +18,13 @@
 package org.apache.hive.common.util;
 
 import static org.junit.Assert.*;
-
-import org.apache.hadoop.hive.common.type.Date;
 import org.junit.Test;
 
+import java.sql.Date;
+
 public class TestDateParser {
   DateParser parser = new DateParser();
-  Date date = new Date();
+  Date date = new Date(0);
 
   void checkValidCase(String strValue, Date expected) {
     Date dateValue = parser.parseDate(strValue);
@@ -57,6 +57,7 @@ public class TestDateParser {
     checkValidCase(" 1946-01-01", Date.valueOf("1946-01-01"));
     checkValidCase(" 2001-11-12 01:02:03", Date.valueOf("2001-11-12"));
 
+    // Current date parsing is lenient
     checkValidCase("2001-13-12", Date.valueOf("2002-01-12"));
     checkValidCase("2001-11-31", Date.valueOf("2001-12-01"));
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/common/src/test/org/apache/hive/common/util/TestTimestampParser.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hive/common/util/TestTimestampParser.java b/common/src/test/org/apache/hive/common/util/TestTimestampParser.java
index 00a7904..c982af6 100644
--- a/common/src/test/org/apache/hive/common/util/TestTimestampParser.java
+++ b/common/src/test/org/apache/hive/common/util/TestTimestampParser.java
@@ -18,10 +18,11 @@
 
 package org.apache.hive.common.util;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import java.sql.Timestamp;
+import java.util.Arrays;
+import java.util.List;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
+import static org.junit.Assert.*;
 import org.junit.Test;
 
 public class TestTimestampParser {
@@ -64,11 +65,10 @@ public class TestTimestampParser {
             Timestamp.valueOf("1945-12-31 23:59:59.1234")),
         new ValidTimestampCase("1970-01-01 00:00:00",
             Timestamp.valueOf("1970-01-01 00:00:00")),
-        new ValidTimestampCase("1945-12-31T23:59:59",
-            Timestamp.valueOf("1945-12-31 23:59:59")),
     };
 
     String[] invalidCases = {
+        "1945-12-31T23:59:59",
         "12345",
     };
 
@@ -111,12 +111,11 @@ public class TestTimestampParser {
             Timestamp.valueOf("2001-01-01 00:00:00")),
         new ValidTimestampCase("1945-12-31 23:59:59.1234",
             Timestamp.valueOf("1945-12-31 23:59:59.1234")),
-        new ValidTimestampCase("1945-12-31T23:59:59.12345",
-            Timestamp.valueOf("1945-12-31 23:59:59.12345"))
     };
 
     String[] invalidCases = {
         "1945-12-31-23:59:59",
+        "1945-12-31T23:59:59.12345", // our pattern didn't specify 5 decimal places
         "12345",
     };
 
@@ -134,20 +133,19 @@ public class TestTimestampParser {
     TimestampParser tp = new TimestampParser(patterns);
 
     ValidTimestampCase[] validCases = {
-        new ValidTimestampCase("0", Timestamp.ofEpochMilli(0)),
-        new ValidTimestampCase("-1000000", Timestamp.ofEpochMilli(-1000000)),
-        new ValidTimestampCase("1420509274123", Timestamp.ofEpochMilli(1420509274123L)),
-        new ValidTimestampCase("1420509274123.456789", Timestamp.ofEpochMilli(1420509274123L)),
+        new ValidTimestampCase("0", new Timestamp(0)),
+        new ValidTimestampCase("-1000000", new Timestamp(-1000000)),
+        new ValidTimestampCase("1420509274123", new Timestamp(1420509274123L)),
+        new ValidTimestampCase("1420509274123.456789", new Timestamp(1420509274123L)),
 
         // Other format pattern should also work
         new ValidTimestampCase("1945-12-31T23:59:59",
             Timestamp.valueOf("1945-12-31 23:59:59")),
-        new ValidTimestampCase("1945-12-31T23:59:59.12345",
-            Timestamp.valueOf("1945-12-31 23:59:59.12345")),
     };
 
     String[] invalidCases = {
         "1945-12-31-23:59:59",
+        "1945-12-31T23:59:59.12345", // our pattern didn't specify 5 decimal places
         "1420509274123-",
     };
 
@@ -169,11 +167,10 @@ public class TestTimestampParser {
             Timestamp.valueOf("1970-01-01 05:06:00")),
         new ValidTimestampCase("05:06:07",
             Timestamp.valueOf("1970-05-06 00:00:07")),
-        new ValidTimestampCase("1945-12-31T23:59:59",
-            Timestamp.valueOf("1945-12-31 23:59:59")),
     };
 
     String[] invalidCases = {
+        "1945-12-31T23:59:59",
         "1945:12:31-",
         "12345",
     };

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
index f0e12a2..a9e7837 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
@@ -19,21 +19,6 @@ package org.apache.hadoop.hive.druid.serde;
 
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.google.common.base.Function;
-import java.io.IOException;
-import java.io.InputStream;
-import java.time.Instant;
-import java.time.ZonedDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-import java.util.stream.Collectors;
-
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import io.druid.query.Druids;
@@ -42,10 +27,8 @@ import io.druid.query.metadata.metadata.ColumnAnalysis;
 import io.druid.query.metadata.metadata.SegmentAnalysis;
 import io.druid.query.metadata.metadata.SegmentMetadataQuery;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -59,13 +42,13 @@ import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -99,6 +82,23 @@ import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.io.InputStream;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.time.Instant;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.stream.Collectors;
+
 import static org.apache.hadoop.hive.druid.serde.DruidSerDeUtils.TIMESTAMP_FORMAT;
 import static org.joda.time.format.ISODateTimeFormat.dateOptionalTimeParser;
 
@@ -307,9 +307,9 @@ import static org.joda.time.format.ISODateTimeFormat.dateOptionalTimeParser;
       final Object res;
       switch (types[i].getPrimitiveCategory()) {
       case TIMESTAMP:
-        res = ((TimestampObjectInspector) fields.get(i).getFieldObjectInspector())
-            .getPrimitiveJavaObject(values.get(i)).toEpochMilli();
-          break;
+        res = ((TimestampObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i))
+            .getTime();
+        break;
       case TIMESTAMPLOCALTZ:
         res = ((TimestampLocalTZObjectInspector) fields.get(i).getFieldObjectInspector())
             .getPrimitiveJavaObject(values.get(i)).getZonedDateTime().toInstant().toEpochMilli();
@@ -330,24 +330,22 @@ import static org.joda.time.format.ISODateTimeFormat.dateOptionalTimeParser;
         res = ((FloatObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
         break;
       case DOUBLE:
-        res = ((DoubleObjectInspector) fields.get(i).getFieldObjectInspector())
-            .get(values.get(i));
+        res = ((DoubleObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
         break;
       case CHAR:
-        res = ((HiveCharObjectInspector) fields.get(i).getFieldObjectInspector())
-            .getPrimitiveJavaObject(values.get(i)).getValue();
+        res = ((HiveCharObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i))
+            .getValue();
         break;
       case VARCHAR:
-        res = ((HiveVarcharObjectInspector) fields.get(i).getFieldObjectInspector())
-            .getPrimitiveJavaObject(values.get(i)).getValue();
+        res =
+            ((HiveVarcharObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i))
+                .getValue();
         break;
       case STRING:
-        res = ((StringObjectInspector) fields.get(i).getFieldObjectInspector())
-            .getPrimitiveJavaObject(values.get(i));
+        res = ((StringObjectInspector) fields.get(i).getFieldObjectInspector()).getPrimitiveJavaObject(values.get(i));
         break;
       case BOOLEAN:
-        res = ((BooleanObjectInspector) fields.get(i).getFieldObjectInspector())
-            .get(values.get(i));
+        res = ((BooleanObjectInspector) fields.get(i).getFieldObjectInspector()).get(values.get(i));
         break;
       default:
         throw new SerDeException("Unsupported type: " + types[i].getPrimitiveCategory());
@@ -362,7 +360,8 @@ import static org.joda.time.format.ISODateTimeFormat.dateOptionalTimeParser;
         fields.get(granularityFieldIndex).getFieldName().equals(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME));
     value.put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME,
         ((TimestampObjectInspector) fields.get(granularityFieldIndex).getFieldObjectInspector())
-            .getPrimitiveJavaObject(values.get(granularityFieldIndex)).toEpochMilli());
+            .getPrimitiveJavaObject(values.get(granularityFieldIndex)).getTime()
+    );
     if (values.size() == columns.length + 2) {
       // Then partition number if any.
       final int partitionNumPos = granularityFieldIndex + 1;
@@ -396,11 +395,11 @@ import static org.joda.time.format.ISODateTimeFormat.dateOptionalTimeParser;
       switch (types[i].getPrimitiveCategory()) {
       case TIMESTAMP:
         if (value instanceof Number) {
-          output.add(new TimestampWritableV2(Timestamp.valueOf(
+          output.add(new TimestampWritable(Timestamp.valueOf(
               ZonedDateTime.ofInstant(Instant.ofEpochMilli(((Number) value).longValue()), tsTZTypeInfo.timeZone())
                   .format(DateTimeFormatter.ofPattern(TIMESTAMP_FORMAT)))));
         } else {
-          output.add(new TimestampWritableV2(Timestamp.valueOf((String) value)));
+          output.add(new TimestampWritable(Timestamp.valueOf((String) value)));
         }
 
         break;
@@ -418,14 +417,12 @@ import static org.joda.time.format.ISODateTimeFormat.dateOptionalTimeParser;
             ))));
         break;
       case DATE:
-        final DateWritableV2 dateWritable;
+        final DateWritable dateWritable;
         if (value instanceof Number) {
-          dateWritable = new DateWritableV2(
-              Date.ofEpochMilli((((Number) value).longValue())));
+          dateWritable = new DateWritable(new Date((((Number) value).longValue())));
         } else {
           // it is an extraction fn need to be parsed
-          dateWritable = new DateWritableV2(
-              Date.ofEpochMilli(dateOptionalTimeParser().parseDateTime((String) value).getMillis()));
+          dateWritable = new DateWritable(new Date(dateOptionalTimeParser().parseDateTime((String) value).getMillis()));
         }
         output.add(dateWritable);
         break;
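
The TIMESTAMP and DATE branches above are the core of this revert in the Druid SerDe: values go back to java.sql types, so every conversion between Druid's epoch-millisecond event time and Hive's writables passes through the JVM's default time zone. Below is a self-contained sketch of that round trip; the format pattern and the zone are assumptions standing in for DruidSerDeUtils.TIMESTAMP_FORMAT and tsTZTypeInfo.timeZone().

import java.sql.Timestamp;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

public class DruidTimestampRoundTrip {
  public static void main(String[] args) {
    // Assumed pattern; the real one comes from DruidSerDeUtils.TIMESTAMP_FORMAT.
    DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    ZoneId druidZone = ZoneId.of("UTC"); // stand-in for tsTZTypeInfo.timeZone()

    // Read path: Druid epoch millis -> wall-clock string in druidZone ->
    // java.sql.Timestamp; Timestamp.valueOf() re-parses it in the JVM default zone.
    long druidMillis = 1377907200000L; // 2013-08-31T00:00:00Z
    String wallClock =
        ZonedDateTime.ofInstant(Instant.ofEpochMilli(druidMillis), druidZone).format(fmt);
    Timestamp hiveTs = Timestamp.valueOf(wallClock);

    // Write path: back to epoch millis via getTime(). The round trip is exact
    // only when the JVM default zone agrees with druidZone at that instant.
    System.out.println(hiveTs.getTime() == druidMillis);
  }
}

That zone dependence is what HIVE-12192 removed and what this revert reinstates.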

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
index 060c654..922c1db 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
@@ -27,6 +27,7 @@ import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.lang.reflect.InvocationTargetException;
+import java.sql.Timestamp;
 import java.time.Instant;
 import java.time.ZoneOffset;
 import java.util.ArrayList;
@@ -34,11 +35,20 @@ import java.util.List;
 import java.util.Map.Entry;
 import java.util.Properties;
 
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.google.common.util.concurrent.SettableFuture;
+import com.metamx.http.client.HttpClient;
+import com.metamx.http.client.response.HttpResponseHandler;
+import io.druid.data.input.Row;
+import io.druid.query.Result;
+import io.druid.query.select.SelectResultValue;
+import io.druid.query.timeseries.TimeseriesResultValue;
+import io.druid.query.topn.TopNResultValue;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.druid.DruidStorageHandlerUtils;
@@ -52,10 +62,11 @@ import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -69,25 +80,17 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
+
 import org.junit.Before;
 import org.junit.Test;
 
 import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.SettableFuture;
-import com.metamx.http.client.HttpClient;
-import com.metamx.http.client.response.HttpResponseHandler;
 
-import io.druid.data.input.Row;
 import io.druid.query.Query;
-import io.druid.query.Result;
-import io.druid.query.select.SelectResultValue;
-import io.druid.query.timeseries.TimeseriesResultValue;
-import io.druid.query.topn.TopNResultValue;
 
 /**
  * Basic tests for Druid SerDe. The examples are taken from Druid 0.9.1.1
@@ -775,7 +778,7 @@ public class TestDruidSerDe {
       new IntWritable(1112123),
       new ShortWritable((short) 12),
       new ByteWritable((byte) 0),
-      new TimestampWritableV2(Timestamp.ofEpochSecond(1377907200L)) // granularity
+      new TimestampWritable(new Timestamp(1377907200000L)) // granularity
   };
   private static final DruidWritable DRUID_WRITABLE = new DruidWritable(
       ImmutableMap.<String, Object>builder()
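
The expected granularity value above reverts from Timestamp.ofEpochSecond(1377907200L) to new Timestamp(1377907200000L). A minimal plain-JDK sketch of what that constant denotes (no Hive classes needed):

import java.sql.Timestamp;
import java.time.Instant;

public class GranularityConstantDemo {
  public static void main(String[] args) {
    long millis = 1377907200000L;
    // The instant itself is zone-independent:
    System.out.println(Instant.ofEpochMilli(millis)); // 2013-08-31T00:00:00Z
    // java.sql.Timestamp wraps the same instant, but its toString() renders
    // it in the JVM's default zone, so the printed wall time varies by machine.
    System.out.println(new Timestamp(millis));
  }
}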

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java
index 3805779..bc4e146 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java
@@ -102,7 +102,7 @@ public class HBaseRowSerializer {
         timestamp = ((LongObjectInspector)inspector).get(value);
       } else {
         PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector;
-        timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).toEpochMilli();
+        timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getTime();
       }
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java
index 8b73bfb..05cc30a 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
 import org.apache.hadoop.hive.ql.exec.ExprNodeConstantEvaluator;
 import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
@@ -295,7 +296,7 @@ class HiveHBaseInputFormatUtil {
         timestamp = ((LongObjectInspector) inspector).get(value);
       } else {
         PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector;
-        timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).toEpochMilli();
+        timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getTime();
       }
     } catch (HiveException e) {
       throw new IOException(e);
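
Both HBase call sites (HBaseRowSerializer above and HiveHBaseInputFormatUtil here) revert to the same conversion: the column value is coerced to a timestamp, and getTime() yields the epoch milliseconds HBase uses as the cell timestamp. A stand-in sketch with plain JDK types, since exercising PrimitiveObjectInspectorUtils.getTimestamp() needs a live ObjectInspector:

import java.sql.Timestamp;

public class HBaseCellTimestampDemo {
  public static void main(String[] args) {
    // Stand-in for PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).
    Timestamp ts = Timestamp.valueOf("2018-06-25 10:38:09"); // parsed in the JVM zone
    long cellTimestamp = ts.getTime(); // epoch millis handed to HBase
    System.out.println(cellTimestamp);
  }
}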

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
index 2aeaa33..d94dbe8 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
@@ -23,7 +23,6 @@ import java.util.Arrays;
 import java.util.List;
 
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
 import org.apache.hadoop.hive.hbase.struct.HBaseValueFactory;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -162,8 +161,7 @@ public class LazyHBaseRow extends LazyStruct {
         }
         LazyObjectBase lz = fields[fieldID];
         if (lz instanceof LazyTimestamp) {
-          ((LazyTimestamp) lz).getWritableObject().set(
-              Timestamp.ofEpochMilli(timestamp));
+          ((LazyTimestamp) lz).getWritableObject().setTime(timestamp);
         } else {
           ((LazyLong) lz).getWritableObject().set(timestamp);
         }


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out
index aa6246d..c786684 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out
@@ -215,10 +215,10 @@ POSTHOOK: Input: default@part_change_various_various_struct1_n0@part=1
 POSTHOOK: Input: default@part_change_various_various_struct1_n0@part=2
 #### A masked pattern was here ####
 insert_num	part	s1	b
-1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}	original
-2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"}	original
-3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}	original
-4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}	original
+1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}	original
+2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}	original
+3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}	original
+4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}	original
 5	2	{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}	new
 6	1	{"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}	new
 PREHOOK: query: drop table part_change_various_various_struct1_n0
@@ -522,10 +522,10 @@ POSTHOOK: Input: default@part_add_various_various_struct2_n0@part=2
 insert_num	part	b	s2
 1	1	original	NULL
 2	1	original	NULL
-3	1	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
-4	1	new	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"}
-5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
-6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
+3	1	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
+4	1	new	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}
+5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
+6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
 7	2	new	{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}
 8	1	new	{"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}
 PREHOOK: query: drop table part_add_various_various_struct2_n0
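
The golden-file churn in this and the following q.out diffs follows one pattern: the same stored value is now rendered through the JVM's default time zone instead of UTC, so timestamps shift by the zone offset (the -8:00/-7:00 deltas suggest the test machines run in US Pacific time, an inference rather than anything the patch states; the extra multi-day shift on pre-Gregorian values such as 0004-09-24 is consistent with java.sql's hybrid Julian/Gregorian calendar). A minimal sketch of the zone part of the mechanism:

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

public class RenderShiftDemo {
  public static void main(String[] args) {
    DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS");
    Instant instant = Instant.ofEpochMilli(1L);
    System.out.println(fmt.format(instant.atZone(ZoneOffset.UTC)));
    // -> 1970-01-01 00:00:00.001 (the pre-revert expected value)
    System.out.println(fmt.format(instant.atZone(ZoneId.of("America/Los_Angeles"))));
    // -> 1969-12-31 16:00:00.001 (the post-revert expected value)
  }
}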

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out
index 92f6f3a..c835afd 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out
@@ -324,16 +324,16 @@ POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n0
 POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n0@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	c34	c35	c36	c37	c38	c39	c40	c41	c42	c43	c44	c45	c46	c47	c48	c49	c50	c51	c52	c53	b
-101	1	true	NULL	true	NULL	NULL	NULL	true	NULL	true	1	NULL	NULL	NULL	NULL	NULL	NULL	-128	-128	-128	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	NULL	1	-128	NULL	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	134416464868	original
+101	1	true	NULL	true	NULL	NULL	NULL	true	NULL	true	1	NULL	NULL	NULL	NULL	NULL	NULL	-128	-128	-128	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	NULL	1	-128	NULL	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	134416490068	original
 101	1	true	true	true	true	true	true	true	true	true	-128	-128	-128	-128	-128	-128	-128	-128	-128	-128	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 102	1	false	false	false	false	false	false	false	false	false	127	127	127	127	127	127	127	127	127	127	127	32767	32767	32767	32767	32767	32767	32767	32767	32767	32767	32767	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	new
-102	1	true	true	true	true	NULL	NULL	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	127	127	127	NULL	0	127	NULL	NULL	NULL	NULL	NULL	32767	32767	32767	NULL	0	127	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	NULL	0	127	32767	2147483647	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117919850	original
+102	1	true	true	true	true	NULL	NULL	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	127	127	127	NULL	0	127	NULL	NULL	NULL	NULL	NULL	32767	32767	32767	NULL	0	127	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	NULL	0	127	32767	2147483647	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117945050	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	true	true	true	true	true	true	true	NULL	true	1	NULL	NULL	NULL	-100	30	NULL	23	23	23	NULL	1	23	NULL	NULL	-100	30	NULL	834	834	834	NULL	1	23	834	NULL	-100	30	66475	203332	203332	203332	270887654	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270887654	original
+104	1	true	true	true	true	true	true	true	NULL	true	1	NULL	NULL	NULL	-100	30	NULL	23	23	23	NULL	1	23	NULL	NULL	-100	30	NULL	834	834	834	NULL	1	23	834	NULL	-100	30	66475	203332	203332	203332	270912854	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270912854	original
 104	1	true	true	true	true	true	true	true	true	true	23	23	23	23	23	23	23	23	23	23	23	834	834	834	834	834	834	834	834	834	834	834	203332	203332	203332	203332	203332	203332	203332	203332	203332	203332	203332	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	new
 105	1	false	false	false	false	false	false	false	false	false	-99	-99	-99	-99	-99	-99	-99	-99	-99	-99	-99	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	new
-105	1	true	true	true	true	NULL	true	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	-99	-99	-99	NULL	0	-99	NULL	NULL	NULL	NULL	NULL	-28300	-28300	-28300	NULL	0	-99	-28300	NULL	NULL	46114	9250340	-999992	-999992	-999992	663178839	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663178839	original
+105	1	true	true	true	true	NULL	true	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	-99	-99	-99	NULL	0	-99	NULL	NULL	NULL	NULL	NULL	-28300	-28300	-28300	NULL	0	-99	-28300	NULL	NULL	46114	9250340	-999992	-999992	-999992	663207639	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663207639	original
 PREHOOK: query: drop table part_change_various_various_boolean_to_bigint_n0
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_boolean_to_bigint_n0
@@ -576,11 +576,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n0
 POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n0@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	b
-101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416464868.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.34416466E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441646486897012E11	original
-102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117919850.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117921E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117919850597E11	original
+101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416490068.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.3441649E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441649006897012E11	original
+102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117945050.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117945E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117945050597E11	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270887654.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70887648E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35978	30.774	30.774	30.774	2.70887654E8	original
-105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663178839.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6317882E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.631788397203685E8	original
+104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270912854.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70912864E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35978	30.774	30.774	30.774	2.70912854E8	original
+105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663207639.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6320762E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.632076397203685E8	original
 111	1	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	new
 PREHOOK: query: drop table part_change_various_various_decimal_to_double_n0
 PREHOOK: type: DROPTABLE
@@ -750,11 +750,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n0
 POSTHOOK: Input: default@part_change_various_various_timestamp_n0@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	b
-101	1	1970-01-01 00:00:00.001	1969-12-31 23:59:59.872	NULL	1969-12-07 03:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 09:54:28.970117179	6229-06-28 09:54:28.97011	6229-06-28 09:54:28.97011	1950-12-18 08:00:00	original
-102	1	1970-01-01 00:00:00	1970-01-01 00:00:00.127	1970-01-01 00:00:32.767	1970-01-25 20:31:23.647	NULL	NULL	1970-01-01 00:00:00	NULL	5966-07-09 10:30:50.597	5966-07-09 10:30:50.597	5966-07-09 10:30:50.597	2049-12-18 08:00:00	original
+101	1	1969-12-31 16:00:00.001	1969-12-31 15:59:59.872	NULL	1969-12-06 19:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
+102	1	1969-12-31 16:00:00	1969-12-31 16:00:00.127	1969-12-31 16:00:32.767	1970-01-25 12:31:23.647	NULL	NULL	1969-12-31 16:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1970-01-01 00:00:00.001	1970-01-01 00:00:00.023	1970-01-01 00:00:00.834	1970-01-01 00:03:23.332	NULL	1969-12-31 23:58:19.640220643	1970-01-01 00:00:30.774	1970-01-01 18:27:55.561431	1978-08-02 13:34:14	1978-08-02 13:34:14	1978-08-02 13:34:14	2021-09-24 07:00:00	original
-105	1	1970-01-01 00:00:00	1969-12-31 23:59:59.901	1969-12-31 23:59:31.7	1969-12-31 23:43:20.008	1962-12-16 06:57:26.267	NULL	1970-01-01 12:48:34.28	1970-04-18 01:32:20.75	1991-01-07 00:20:39.72036854	1991-01-07 00:20:39.72036	1991-01-07 00:20:39.72036	2024-11-11 08:00:00	original
+104	1	1969-12-31 16:00:00.001	1969-12-31 16:00:00.023	1969-12-31 16:00:00.834	1969-12-31 16:03:23.332	NULL	1969-12-31 15:58:19.640220643	1969-12-31 16:00:30.774	1970-01-01 10:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
+105	1	1969-12-31 16:00:00	1969-12-31 15:59:59.901	1969-12-31 15:59:31.7	1969-12-31 15:43:20.008	1962-12-15 22:57:26.267	NULL	1970-01-01 04:48:34.28	1970-04-17 17:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_various_various_timestamp_n0
 PREHOOK: type: DROPTABLE
@@ -908,10 +908,10 @@ POSTHOOK: Input: default@part_change_various_various_date_n0
 POSTHOOK: Input: default@part_change_various_various_date_n0@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	b
-101	1	1950-12-18	1950-12-18	1950-12-18	6229-06-27	original
-102	1	2049-12-18	2049-12-18	2049-12-18	5966-07-08	original
+101	1	1950-12-18	1950-12-18	1950-12-18	6229-06-28	original
+102	1	2049-12-18	2049-12-18	2049-12-18	5966-07-09	original
 103	1	NULL	NULL	NULL	NULL	original
-104	1	2021-09-24	2021-09-24	2021-09-24	1978-08-01	original
+104	1	2021-09-24	2021-09-24	2021-09-24	1978-08-02	original
 105	1	2024-11-11	2024-11-11	2024-11-11	1991-01-06	original
 111	1	1964-01-24	1964-01-24	1964-01-24	1964-01-24	new
 PREHOOK: query: drop table part_change_various_various_date_n0
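
The DATE columns above shift by at most a day rather than by a zone offset: Hive's DateWritable stores days since the epoch, and when those days are derived from epoch milliseconds through the local zone instead of UTC, an instant near midnight lands on the neighboring calendar day. A minimal sketch:

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;

public class DateShiftDemo {
  public static void main(String[] args) {
    // 1970-01-01T03:00:00Z: already Jan 1 in UTC, still Dec 31 at UTC-8.
    Instant instant = Instant.ofEpochMilli(3L * 3600 * 1000);
    System.out.println(instant.atZone(ZoneOffset.UTC).toLocalDate());                   // 1970-01-01
    System.out.println(instant.atZone(ZoneId.of("America/Los_Angeles")).toLocalDate()); // 1969-12-31
  }
}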

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out
index 6cfbb2f..9c2460f 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out
@@ -325,16 +325,16 @@ POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n4
 POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n4@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	c34	c35	c36	c37	c38	c39	c40	c41	c42	c43	c44	c45	c46	c47	c48	c49	c50	c51	c52	c53	b
-101	1	true	NULL	true	NULL	NULL	NULL	true	NULL	true	1	NULL	NULL	NULL	NULL	NULL	NULL	-128	-128	-128	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	NULL	1	-128	NULL	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	134416464868	original
+101	1	true	NULL	true	NULL	NULL	NULL	true	NULL	true	1	NULL	NULL	NULL	NULL	NULL	NULL	-128	-128	-128	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	NULL	1	-128	NULL	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	134416490068	original
 101	1	true	true	true	true	true	true	true	true	true	-128	-128	-128	-128	-128	-128	-128	-128	-128	-128	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 102	1	false	false	false	false	false	false	false	false	false	127	127	127	127	127	127	127	127	127	127	127	32767	32767	32767	32767	32767	32767	32767	32767	32767	32767	32767	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	new
-102	1	true	true	true	true	NULL	NULL	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	127	127	127	NULL	0	127	NULL	NULL	NULL	NULL	NULL	32767	32767	32767	NULL	0	127	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	NULL	0	127	32767	2147483647	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117919850	original
+102	1	true	true	true	true	NULL	NULL	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	127	127	127	NULL	0	127	NULL	NULL	NULL	NULL	NULL	32767	32767	32767	NULL	0	127	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	NULL	0	127	32767	2147483647	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117945050	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	true	true	true	true	true	true	true	NULL	true	1	NULL	NULL	NULL	-100	30	NULL	23	23	23	NULL	1	23	NULL	NULL	-100	30	NULL	834	834	834	NULL	1	23	834	NULL	-100	30	66475	203332	203332	203332	270887654	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270887654	original
+104	1	true	true	true	true	true	true	true	NULL	true	1	NULL	NULL	NULL	-100	30	NULL	23	23	23	NULL	1	23	NULL	NULL	-100	30	NULL	834	834	834	NULL	1	23	834	NULL	-100	30	66475	203332	203332	203332	270912854	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270912854	original
 104	1	true	true	true	true	true	true	true	true	true	23	23	23	23	23	23	23	23	23	23	23	834	834	834	834	834	834	834	834	834	834	834	203332	203332	203332	203332	203332	203332	203332	203332	203332	203332	203332	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	new
 105	1	false	false	false	false	false	false	false	false	false	-99	-99	-99	-99	-99	-99	-99	-99	-99	-99	-99	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	new
-105	1	true	true	true	true	NULL	true	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	-99	-99	-99	NULL	0	-99	NULL	NULL	NULL	NULL	NULL	-28300	-28300	-28300	NULL	0	-99	-28300	NULL	NULL	46114	9250340	-999992	-999992	-999992	663178839	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663178839	original
+105	1	true	true	true	true	NULL	true	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	-99	-99	-99	NULL	0	-99	NULL	NULL	NULL	NULL	NULL	-28300	-28300	-28300	NULL	0	-99	-28300	NULL	NULL	46114	9250340	-999992	-999992	-999992	663207639	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663207639	original
 PREHOOK: query: drop table part_change_various_various_boolean_to_bigint_n4
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_boolean_to_bigint_n4
@@ -578,11 +578,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n4
 POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n4@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	b
-101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416464868.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.34416466E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441646486897012E11	original
-102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117919850.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117921E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117919850597E11	original
+101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416490068.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.3441649E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441649006897012E11	original
+102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117945050.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117945E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117945050597E11	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270887654.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70887648E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35978	30.774	30.774	30.774	2.70887654E8	original
-105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663178839.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6317882E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.631788397203685E8	original
+104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270912854.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70912864E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35978	30.774	30.774	30.774	2.70912854E8	original
+105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663207639.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6320762E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.632076397203685E8	original
 111	1	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	new
 PREHOOK: query: drop table part_change_various_various_decimal_to_double_n4
 PREHOOK: type: DROPTABLE
@@ -753,11 +753,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n4
 POSTHOOK: Input: default@part_change_various_various_timestamp_n4@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	b
-101	1	1970-01-01 00:00:00.001	1969-12-31 23:59:59.872	NULL	1969-12-07 03:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 09:54:28.970117179	6229-06-28 09:54:28.97011	6229-06-28 09:54:28.97011	1950-12-18 08:00:00	original
-102	1	1970-01-01 00:00:00	1970-01-01 00:00:00.127	1970-01-01 00:00:32.767	1970-01-25 20:31:23.647	NULL	NULL	1970-01-01 00:00:00	NULL	5966-07-09 10:30:50.597	5966-07-09 10:30:50.597	5966-07-09 10:30:50.597	2049-12-18 08:00:00	original
+101	1	1969-12-31 16:00:00.001	1969-12-31 15:59:59.872	NULL	1969-12-06 19:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
+102	1	1969-12-31 16:00:00	1969-12-31 16:00:00.127	1969-12-31 16:00:32.767	1970-01-25 12:31:23.647	NULL	NULL	1969-12-31 16:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1970-01-01 00:00:00.001	1970-01-01 00:00:00.023	1970-01-01 00:00:00.834	1970-01-01 00:03:23.332	NULL	1969-12-31 23:58:19.640220643	1970-01-01 00:00:30.774	1970-01-01 18:27:55.561431	1978-08-02 13:34:14	1978-08-02 13:34:14	1978-08-02 13:34:14	2021-09-24 07:00:00	original
-105	1	1970-01-01 00:00:00	1969-12-31 23:59:59.901	1969-12-31 23:59:31.7	1969-12-31 23:43:20.008	1962-12-16 06:57:26.267	NULL	1970-01-01 12:48:34.28	1970-04-18 01:32:20.75	1991-01-07 00:20:39.72036854	1991-01-07 00:20:39.72036	1991-01-07 00:20:39.72036	2024-11-11 08:00:00	original
+104	1	1969-12-31 16:00:00.001	1969-12-31 16:00:00.023	1969-12-31 16:00:00.834	1969-12-31 16:03:23.332	NULL	1969-12-31 15:58:19.640220643	1969-12-31 16:00:30.774	1970-01-01 10:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
+105	1	1969-12-31 16:00:00	1969-12-31 15:59:59.901	1969-12-31 15:59:31.7	1969-12-31 15:43:20.008	1962-12-15 22:57:26.267	NULL	1970-01-01 04:48:34.28	1970-04-17 17:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_various_various_timestamp_n4
 PREHOOK: type: DROPTABLE
@@ -912,10 +912,10 @@ POSTHOOK: Input: default@part_change_various_various_date_n4
 POSTHOOK: Input: default@part_change_various_various_date_n4@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	b
-101	1	1950-12-18	1950-12-18	1950-12-18	6229-06-27	original
-102	1	2049-12-18	2049-12-18	2049-12-18	5966-07-08	original
+101	1	1950-12-18	1950-12-18	1950-12-18	6229-06-28	original
+102	1	2049-12-18	2049-12-18	2049-12-18	5966-07-09	original
 103	1	NULL	NULL	NULL	NULL	original
-104	1	2021-09-24	2021-09-24	2021-09-24	1978-08-01	original
+104	1	2021-09-24	2021-09-24	2021-09-24	1978-08-02	original
 105	1	2024-11-11	2024-11-11	2024-11-11	1991-01-06	original
 111	1	1964-01-24	1964-01-24	1964-01-24	1964-01-24	new
 PREHOOK: query: drop table part_change_various_various_date_n4

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out
index ff23f05..6973081 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out
@@ -606,11 +606,11 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n3
 #### A masked pattern was here ####
 insert_num	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table table_change_date_group_string_group_date_group_n3
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out
index 97c8730..51d72d7 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out
@@ -609,11 +609,11 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n5
 #### A masked pattern was here ####
 insert_num	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table table_change_date_group_string_group_date_group_n5
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive.q.out
index 29054b1..f0e2888 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive.q.out
@@ -298,15 +298,15 @@ POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n2@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	c34	c35	c36	c37	c38	c39	c40	c41	c42	c43	c44	c45	c46	c47	c48	c49	c50	c51	c52	c53	b
 101	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	NULL	-128	-128	-128	-128	-128	-128	-128	-128	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
-101	1	true	NULL	true	NULL	true	true	true	true	true	1	NULL	0	NULL	-1	-1	NULL	-128	-128	-128	-28	1	-128	0	NULL	-1	-1	NULL	NULL	NULL	NULL	31716	1	-128	NULL	NULL	2147483647	2147483647	NULL	-2147483648	-2147483648	-2147483648	1272478692	1	-128	NULL	-2147483648	9223372036854775807	9223372036854775807	NULL	NULL	NULL	NULL	134416464868	original
+101	1	true	NULL	true	NULL	true	true	true	true	true	1	NULL	0	NULL	-1	-1	NULL	-128	-128	-128	84	1	-128	0	NULL	-1	-1	NULL	NULL	NULL	NULL	-8620	1	-128	NULL	NULL	2147483647	2147483647	NULL	-2147483648	-2147483648	-2147483648	1272503892	1	-128	NULL	-2147483648	9223372036854775807	9223372036854775807	NULL	NULL	NULL	NULL	134416490068	original
 102	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	127	127	127	127	127	127	127	127	127	NULL	NULL	NULL	32767	32767	32767	32767	32767	32767	32767	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	NULL	NULL	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	NULL	new
-102	1	true	true	true	true	true	true	true	false	true	0	-1	-1	-1	0	0	NULL	127	127	127	106	0	127	-1	-1	0	0	NULL	32767	32767	32767	-17302	0	127	32767	-1	-2147483648	-2147483648	NULL	2147483647	2147483647	2147483647	1563868266	0	127	32767	2147483647	-9223372036854775808	-9223372036854775808	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117919850	original
+102	1	true	true	true	true	true	true	true	false	true	0	-1	-1	-1	0	0	NULL	127	127	127	-38	0	127	-1	-1	0	0	NULL	32767	32767	32767	7898	0	127	32767	-1	-2147483648	-2147483648	NULL	2147483647	2147483647	2147483647	1563893466	0	127	32767	2147483647	-9223372036854775808	-9223372036854775808	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117945050	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
 104	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	NULL	23	23	23	23	23	23	23	23	23	NULL	NULL	NULL	834	834	834	834	834	834	834	834	NULL	NULL	NULL	NULL	203332	203332	203332	203332	203332	203332	203332	NULL	NULL	NULL	NULL	NULL	888888847499264	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	NULL	new
-104	1	true	true	true	true	true	true	true	true	true	1	66	68	-106	-100	30	NULL	23	23	23	-26	1	23	6724	3734	-100	30	NULL	834	834	834	27366	1	23	834	-1868624234	-100	30	66475	203332	203332	203332	270887654	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270887654	original
+104	1	true	true	true	true	true	true	true	true	true	1	66	68	-106	-100	30	NULL	23	23	23	86	1	23	6724	3734	-100	30	NULL	834	834	834	-12970	1	23	834	-1868624234	-100	30	66475	203332	203332	203332	270912854	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270912854	original
 105	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	-99	-99	-99	-99	-99	-99	-99	-99	-99	NULL	NULL	NULL	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	NULL	NULL	NULL	NULL	-999992	-999992	-999992	-999992	-999992	-999992	-999992	NULL	NULL	NULL	NULL	NULL	-222282153984	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	NULL	new
-105	1	true	true	true	true	NULL	true	true	false	true	0	116	-56	-5	NULL	34	NULL	-99	-99	-99	87	0	-99	-16952	-32517	NULL	-19422	NULL	-28300	-28300	-28300	20055	0	-99	-28300	1056145659	NULL	46114	9250340	-999992	-999992	-999992	663178839	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663178839	original
+105	1	true	true	true	true	NULL	true	true	false	true	0	116	-56	-5	NULL	34	NULL	-99	-99	-99	-41	0	-99	-16952	-32517	NULL	-19422	NULL	-28300	-28300	-28300	-16681	0	-99	-28300	1056145659	NULL	46114	9250340	-999992	-999992	-999992	663207639	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663207639	original
 PREHOOK: query: drop table part_change_various_various_boolean_to_bigint_n2
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_boolean_to_bigint_n2
@@ -522,11 +522,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n2
 POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n2@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	b
-101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416464868.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.34416466E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441646486897012E11	original
-102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117919850.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117921E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117919850597E11	original
+101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416490068.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.3441649E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441649006897012E11	original
+102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117945050.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117945E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117945050597E11	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270887654.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70887648E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35977935791016	30.774	30.774	30.774	2.70887654E8	original
-105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663178839.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6317882E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.631788397203685E8	original
+104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270912854.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70912864E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35977935791016	30.774	30.774	30.774	2.70912854E8	original
+105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663207639.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6320762E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.632076397203685E8	original
 111	1	NULL	NULL	NULL	-46114.000000000000000000	-46114.000000000000000000	-46114.285000000000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	NULL	NULL	NULL	NULL	NULL	NULL	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	NULL	NULL	NULL	NULL	NULL	NULL	-9.0E-8	-9.000000034120603E-8	-9.0E-8	-9.0E-8	-9.0E-8	NULL	new
 PREHOOK: query: drop table part_change_various_various_decimal_to_double_n2
 PREHOOK: type: DROPTABLE
@@ -669,11 +669,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n2
 POSTHOOK: Input: default@part_change_various_various_timestamp_n2@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	b
-101	1	1970-01-01 00:00:00.001	1969-12-31 23:59:59.872	NULL	1969-12-07 03:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
-102	1	1970-01-01 00:00:00	1970-01-01 00:00:00.127	1970-01-01 00:00:32.767	1970-01-25 20:31:23.647	NULL	NULL	1970-01-01 00:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
+101	1	1969-12-31 16:00:00.001	1969-12-31 15:59:59.872	NULL	1969-12-06 19:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
+102	1	1969-12-31 16:00:00	1969-12-31 16:00:00.127	1969-12-31 16:00:32.767	1970-01-25 12:31:23.647	NULL	NULL	1969-12-31 16:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1970-01-01 00:00:00.001	1970-01-01 00:00:00.023	1970-01-01 00:00:00.834	1970-01-01 00:03:23.332	NULL	1969-12-31 23:58:19.640220643	1970-01-01 00:00:30.774	1970-01-01 18:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
-105	1	1970-01-01 00:00:00	1969-12-31 23:59:59.901	1969-12-31 23:59:31.7	1969-12-31 23:43:20.008	1962-12-16 06:57:26.267	NULL	1970-01-01 12:48:34.28	1970-04-18 01:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
+104	1	1969-12-31 16:00:00.001	1969-12-31 16:00:00.023	1969-12-31 16:00:00.834	1969-12-31 16:03:23.332	NULL	1969-12-31 15:58:19.640220643	1969-12-31 16:00:30.774	1970-01-01 10:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
+105	1	1969-12-31 16:00:00	1969-12-31 15:59:59.901	1969-12-31 15:59:31.7	1969-12-31 15:43:20.008	1962-12-15 22:57:26.267	NULL	1970-01-01 04:48:34.28	1970-04-17 17:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_various_various_timestamp_n2
 PREHOOK: type: DROPTABLE

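The eight-hour shift in the expected timestamp strings above (1970-01-01 00:00:00 becoming 1969-12-31 16:00:00, and so on) is the visible effect of the revert: java.sql.Timestamp#toString renders in the JVM default time zone instead of UTC. A minimal sketch of that rendering difference, assuming the golden files were produced by a JVM in US/Pacific (the setDefault call below only reproduces that assumption; it is not part of the patch):

    import java.sql.Timestamp;
    import java.util.TimeZone;

    public class EpochRenderingSketch {
      public static void main(String[] args) {
        // Assumption: the golden files come from a JVM running in US/Pacific.
        TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
        Timestamp t = new Timestamp(1L); // one millisecond after the epoch
        // Prints 1969-12-31 16:00:00.001 -- the local rendering of
        // 1970-01-01 00:00:00.001 UTC (US/Pacific is UTC-8 in December).
        System.out.println(t);
      }
    }
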
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive_llap_io.q.out
index 003457b..ee65e41 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_part_all_primitive_llap_io.q.out
@@ -344,15 +344,15 @@ POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n1@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	c34	c35	c36	c37	c38	c39	c40	c41	c42	c43	c44	c45	c46	c47	c48	c49	c50	c51	c52	c53	b
 101	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	NULL	-128	-128	-128	-128	-128	-128	-128	-128	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
-101	1	true	NULL	true	NULL	true	true	true	true	true	1	NULL	0	NULL	-1	-1	NULL	-128	-128	-128	-28	1	-128	0	NULL	-1	-1	NULL	NULL	NULL	NULL	31716	1	-128	NULL	NULL	2147483647	2147483647	NULL	-2147483648	-2147483648	-2147483648	1272478692	1	-128	NULL	-2147483648	9223372036854775807	9223372036854775807	NULL	NULL	NULL	NULL	134416464868	original
+101	1	true	NULL	true	NULL	true	true	true	true	true	1	NULL	0	NULL	-1	-1	NULL	-128	-128	-128	84	1	-128	0	NULL	-1	-1	NULL	NULL	NULL	NULL	-8620	1	-128	NULL	NULL	2147483647	2147483647	NULL	-2147483648	-2147483648	-2147483648	1272503892	1	-128	NULL	-2147483648	9223372036854775807	9223372036854775807	NULL	NULL	NULL	NULL	134416490068	original
 102	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	127	127	127	127	127	127	127	127	127	NULL	NULL	NULL	32767	32767	32767	32767	32767	32767	32767	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	NULL	NULL	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	NULL	new
-102	1	true	true	true	true	true	true	true	false	true	0	-1	-1	-1	0	0	NULL	127	127	127	106	0	127	-1	-1	0	0	NULL	32767	32767	32767	-17302	0	127	32767	-1	-2147483648	-2147483648	NULL	2147483647	2147483647	2147483647	1563868266	0	127	32767	2147483647	-9223372036854775808	-9223372036854775808	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117919850	original
+102	1	true	true	true	true	true	true	true	false	true	0	-1	-1	-1	0	0	NULL	127	127	127	-38	0	127	-1	-1	0	0	NULL	32767	32767	32767	7898	0	127	32767	-1	-2147483648	-2147483648	NULL	2147483647	2147483647	2147483647	1563893466	0	127	32767	2147483647	-9223372036854775808	-9223372036854775808	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117945050	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
 104	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	NULL	23	23	23	23	23	23	23	23	23	NULL	NULL	NULL	834	834	834	834	834	834	834	834	NULL	NULL	NULL	NULL	203332	203332	203332	203332	203332	203332	203332	NULL	NULL	NULL	NULL	NULL	888888847499264	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	NULL	new
-104	1	true	true	true	true	true	true	true	true	true	1	66	68	-106	-100	30	NULL	23	23	23	-26	1	23	6724	3734	-100	30	NULL	834	834	834	27366	1	23	834	-1868624234	-100	30	66475	203332	203332	203332	270887654	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270887654	original
+104	1	true	true	true	true	true	true	true	true	true	1	66	68	-106	-100	30	NULL	23	23	23	86	1	23	6724	3734	-100	30	NULL	834	834	834	-12970	1	23	834	-1868624234	-100	30	66475	203332	203332	203332	270912854	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270912854	original
 105	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	-99	-99	-99	-99	-99	-99	-99	-99	-99	NULL	NULL	NULL	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	NULL	NULL	NULL	NULL	-999992	-999992	-999992	-999992	-999992	-999992	-999992	NULL	NULL	NULL	NULL	NULL	-222282153984	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	NULL	new
-105	1	true	true	true	true	NULL	true	true	false	true	0	116	-56	-5	NULL	34	NULL	-99	-99	-99	87	0	-99	-16952	-32517	NULL	-19422	NULL	-28300	-28300	-28300	20055	0	-99	-28300	1056145659	NULL	46114	9250340	-999992	-999992	-999992	663178839	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663178839	original
+105	1	true	true	true	true	NULL	true	true	false	true	0	116	-56	-5	NULL	34	NULL	-99	-99	-99	-41	0	-99	-16952	-32517	NULL	-19422	NULL	-28300	-28300	-28300	-16681	0	-99	-28300	1056145659	NULL	46114	9250340	-999992	-999992	-999992	663207639	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663207639	original
 PREHOOK: query: drop table part_change_various_various_boolean_to_bigint_n1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_boolean_to_bigint_n1
@@ -614,11 +614,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n1
 POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n1@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	b
-101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416464868.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.34416466E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441646486897012E11	original
-102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117919850.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117921E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117919850597E11	original
+101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416490068.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.3441649E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441649006897012E11	original
+102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117945050.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117945E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117945050597E11	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270887654.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70887648E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35977935791016	30.774	30.774	30.774	2.70887654E8	original
-105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663178839.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6317882E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.631788397203685E8	original
+104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270912854.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70912864E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35977935791016	30.774	30.774	30.774	2.70912854E8	original
+105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663207639.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6320762E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.632076397203685E8	original
 111	1	NULL	NULL	NULL	-46114.000000000000000000	-46114.000000000000000000	-46114.285000000000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	NULL	NULL	NULL	NULL	NULL	NULL	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	NULL	NULL	NULL	NULL	NULL	NULL	-9.0E-8	-9.000000034120603E-8	-9.0E-8	-9.0E-8	-9.0E-8	NULL	new
 PREHOOK: query: drop table part_change_various_various_decimal_to_double_n1
 PREHOOK: type: DROPTABLE
@@ -807,11 +807,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n1
 POSTHOOK: Input: default@part_change_various_various_timestamp_n1@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	b
-101	1	1970-01-01 00:00:00.001	1969-12-31 23:59:59.872	NULL	1969-12-07 03:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
-102	1	1970-01-01 00:00:00	1970-01-01 00:00:00.127	1970-01-01 00:00:32.767	1970-01-25 20:31:23.647	NULL	NULL	1970-01-01 00:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
+101	1	1969-12-31 16:00:00.001	1969-12-31 15:59:59.872	NULL	1969-12-06 19:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
+102	1	1969-12-31 16:00:00	1969-12-31 16:00:00.127	1969-12-31 16:00:32.767	1970-01-25 12:31:23.647	NULL	NULL	1969-12-31 16:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1970-01-01 00:00:00.001	1970-01-01 00:00:00.023	1970-01-01 00:00:00.834	1970-01-01 00:03:23.332	NULL	1969-12-31 23:58:19.640220643	1970-01-01 00:00:30.774	1970-01-01 18:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
-105	1	1970-01-01 00:00:00	1969-12-31 23:59:59.901	1969-12-31 23:59:31.7	1969-12-31 23:43:20.008	1962-12-16 06:57:26.267	NULL	1970-01-01 12:48:34.28	1970-04-18 01:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
+104	1	1969-12-31 16:00:00.001	1969-12-31 16:00:00.023	1969-12-31 16:00:00.834	1969-12-31 16:03:23.332	NULL	1969-12-31 15:58:19.640220643	1969-12-31 16:00:30.774	1970-01-01 10:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
+105	1	1969-12-31 16:00:00	1969-12-31 15:59:59.901	1969-12-31 15:59:31.7	1969-12-31 15:43:20.008	1962-12-15 22:57:26.267	NULL	1970-01-01 04:48:34.28	1970-04-17 17:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_various_various_timestamp_n1
 PREHOOK: type: DROPTABLE

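The bigint, decimal, and double columns in these files move by a constant 25200 seconds (seven hours) rather than eight, e.g. 270887654 becoming 270912854: those values come from timestamps that fall in US/Pacific daylight time, and after the revert the wall-clock literal is interpreted in the local zone before being cast to seconds. A minimal sketch of that parse-side difference, again assuming a US/Pacific JVM:

    import java.sql.Timestamp;
    import java.time.LocalDateTime;
    import java.time.ZoneOffset;
    import java.util.TimeZone;

    public class ParseOffsetSketch {
      public static void main(String[] args) {
        // Assumption: the golden files come from a JVM running in US/Pacific.
        TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
        // Reverted behavior: Timestamp.valueOf parses in the JVM default zone.
        long localSecs = Timestamp.valueOf("1978-08-02 06:34:14").getTime() / 1000L;
        // Reverted-away behavior: the same wall clock taken as UTC.
        long utcSecs = LocalDateTime.parse("1978-08-02T06:34:14").toEpochSecond(ZoneOffset.UTC);
        System.out.println(localSecs - utcSecs); // 25200, i.e. seven hours (PDT is UTC-7)
      }
    }
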
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive.q.out
index 5c49695..512f492 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive.q.out
@@ -324,15 +324,15 @@ POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n8@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	c34	c35	c36	c37	c38	c39	c40	c41	c42	c43	c44	c45	c46	c47	c48	c49	c50	c51	c52	c53	b
 101	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	NULL	-128	-128	-128	-128	-128	-128	-128	-128	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
-101	1	true	NULL	true	NULL	true	true	true	true	true	1	NULL	0	NULL	-1	-1	NULL	-128	-128	-128	-28	1	-128	0	NULL	-1	-1	NULL	NULL	NULL	NULL	31716	1	-128	NULL	NULL	2147483647	2147483647	NULL	-2147483648	-2147483648	-2147483648	1272478692	1	-128	NULL	-2147483648	9223372036854775807	9223372036854775807	NULL	NULL	NULL	NULL	134416464868	original
+101	1	true	NULL	true	NULL	true	true	true	true	true	1	NULL	0	NULL	-1	-1	NULL	-128	-128	-128	84	1	-128	0	NULL	-1	-1	NULL	NULL	NULL	NULL	-8620	1	-128	NULL	NULL	2147483647	2147483647	NULL	-2147483648	-2147483648	-2147483648	1272503892	1	-128	NULL	-2147483648	9223372036854775807	9223372036854775807	NULL	NULL	NULL	NULL	134416490068	original
 102	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	127	127	127	127	127	127	127	127	127	NULL	NULL	NULL	32767	32767	32767	32767	32767	32767	32767	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	NULL	NULL	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	NULL	new
-102	1	true	true	true	true	true	true	true	false	true	0	-1	-1	-1	0	0	NULL	127	127	127	106	0	127	-1	-1	0	0	NULL	32767	32767	32767	-17302	0	127	32767	-1	-2147483648	-2147483648	NULL	2147483647	2147483647	2147483647	1563868266	0	127	32767	2147483647	-9223372036854775808	-9223372036854775808	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117919850	original
+102	1	true	true	true	true	true	true	true	false	true	0	-1	-1	-1	0	0	NULL	127	127	127	-38	0	127	-1	-1	0	0	NULL	32767	32767	32767	7898	0	127	32767	-1	-2147483648	-2147483648	NULL	2147483647	2147483647	2147483647	1563893466	0	127	32767	2147483647	-9223372036854775808	-9223372036854775808	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117945050	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
 104	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	NULL	23	23	23	23	23	23	23	23	23	NULL	NULL	NULL	834	834	834	834	834	834	834	834	NULL	NULL	NULL	NULL	203332	203332	203332	203332	203332	203332	203332	NULL	NULL	NULL	NULL	NULL	888888847499264	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	NULL	new
-104	1	true	true	true	true	true	true	true	true	true	1	66	68	-106	-100	30	NULL	23	23	23	-26	1	23	6724	3734	-100	30	NULL	834	834	834	27366	1	23	834	-1868624234	-100	30	66475	203332	203332	203332	270887654	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270887654	original
+104	1	true	true	true	true	true	true	true	true	true	1	66	68	-106	-100	30	NULL	23	23	23	86	1	23	6724	3734	-100	30	NULL	834	834	834	-12970	1	23	834	-1868624234	-100	30	66475	203332	203332	203332	270912854	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270912854	original
 105	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	-99	-99	-99	-99	-99	-99	-99	-99	-99	NULL	NULL	NULL	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	NULL	NULL	NULL	NULL	-999992	-999992	-999992	-999992	-999992	-999992	-999992	NULL	NULL	NULL	NULL	NULL	-222282153984	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	NULL	new
-105	1	true	true	true	true	NULL	true	true	false	true	0	116	-56	-5	NULL	34	NULL	-99	-99	-99	87	0	-99	-16952	-32517	NULL	-19422	NULL	-28300	-28300	-28300	20055	0	-99	-28300	1056145659	NULL	46114	9250340	-999992	-999992	-999992	663178839	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663178839	original
+105	1	true	true	true	true	NULL	true	true	false	true	0	116	-56	-5	NULL	34	NULL	-99	-99	-99	-41	0	-99	-16952	-32517	NULL	-19422	NULL	-28300	-28300	-28300	-16681	0	-99	-28300	1056145659	NULL	46114	9250340	-999992	-999992	-999992	663207639	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663207639	original
 PREHOOK: query: drop table part_change_various_various_boolean_to_bigint_n8
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_boolean_to_bigint_n8
@@ -574,11 +574,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n8
 POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n8@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	b
-101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416464868.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.34416466E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441646486897012E11	original
-102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117919850.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117921E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117919850597E11	original
+101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416490068.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.3441649E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441649006897012E11	original
+102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117945050.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117945E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117945050597E11	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270887654.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70887648E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35977935791016	30.774	30.774	30.774	2.70887654E8	original
-105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663178839.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6317882E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.631788397203685E8	original
+104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270912854.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70912864E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35977935791016	30.774	30.774	30.774	2.70912854E8	original
+105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663207639.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6320762E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.632076397203685E8	original
 111	1	NULL	NULL	NULL	-46114.000000000000000000	-46114.000000000000000000	-46114.285000000000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	NULL	NULL	NULL	NULL	NULL	NULL	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	NULL	NULL	NULL	NULL	NULL	NULL	-9.0E-8	-9.000000034120603E-8	-9.0E-8	-9.0E-8	-9.0E-8	NULL	new
 PREHOOK: query: drop table part_change_various_various_decimal_to_double_n8
 PREHOOK: type: DROPTABLE
@@ -747,11 +747,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n8
 POSTHOOK: Input: default@part_change_various_various_timestamp_n8@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	b
-101	1	1970-01-01 00:00:00.001	1969-12-31 23:59:59.872	NULL	1969-12-07 03:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
-102	1	1970-01-01 00:00:00	1970-01-01 00:00:00.127	1970-01-01 00:00:32.767	1970-01-25 20:31:23.647	NULL	NULL	1970-01-01 00:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
+101	1	1969-12-31 16:00:00.001	1969-12-31 15:59:59.872	NULL	1969-12-06 19:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
+102	1	1969-12-31 16:00:00	1969-12-31 16:00:00.127	1969-12-31 16:00:32.767	1970-01-25 12:31:23.647	NULL	NULL	1969-12-31 16:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1970-01-01 00:00:00.001	1970-01-01 00:00:00.023	1970-01-01 00:00:00.834	1970-01-01 00:03:23.332	NULL	1969-12-31 23:58:19.640220643	1970-01-01 00:00:30.774	1970-01-01 18:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
-105	1	1970-01-01 00:00:00	1969-12-31 23:59:59.901	1969-12-31 23:59:31.7	1969-12-31 23:43:20.008	1962-12-16 06:57:26.267	NULL	1970-01-01 12:48:34.28	1970-04-18 01:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
+104	1	1969-12-31 16:00:00.001	1969-12-31 16:00:00.023	1969-12-31 16:00:00.834	1969-12-31 16:03:23.332	NULL	1969-12-31 15:58:19.640220643	1969-12-31 16:00:30.774	1970-01-01 10:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
+105	1	1969-12-31 16:00:00	1969-12-31 15:59:59.901	1969-12-31 15:59:31.7	1969-12-31 15:43:20.008	1962-12-15 22:57:26.267	NULL	1970-01-01 04:48:34.28	1970-04-17 17:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_various_various_timestamp_n8
 PREHOOK: type: DROPTABLE


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
index 6fd8e09..f163289 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
@@ -23,7 +23,7 @@ import org.junit.Test;
 import java.sql.Timestamp;
 import java.util.Random;
 
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.util.TimestampUtils;
 
 import static org.junit.Assert.*;
@@ -45,7 +45,7 @@ public class TestTimestampWritableAndColumnVector {
     Timestamp[] randTimestamps = new Timestamp[VectorizedRowBatch.DEFAULT_SIZE];
 
     for (int i = 0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
-      Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r).toSqlTimestamp();
+      Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r);
       randTimestamps[i] = randTimestamp;
       timestampColVector.set(i, randTimestamp);
     }

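The hunk above swaps the serde2 RandomTypeUtil, whose getRandTimestamp returns Hive's own Timestamp and therefore needed toSqlTimestamp(), back to common.type.RandomTypeUtil, which hands out java.sql.Timestamp directly. A standalone approximation of such a generator; the year range and nano handling here are assumptions for illustration, not Hive's exact implementation:

    import java.sql.Timestamp;
    import java.util.Random;

    public class RandTimestampSketch {
      // Produce a random Timestamp with sub-second nanos, roughly 1965..2025.
      static Timestamp getRandTimestamp(Random r) {
        long startSec = -157766400L;              // about 1965-01-01 UTC
        long rangeSec = 60L * 365 * 24 * 3600;    // about sixty years
        long seconds = startSec + (long) (r.nextDouble() * rangeSec);
        Timestamp ts = new Timestamp(seconds * 1000L);
        ts.setNanos(r.nextInt(1_000_000_000));    // replaces the fractional part
        return ts;
      }

      public static void main(String[] args) {
        System.out.println(getRandTimestamp(new Random(42)));
      }
    }
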
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
index ffdc410..e65288b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
@@ -71,7 +71,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -1867,8 +1867,8 @@ public class TestVectorGroupByOperator {
           } else if (key instanceof LongWritable) {
             LongWritable lwKey = (LongWritable)key;
             keyValue.add(lwKey.get());
-          } else if (key instanceof TimestampWritableV2) {
-            TimestampWritableV2 twKey = (TimestampWritableV2)key;
+          } else if (key instanceof TimestampWritable) {
+            TimestampWritable twKey = (TimestampWritable)key;
             keyValue.add(twKey.getTimestamp());
           } else if (key instanceof DoubleWritable) {
             DoubleWritable dwKey = (DoubleWritable)key;
@@ -1988,9 +1988,9 @@ public class TestVectorGroupByOperator {
         } else if (key instanceof LongWritable) {
           LongWritable lwKey = (LongWritable)key;
           keyValue = lwKey.get();
-        } else if (key instanceof TimestampWritableV2) {
-          TimestampWritableV2 twKey = (TimestampWritableV2)key;
-          keyValue = twKey.getTimestamp().toSqlTimestamp();
+        } else if (key instanceof TimestampWritable) {
+          TimestampWritable twKey = (TimestampWritable)key;
+          keyValue = twKey.getTimestamp();
         } else if (key instanceof DoubleWritable) {
           DoubleWritable dwKey = (DoubleWritable)key;
           keyValue = dwKey.get();

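In TestVectorGroupByOperator the instanceof chain goes back to the pre-V2 writables; TimestampWritable#getTimestamp already yields java.sql.Timestamp, so the toSqlTimestamp() bridge disappears. A minimal sketch of that unwrap pattern (unwrapKey is an illustrative name; assumes hive-serde and hadoop-common on the classpath):

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.serde2.io.DoubleWritable;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Writable;

    public class KeyUnwrapSketch {
      static Object unwrapKey(Writable key) {
        if (key instanceof LongWritable) {
          return ((LongWritable) key).get();
        } else if (key instanceof TimestampWritable) {
          Timestamp ts = ((TimestampWritable) key).getTimestamp(); // java.sql.Timestamp, no conversion
          return ts;
        } else if (key instanceof DoubleWritable) {
          return ((DoubleWritable) key).get();
        }
        return key;
      }

      public static void main(String[] args) {
        System.out.println(unwrapKey(new LongWritable(42L))); // 42
      }
    }
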
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
index 70a481d..f51b8bb 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
@@ -21,12 +21,12 @@ package org.apache.hadoop.hive.ql.exec.vector;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.BRoundWithNumDigitsDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.ColAndCol;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.ColOrCol;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
index 6181ae8..ae91b73 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
@@ -18,10 +18,11 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.text.ParseException;
-
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
@@ -31,14 +32,12 @@ import java.util.Set;
 import org.apache.commons.lang.StringUtils;
 
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
@@ -1299,11 +1298,15 @@ public class VectorRandomRowSource {
   }
 
   public static String randomPrimitiveDateStringObject(Random r) {
-    return RandomTypeUtil.getRandDate(r).toString();
+    Date randomDate = RandomTypeUtil.getRandDate(r);
+    String randomDateString = randomDate.toString();
+    return randomDateString;
   }
 
   public static String randomPrimitiveTimestampStringObject(Random r) {
-    return RandomTypeUtil.getRandTimestamp(r).toString();
+    Timestamp randomTimestamp = RandomTypeUtil.getRandTimestamp(r);
+    String randomTimestampString = randomTimestamp.toString();
+    return randomTimestampString;
   }
 
   public static HiveChar getRandHiveChar(Random r, CharTypeInfo charTypeInfo) {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java
index 458aae8..ec5ad23 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java
@@ -19,17 +19,15 @@
 package org.apache.hadoop.hive.ql.exec.vector;
 
 import junit.framework.TestCase;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -37,7 +35,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
@@ -58,6 +56,8 @@ import org.apache.hadoop.io.Text;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -240,7 +240,7 @@ public class VectorVerifyFast {
       case DATE:
       {
         Date value = deserializeRead.currentDateWritable.get();
-        Date expected = ((DateWritableV2) object).get();
+        Date expected = ((DateWritable) object).get();
         if (!value.equals(expected)) {
           TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
         }
@@ -249,7 +249,7 @@ public class VectorVerifyFast {
       case TIMESTAMP:
       {
         Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
-        Timestamp expected = ((TimestampWritableV2) object).getTimestamp();
+        Timestamp expected = ((TimestampWritable) object).getTimestamp();
         if (!value.equals(expected)) {
           TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
         }
@@ -390,13 +390,13 @@ public class VectorVerifyFast {
       break;
       case DATE:
       {
-        Date value = ((DateWritableV2) object).get();
+        Date value = ((DateWritable) object).get();
         serializeWrite.writeDate(value);
       }
       break;
       case TIMESTAMP:
       {
-        Timestamp value = ((TimestampWritableV2) object).getTimestamp();
+        Timestamp value = ((TimestampWritable) object).getTimestamp();
         serializeWrite.writeTimestamp(value);
       }
       break;
@@ -567,9 +567,9 @@ public class VectorVerifyFast {
     case DECIMAL:
       return new HiveDecimalWritable(deserializeRead.currentHiveDecimalWritable);
     case DATE:
-      return new DateWritableV2(deserializeRead.currentDateWritable);
+      return new DateWritable(deserializeRead.currentDateWritable);
     case TIMESTAMP:
-      return new TimestampWritableV2(deserializeRead.currentTimestampWritable);
+      return new TimestampWritable(deserializeRead.currentTimestampWritable);
     case INTERVAL_YEAR_MONTH:
       return new HiveIntervalYearMonthWritable(deserializeRead.currentHiveIntervalYearMonthWritable);
     case INTERVAL_DAY_TIME:

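VectorVerifyFast's DATE and TIMESTAMP branches likewise return to the V1 writables, so deserialized values compare directly against java.sql types. A minimal sketch of the DATE field check (verifyDate is an illustrative name; assumes hive-serde on the classpath):

    import java.sql.Date;
    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class VerifyDateSketch {
      static void verifyDate(Date actual, Object expectedObject) {
        Date expected = ((DateWritable) expectedObject).get(); // java.sql.Date after the revert
        if (!actual.equals(expected)) {
          throw new AssertionError(
              "Date field mismatch (expected " + expected + " found " + actual + ")");
        }
      }

      public static void main(String[] args) {
        Date d = Date.valueOf("1970-01-01");
        verifyDate(d, new DateWritable(d)); // passes silently
      }
    }
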
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
index b5ad22c..ffe9c81 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
@@ -18,13 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
-import org.apache.hadoop.hive.ql.udf.UDFMonth;
-import org.apache.hadoop.hive.ql.udf.UDFYear;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.junit.Assert;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -32,8 +25,12 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
+import org.apache.hadoop.hive.ql.udf.UDFMonth;
 import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.ql.udf.UDFYear;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.IntWritable;
@@ -41,33 +38,34 @@ import org.apache.hadoop.io.LongWritable;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.internal.runners.statements.Fail;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.List;
 import java.util.Random;
-import java.util.TimeZone;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
+import java.util.concurrent.ThreadFactory;
 
 public class TestVectorDateExpressions {
 
   private ExecutorService runner;
 
   /* copied over from VectorUDFTimestampFieldLong */
-  private TimestampWritableV2 toTimestampWritable(long daysSinceEpoch) {
-    return new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
-            DateWritableV2.daysToMillis((int) daysSinceEpoch)));
+  private TimestampWritable toTimestampWritable(long daysSinceEpoch) {
+    Timestamp ts = new Timestamp(DateWritable.daysToMillis((int) daysSinceEpoch));
+    return new TimestampWritable(ts);
   }
 
   private int[] getAllBoundaries() {
     List<Integer> boundaries = new ArrayList<Integer>(1);
-    Calendar c = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+    Calendar c = Calendar.getInstance();
     c.setTimeInMillis(0); // c.set doesn't reset millis
     for (int year = 1902; year <= 2038; year++) {
       c.set(year, Calendar.JANUARY, 1, 0, 0, 0);
@@ -110,12 +108,10 @@ public class TestVectorDateExpressions {
     return batch;
   }
 
-  private void compareToUDFYearDate(long t, int y) throws HiveException {
+  private void compareToUDFYearDate(long t, int y) {
     UDFYear udf = new UDFYear();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = toTimestampWritable(t);
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = toTimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -174,12 +170,10 @@ public class TestVectorDateExpressions {
     verifyUDFYear(batch);
   }
 
-  private void compareToUDFDayOfMonthDate(long t, int y) throws HiveException {
+  private void compareToUDFDayOfMonthDate(long t, int y) {
     UDFDayOfMonth udf = new UDFDayOfMonth();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = toTimestampWritable(t);
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = toTimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -238,12 +232,10 @@ public class TestVectorDateExpressions {
     verifyUDFDayOfMonth(batch);
   }
 
-  private void compareToUDFMonthDate(long t, int y) throws HiveException {
+  private void compareToUDFMonthDate(long t, int y) {
     UDFMonth udf = new UDFMonth();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = toTimestampWritable(t);
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = toTimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -302,7 +294,7 @@ public class TestVectorDateExpressions {
     verifyUDFMonth(batch);
   }
 
-  private LongWritable getLongWritable(TimestampWritableV2 i) {
+  private LongWritable getLongWritable(TimestampWritable i) {
     LongWritable result = new LongWritable();
     if (i == null) {
       return null;
@@ -313,11 +305,11 @@ public class TestVectorDateExpressions {
   }
 
   private void compareToUDFUnixTimeStampDate(long t, long y) {
-    TimestampWritableV2 tsw = toTimestampWritable(t);
+    TimestampWritable tsw = toTimestampWritable(t);
     LongWritable res = getLongWritable(tsw);
     if(res.get() != y) {
       System.out.printf("%d vs %d for %d, %d\n", res.get(), y, t,
-              tsw.getTimestamp().toEpochMilli()/1000);
+              tsw.getTimestamp().getTime()/1000);
     }
 
     Assert.assertEquals(res.get(), y);
@@ -380,7 +372,7 @@ public class TestVectorDateExpressions {
 
   private void compareToUDFWeekOfYearDate(long t, int y) {
     UDFWeekOfYear udf = new UDFWeekOfYear();
-    TimestampWritableV2 tsw = toTimestampWritable(t);
+    TimestampWritable tsw = toTimestampWritable(t);
     IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }

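Two zone-sensitive pieces return in TestVectorDateExpressions: daysToMillis once again goes through the JVM default zone, and the boundary Calendar loses its explicit UTC zone. A minimal sketch of how a January-1st boundary day is derived after the revert (boundaryDays is an illustrative name):

    import java.util.Calendar;

    public class BoundarySketch {
      public static void main(String[] args) {
        Calendar c = Calendar.getInstance(); // default zone; was TimeZone "UTC" before the revert
        c.setTimeInMillis(0);                // c.set doesn't reset millis
        c.set(1902, Calendar.JANUARY, 1, 0, 0, 0);
        // Floor division handles pre-epoch (negative) millis correctly.
        long boundaryDays = Math.floorDiv(c.getTimeInMillis(), 24L * 3600 * 1000);
        System.out.println(boundaryDays);    // zone-local midnight, so it can differ from the UTC day
      }
    }
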
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
index aa91344..0bae9b4 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
@@ -46,7 +46,7 @@ import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.SettableListObjectInspector;
@@ -91,8 +91,7 @@ public class TestVectorExpressionWriters {
 
 
   private Writable getWritableValue(TypeInfo ti, Timestamp value) {
-    return new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(value.getTime(), value.getNanos()));
+    return new TimestampWritable(value);
   }
 
   private Writable getWritableValue(TypeInfo ti, HiveDecimal value) {
@@ -124,8 +123,7 @@ public class TestVectorExpressionWriters {
       return new BooleanWritable( value == 0 ? false : true);
     } else if (ti.equals(TypeInfoFactory.timestampTypeInfo)) {
       Timestamp ts = new Timestamp(value);
-      TimestampWritableV2 tw = new TimestampWritableV2(
-          org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
+      TimestampWritable tw = new TimestampWritable(ts);
       return tw;
     }
     return null;
@@ -248,8 +246,8 @@ public class TestVectorExpressionWriters {
       Writable w = (Writable) vew.writeValue(tcv, i);
       if (w != null) {
         Writable expected = getWritableValue(type, timestampValues[i]);
-        TimestampWritableV2 t1 = (TimestampWritableV2) expected;
-        TimestampWritableV2 t2 = (TimestampWritableV2) w;
+        TimestampWritable t1 = (TimestampWritable) expected;
+        TimestampWritable t2 = (TimestampWritable) w;
         Assert.assertTrue(t1.equals(t2));
        } else {
         Assert.assertTrue(tcv.isNull[i]);
@@ -272,8 +270,8 @@ public class TestVectorExpressionWriters {
       values[i] = vew.setValue(values[i], tcv, i);
       if (values[i] != null) {
         Writable expected = getWritableValue(type, timestampValues[i]);
-        TimestampWritableV2 t1 = (TimestampWritableV2) expected;
-        TimestampWritableV2 t2 = (TimestampWritableV2) values[i];
+        TimestampWritable t1 = (TimestampWritable) expected;
+        TimestampWritable t2 = (TimestampWritable) values[i];
         Assert.assertTrue(t1.equals(t2));
       } else {
         Assert.assertTrue(tcv.isNull[i]);

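The getWritableValue hunks above drop the two-argument bridge Timestamp.ofEpochMilli(value.getTime(), value.getNanos()) in favor of wrapping the java.sql.Timestamp directly. The bridge existed because a java.sql.Timestamp splits one instant across two accessors. A JDK-only sketch of that split (class name illustrative):

    import java.sql.Timestamp;

    public class MillisNanosSketch {
      public static void main(String[] args) {
        Timestamp ts = new Timestamp(1000L);  // one second after the epoch
        ts.setNanos(123456789);               // replaces the fractional second
        // getTime() still reports whole millis, including the first three
        // fractional digits; the full fraction lives only in getNanos().
        System.out.println(ts.getTime());     // 1123
        System.out.println(ts.getNanos());    // 123456789
      }
    }
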
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
index e7884b2..9792951 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -40,23 +40,16 @@ import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Random;
-import java.util.TimeZone;
 
 public class TestVectorGenericDateExpressions {
 
   private Charset utf8 = StandardCharsets.UTF_8;
   private int size = 200;
   private Random random = new Random();
-  private SimpleDateFormat formatter = getFormatter();
+  private SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private List<PrimitiveCategory> dateTimestampStringTypes =
       Arrays.<PrimitiveCategory>asList(PrimitiveCategory.DATE, PrimitiveCategory.TIMESTAMP, PrimitiveCategory.STRING);
 
-  private static SimpleDateFormat getFormatter() {
-    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
-    formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
-    return formatter;
-  }
-
   private long newRandom(int i) {
     return random.nextInt(i);
   }
@@ -95,7 +88,7 @@ public class TestVectorGenericDateExpressions {
   }
 
   private Timestamp toTimestamp(long date) {
-    return new Timestamp(DateWritableV2.daysToMillis((int) date));
+    return new Timestamp(DateWritable.daysToMillis((int) date));
   }
 
   private BytesColumnVector toString(LongColumnVector date) {
@@ -114,7 +107,7 @@ public class TestVectorGenericDateExpressions {
   }
 
   private byte[] toString(long date) {
-    String formatted = formatter.format(new Date(DateWritableV2.daysToMillis((int) date)));
+    String formatted = formatter.format(new Date(DateWritable.daysToMillis((int) date)));
     return formatted.getBytes(utf8);
   }
 
@@ -675,7 +668,7 @@ public class TestVectorGenericDateExpressions {
       if (date.isNull[i]) {
         Assert.assertTrue(output.isNull[i]);
       } else {
-        String expected = formatter.format(new Date(DateWritableV2.daysToMillis((int) date.vector[i])));
+        String expected = formatter.format(new Date(DateWritable.daysToMillis((int) date.vector[i])));
         Assert.assertEquals(expected, actual);
       }
     }

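The removed getFormatter() helper above pinned the test's SimpleDateFormat to UTC; the revert goes back to the JVM default zone. The difference is observable for any instant near a day boundary. A JDK-only sketch (names illustrative):

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class FormatterZoneSketch {
      public static void main(String[] args) {
        long utcMidnightDay1 = 24L * 60 * 60 * 1000;                  // 1970-01-02T00:00Z
        SimpleDateFormat local = new SimpleDateFormat("yyyy-MM-dd");  // default zone
        SimpleDateFormat utc = new SimpleDateFormat("yyyy-MM-dd");
        utc.setTimeZone(TimeZone.getTimeZone("UTC"));                 // the removed pinning
        System.out.println(local.format(new Date(utcMidnightDay1))); // zone-dependent
        System.out.println(utc.format(new Date(utcMidnightDay1)));   // always 1970-01-02
      }
    }
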
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
index 14d1343..e81844c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
@@ -20,13 +20,13 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.io.UnsupportedEncodingException;
 import java.nio.charset.StandardCharsets;
+import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.Random;
 
 import junit.framework.Assert;
 
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -56,7 +56,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinDoubleToDoub
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSqrtDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncTanDoubleToDouble;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Test;
 
@@ -287,8 +287,8 @@ public class TestVectorMathFunctions {
     outV = new LongColumnVector(longValues.length);
     for (int i = 0; i < longValues.length; i++) {
       Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r);
-      longValues[i] = TimestampWritableV2.getLong(randTimestamp);
-      inV.set(0, randTimestamp.toSqlTimestamp());
+      longValues[i] = TimestampWritable.getLong(randTimestamp);
+      inV.set(0, randTimestamp);
     }
 
     batch.cols[0] = inV;

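The toSqlTimestamp() calls deleted above bridged from the V2 org.apache.hadoop.hive.common.type.Timestamp back to java.sql.Timestamp wherever a TimestampColumnVector was filled. A sketch of that round trip, assuming the pre-revert classpath where the V2 class still exists (all method names are taken from hunks in this commit):

    import java.sql.Timestamp;

    public class TimestampBridgeSketch {
      public static void main(String[] args) {
        Timestamp sql = Timestamp.valueOf("2000-03-12 15:00:00.123456789");
        org.apache.hadoop.hive.common.type.Timestamp v2 =
            org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
                sql.getTime(), sql.getNanos());
        // toSqlTimestamp() should reproduce the original millis and nanos.
        Timestamp back = v2.toSqlTimestamp();
        System.out.println(sql.equals(back));
      }
    }
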
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
index 650bac4..f6dbd67 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
@@ -26,19 +26,13 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Calendar;
 import java.util.Date;
+import java.util.List;
 import java.util.Random;
-import java.util.TimeZone;
 
 import junit.framework.Assert;
 
-import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
-import org.apache.hadoop.hive.ql.udf.UDFHour;
-import org.apache.hadoop.hive.ql.udf.UDFMinute;
-import org.apache.hadoop.hive.ql.udf.UDFMonth;
-import org.apache.hadoop.hive.ql.udf.UDFSecond;
-import org.apache.hadoop.hive.ql.udf.UDFYear;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -46,13 +40,18 @@ import org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
+import org.apache.hadoop.hive.ql.udf.UDFHour;
+import org.apache.hadoop.hive.ql.udf.UDFMinute;
+import org.apache.hadoop.hive.ql.udf.UDFMonth;
+import org.apache.hadoop.hive.ql.udf.UDFSecond;
 import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.ql.udf.UDFYear;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.junit.Test;
 
@@ -61,13 +60,7 @@ import org.junit.Test;
  */
 public class TestVectorTimestampExpressions {
 
-  private SimpleDateFormat dateFormat = getFormatter();
-
-  private static SimpleDateFormat getFormatter() {
-    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-    formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
-    return formatter;
-  }
+  private SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 
   private Timestamp[] getAllBoundaries(int minYear, int maxYear) {
      ArrayList<Timestamp> boundaries = new ArrayList<Timestamp>(1);
@@ -102,7 +95,7 @@ public class TestVectorTimestampExpressions {
     TimestampColumnVector tcv = new TimestampColumnVector(size);
     Random rand = new Random(seed);
     for (int i = 0; i < size; i++) {
-      tcv.set(i, RandomTypeUtil.getRandTimestamp(rand).toSqlTimestamp());
+      tcv.set(i, RandomTypeUtil.getRandTimestamp(rand));
     }
     batch.cols[0] = tcv;
     batch.cols[1] = new LongColumnVector(size);
@@ -116,7 +109,7 @@ public class TestVectorTimestampExpressions {
     Random rand = new Random(seed);
     for (int i = 0; i < size; i++) {
       /* all 32 bit numbers qualify & multiply up to get nano-seconds */
-      byte[] encoded = encodeTime(RandomTypeUtil.getRandTimestamp(rand).toSqlTimestamp());
+      byte[] encoded = encodeTime(RandomTypeUtil.getRandTimestamp(rand));
       bcv.vector[i] = encoded;
       bcv.start[i] = 0;
       bcv.length[i] = encoded.length;
@@ -231,13 +224,14 @@ public class TestVectorTimestampExpressions {
     TIMESTAMP_LONG, STRING_LONG
   }
 
-  private void compareToUDFYearLong(Timestamp t, int y) throws HiveException {
+  private void compareToUDFYearLong(Timestamp t, int y) {
     UDFYear udf = new UDFYear();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = new TimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
+    if (res.get() != y) {
+      System.out.printf("%d vs %d for %s, %d\n", res.get(), y, t.toString(),
+          tsw.getTimestamp().getTime()/1000);
+    }
     Assert.assertEquals(res.get(), y);
   }
 
@@ -328,13 +322,10 @@ public class TestVectorTimestampExpressions {
     Assert.assertEquals(true, lcv.isNull[0]);
   }
 
-  private void compareToUDFDayOfMonthLong(Timestamp t, int y) throws HiveException {
+  private void compareToUDFDayOfMonthLong(Timestamp t, int y) {
     UDFDayOfMonth udf = new UDFDayOfMonth();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = new TimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -418,13 +409,10 @@ public class TestVectorTimestampExpressions {
     testVectorUDFDayOfMonth(TestType.STRING_LONG);
   }
 
-  private void compareToUDFHourLong(Timestamp t, int y) throws HiveException {
+  private void compareToUDFHourLong(Timestamp t, int y) {
     UDFHour udf = new UDFHour();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = new TimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -507,13 +495,10 @@ public class TestVectorTimestampExpressions {
     testVectorUDFHour(TestType.STRING_LONG);
   }
 
-  private void compareToUDFMinuteLong(Timestamp t, int y) throws HiveException {
+  private void compareToUDFMinuteLong(Timestamp t, int y) {
     UDFMinute udf = new UDFMinute();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = new TimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -597,13 +582,10 @@ public class TestVectorTimestampExpressions {
     testVectorUDFMinute(TestType.STRING_LONG);
   }
 
-  private void compareToUDFMonthLong(Timestamp t, int y) throws HiveException {
+  private void compareToUDFMonthLong(Timestamp t, int y) {
     UDFMonth udf = new UDFMonth();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = new TimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -686,13 +668,10 @@ public class TestVectorTimestampExpressions {
     testVectorUDFMonth(TestType.STRING_LONG);
   }
 
-  private void compareToUDFSecondLong(Timestamp t, int y) throws HiveException {
+  private void compareToUDFSecondLong(Timestamp t, int y) {
     UDFSecond udf = new UDFSecond();
-    udf.initialize(new ObjectInspector[]{PrimitiveObjectInspectorFactory.writableTimestampObjectInspector});
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
-    IntWritable res = (IntWritable) udf.evaluate(
-        new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(tsw)});
+    TimestampWritable tsw = new TimestampWritable(t);
+    IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }
 
@@ -865,8 +844,7 @@ public class TestVectorTimestampExpressions {
 
   private void compareToUDFWeekOfYearLong(Timestamp t, int y) {
     UDFWeekOfYear udf = new UDFWeekOfYear();
-    TimestampWritableV2 tsw = new TimestampWritableV2(
-        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
+    TimestampWritable tsw = new TimestampWritable(t);
     IntWritable res = udf.evaluate(tsw);
     Assert.assertEquals(res.get(), y);
   }

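All the compareToUDF*Long hunks above follow one pattern: drop initialize()/DeferredObject evaluation and the ofEpochMilli bridge, and call the legacy evaluate(TimestampWritable) directly. The underlying semantic issue in HIVE-12192 is that field extraction from an instant is zone-dependent. A JDK-only sketch of why (names illustrative):

    import java.util.Calendar;
    import java.util.TimeZone;

    public class FieldExtractionSketch {
      public static void main(String[] args) {
        long epoch = 0L;  // 1970-01-01T00:00:00Z
        Calendar utc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        utc.setTimeInMillis(epoch);
        Calendar local = Calendar.getInstance();  // JVM default zone
        local.setTimeInMillis(epoch);
        // The same instant can yield different field values in different zones.
        System.out.println(utc.get(Calendar.HOUR_OF_DAY));    // 0
        System.out.println(local.get(Calendar.HOUR_OF_DAY));  // zone-dependent
      }
    }
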
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java
index 58e3fa3..fa8f465 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExtract.java
@@ -18,20 +18,32 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import junit.framework.Assert;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Random;
+
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
+import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
-import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
+import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IdentityExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.TestVectorDateAddSub.ColumnScalarMode;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
@@ -44,23 +56,34 @@ import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
 import org.apache.hadoop.hive.ql.udf.UDFYear;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateSub;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIf;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.Text;
-import org.junit.Test;
+import org.apache.hadoop.io.LongWritable;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Random;
+import junit.framework.Assert;
+
+import org.junit.Ignore;
+import org.junit.Test;
 
 public class TestVectorTimestampExtract {
 
@@ -85,6 +108,14 @@ public class TestVectorTimestampExtract {
     doTimestampExtractTests(random, "string");
   }
 
+  public enum TimestampExtractTestMode {
+    ROW_MODE,
+    ADAPTOR,
+    VECTOR_EXPRESSION;
+
+    static final int count = values().length;
+  }
+
   private void doTimestampExtractTests(Random random, String typeName)
       throws Exception {
 
@@ -161,15 +192,13 @@ public class TestVectorTimestampExtract {
       return;
     }
 
-    final GenericUDF udf;
+    final UDF udf;
     switch (extractFunctionName) {
     case "day":
       udf = new UDFDayOfMonth();
       break;
     case "dayofweek":
-      GenericUDFBridge dayOfWeekUDFBridge = new GenericUDFBridge();
-      dayOfWeekUDFBridge.setUdfClassName(UDFDayOfWeek.class.getName());
-      udf = dayOfWeekUDFBridge;
+      udf = new UDFDayOfWeek();
       break;
     case "hour":
       udf = new UDFHour();
@@ -184,9 +213,7 @@ public class TestVectorTimestampExtract {
       udf = new UDFSecond();
       break;
     case "yearweek":
-      GenericUDFBridge weekOfYearUDFBridge = new GenericUDFBridge();
-      weekOfYearUDFBridge.setUdfClassName(UDFWeekOfYear.class.getName());
-      udf = weekOfYearUDFBridge;
+      udf = new UDFWeekOfYear();
       break;
     case "year":
       udf = new UDFYear();
@@ -195,8 +222,11 @@ public class TestVectorTimestampExtract {
       throw new RuntimeException("Unexpected extract function name " + extractFunctionName);
     }
 
+    GenericUDFBridge genericUDFBridge = new GenericUDFBridge();
+    genericUDFBridge.setUdfClassName(udf.getClass().getName());
+
     ExprNodeGenericFuncDesc exprDesc =
-        new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, children);
+        new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, genericUDFBridge, children);
 
     final int rowCount = randomRows.length;
     Object[][] resultObjectsArray = new Object[TimestampExtractTestMode.count][];
@@ -312,10 +342,10 @@ public class TestVectorTimestampExtract {
       Object result;
       switch (dateTimeStringPrimitiveCategory) {
       case TIMESTAMP:
-        result = evaluator.evaluate((TimestampWritableV2) object);
+        result = evaluator.evaluate((TimestampWritable) object);
         break;
       case DATE:
-        result = evaluator.evaluate((DateWritableV2) object);
+        result = evaluator.evaluate((DateWritable) object);
         break;
       case STRING:
         {
@@ -432,12 +462,4 @@ public class TestVectorTimestampExtract {
 
     return true;
   }
-
-  public enum TimestampExtractTestMode {
-    ROW_MODE,
-    ADAPTOR,
-    VECTOR_EXPRESSION;
-
-    static final int count = values().length;
-  }
 }

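After the revert, day/hour/minute/month/second/year are again plain UDFs rather than GenericUDFs, so the hunk above wraps whichever one is chosen in a single GenericUDFBridge before building the ExprNodeGenericFuncDesc. A sketch of that wrapping step, assuming the Hive ql classes used above (only calls that appear verbatim in the hunk):

    import org.apache.hadoop.hive.ql.udf.UDFYear;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;

    public class BridgeWrapSketch {
      public static void main(String[] args) {
        GenericUDFBridge genericUDFBridge = new GenericUDFBridge();
        genericUDFBridge.setUdfClassName(UDFYear.class.getName());
        // genericUDFBridge can now stand in for the legacy UDF wherever a
        // GenericUDF is required, e.g. in an ExprNodeGenericFuncDesc.
        System.out.println("wrapped " + UDFYear.class.getName());
      }
    }
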
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
index 4ed087e..58ed151 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
@@ -22,8 +22,12 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.io.UnsupportedEncodingException;
 import java.math.BigDecimal;
+import java.math.MathContext;
+import java.math.RoundingMode;
 import java.sql.Timestamp;
+import java.util.Arrays;
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
 
@@ -31,7 +35,7 @@ import junit.framework.Assert;
 
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
@@ -39,10 +43,12 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.TimestampUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Test;
@@ -82,7 +88,7 @@ public class TestVectorTypeCasts {
     expr.evaluate(b);
     for (int i = 0; i < intValues.length; i++) {
       Timestamp timestamp = resultV.asScratchTimestamp(i);
-      long actual = DateWritableV2.millisToDays(timestamp.getTime());
+      long actual = DateWritable.millisToDays(timestamp.getTime());
       assertEquals(actual, intValues[i]);
     }
   }
@@ -149,8 +155,7 @@ public class TestVectorTypeCasts {
     expr.evaluate(b);
     for (int i = 0; i < longValues.length; i++) {
       Timestamp timestamp = resultV.asScratchTimestamp(i);
-      long actual = TimestampWritableV2.getLong(
-          org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(timestamp.getTime(), timestamp.getNanos()));
+      long actual = TimestampWritable.getLong(timestamp);
       assertEquals(actual, longValues[i]);
     }
   }
@@ -513,8 +518,7 @@ public class TestVectorTypeCasts {
       Timestamp ts = new Timestamp(millis);
       int nanos = RandomTypeUtil.randomNanos(r);
       ts.setNanos(nanos);
-      TimestampWritableV2 tsw = new TimestampWritableV2(
-          org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
+      TimestampWritable tsw = new TimestampWritable(ts);
       double asDouble = tsw.getDouble();
       doubleValues[i] = asDouble;
       HiveDecimal hiveDecimal = HiveDecimal.create(new BigDecimal(asDouble));
@@ -578,8 +582,7 @@ public class TestVectorTypeCasts {
       long millis = RandomTypeUtil.randomMillis(r);
       Timestamp ts = new Timestamp(millis);
       ts.setNanos(optionalNanos);
-      TimestampWritableV2 tsw = new TimestampWritableV2(
-          org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
+      TimestampWritable tsw = new TimestampWritable(ts);
       hiveDecimalValues[i] = tsw.getHiveDecimal();
 
       tcv.set(i, ts);

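The cast tests above lean on DateWritable's day arithmetic: millisToDays() collapses a timestamp's epoch millis to a whole epoch day. A JDK-only sketch of the UTC core of that computation (note the legacy DateWritable additionally folds in a local-zone offset, as the LOCAL_TIMEZONE adjustment in the TestInputOutputFormat hunk further below suggests):

    import java.util.concurrent.TimeUnit;

    public class MillisToDaysSketch {
      public static void main(String[] args) {
        long millis = 1529922000000L;  // 2018-06-25T10:20Z, roughly this commit's date
        // Floor division keeps pre-epoch (negative) instants on the right day.
        long days = Math.floorDiv(millis, TimeUnit.DAYS.toMillis(1));
        System.out.println(days);      // 17707
      }
    }
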
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
index c908f66..f257363 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.LinkedHashMap;
@@ -27,17 +29,15 @@ import java.util.Map;
 
 import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -45,7 +45,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
@@ -63,6 +63,7 @@ import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
 
 /**
  * (Copy of VerifyFast from serde).
@@ -244,7 +245,7 @@ public class VerifyFastRow {
         case DATE:
           {
             Date value = deserializeRead.currentDateWritable.get();
-            Date expected = ((DateWritableV2) object).get();
+            Date expected = ((DateWritable) object).get();
             if (!value.equals(expected)) {
               TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
             }
@@ -253,7 +254,7 @@ public class VerifyFastRow {
         case TIMESTAMP:
           {
             Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
-            Timestamp expected = ((TimestampWritableV2) object).getTimestamp();
+            Timestamp expected = ((TimestampWritable) object).getTimestamp();
             if (!value.equals(expected)) {
               TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
             }
@@ -394,13 +395,13 @@ public class VerifyFastRow {
           break;
         case DATE:
           {
-            Date value = ((DateWritableV2) object).get();
+            Date value = ((DateWritable) object).get();
             serializeWrite.writeDate(value);
           }
           break;
         case TIMESTAMP:
           {
-            Timestamp value = ((TimestampWritableV2) object).getTimestamp();
+            Timestamp value = ((TimestampWritable) object).getTimestamp();
             serializeWrite.writeTimestamp(value);
           }
           break;
@@ -571,9 +572,9 @@ public class VerifyFastRow {
     case DECIMAL:
       return new HiveDecimalWritable(deserializeRead.currentHiveDecimalWritable);
     case DATE:
-      return new DateWritableV2(deserializeRead.currentDateWritable);
+      return new DateWritable(deserializeRead.currentDateWritable);
     case TIMESTAMP:
-      return new TimestampWritableV2(deserializeRead.currentTimestampWritable);
+      return new TimestampWritable(deserializeRead.currentTimestampWritable);
     case INTERVAL_YEAR_MONTH:
       return new HiveIntervalYearMonthWritable(deserializeRead.currentHiveIntervalYearMonthWritable);
     case INTERVAL_DAY_TIME:

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
index 878d286..3fe8b09 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
@@ -22,7 +22,7 @@ import java.sql.Timestamp;
 import java.util.Random;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -86,7 +86,7 @@ public class VectorizedRowGroupGenUtil {
     tcv.noNulls = !nulls;
     tcv.isRepeating = repeating;
 
-    Timestamp repeatingTimestamp = RandomTypeUtil.getRandTimestamp(rand).toSqlTimestamp();
+    Timestamp repeatingTimestamp = RandomTypeUtil.getRandTimestamp(rand);
 
     int nullFrequency = generateNullFrequency(rand);
 
@@ -98,7 +98,7 @@ public class VectorizedRowGroupGenUtil {
       }else {
         tcv.isNull[i] = false;
         if (!repeating) {
-          Timestamp randomTimestamp = RandomTypeUtil.getRandTimestamp(rand).toSqlTimestamp();
+          Timestamp randomTimestamp = RandomTypeUtil.getRandTimestamp(rand);
           tcv.set(i,  randomTimestamp);
           timestampValues[i] = randomTimestamp;
         } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
index 9bf9d9d..1064b19 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
@@ -22,7 +22,7 @@ import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.Random;
 
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -31,6 +31,8 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType;
 import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType.GenerateCategory;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.Text;
 
 public class VectorColumnGroupGenerator {
 
@@ -230,7 +232,7 @@ public class VectorColumnGroupGenerator {
 
     case TIMESTAMP:
       {
-        Timestamp value = RandomTypeUtil.getRandTimestamp(random).toSqlTimestamp();
+        Timestamp value = RandomTypeUtil.getRandTimestamp(random);
         ((Timestamp[]) array)[rowIndex] = value;
       }
       break;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java b/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
index c9a5812..ce25c3e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
@@ -26,14 +26,13 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -41,7 +40,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -58,6 +57,7 @@ import org.apache.hadoop.io.Text;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.sql.Timestamp;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -111,20 +111,20 @@ public class TestArrowColumnarBatchSerDe {
   private final static Timestamp NEGATIVE_TIMESTAMP_WITHOUT_NANOS;
 
   static {
-    TIMESTAMP = Timestamp.ofEpochMilli(TIME_IN_MILLIS);
-    NEGATIVE_TIMESTAMP_WITHOUT_NANOS = Timestamp.ofEpochMilli(NEGATIVE_TIME_IN_MILLIS);
+    TIMESTAMP = new Timestamp(TIME_IN_MILLIS);
+    NEGATIVE_TIMESTAMP_WITHOUT_NANOS = new Timestamp(NEGATIVE_TIME_IN_MILLIS);
   }
 
   private final static Object[][] DTI_ROWS = {
       {
-          new DateWritableV2(DateWritableV2.millisToDays(TIME_IN_MILLIS)),
-          new TimestampWritableV2(TIMESTAMP),
+          new DateWritable(DateWritable.millisToDays(TIME_IN_MILLIS)),
+          new TimestampWritable(TIMESTAMP),
           new HiveIntervalYearMonthWritable(new HiveIntervalYearMonth(1, 2)),
           new HiveIntervalDayTimeWritable(new HiveIntervalDayTime(1, 2, 3, 4, 5_000_000))
       },
       {
-          new DateWritableV2(DateWritableV2.millisToDays(NEGATIVE_TIME_IN_MILLIS)),
-          new TimestampWritableV2(NEGATIVE_TIMESTAMP_WITHOUT_NANOS),
+          new DateWritable(DateWritable.millisToDays(NEGATIVE_TIME_IN_MILLIS)),
+          new TimestampWritable(NEGATIVE_TIMESTAMP_WITHOUT_NANOS),
           null,
           null
       },
@@ -364,10 +364,10 @@ public class TestArrowColumnarBatchSerDe {
                         newArrayList(text("hello")),
                         input -> text(input.toString().toUpperCase())),
                     intW(0))), // c16:array<struct<m:map<string,string>,n:int>>
-            new TimestampWritableV2(TIMESTAMP), // c17:timestamp
+            new TimestampWritable(TIMESTAMP), // c17:timestamp
             decimalW(HiveDecimal.create(0, 0)), // c18:decimal(16,7)
             new BytesWritable("Hello".getBytes()), // c19:binary
-            new DateWritableV2(123), // c20:date
+            new DateWritable(123), // c20:date
             varcharW("x", 20), // c21:varchar(20)
             charW("y", 15), // c22:char(15)
             new BytesWritable("world!".getBytes()), // c23:binary
@@ -508,9 +508,9 @@ public class TestArrowColumnarBatchSerDe {
     Object[][] rows = new Object[size][];
     for (int i = 0; i < size; i++) {
       long millis = ((long) rand.nextInt(Integer.MAX_VALUE)) * 1000;
-      Timestamp timestamp = Timestamp.ofEpochMilli(rand.nextBoolean() ? millis : -millis);
+      Timestamp timestamp = new Timestamp(rand.nextBoolean() ? millis : -millis);
       timestamp.setNanos(rand.nextInt(1000) * 1000);
-      rows[i] = new Object[] {new TimestampWritableV2(timestamp)};
+      rows[i] = new Object[] {new TimestampWritable(timestamp)};
     }
 
     initAndSerializeAndDeserialize(schema, rows);

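The Arrow serde test above rebuilds its pre-epoch fixture with the plain constructor: new Timestamp(millis) accepts negative epoch millis, covering the same pre-1970 range the V2 Timestamp.ofEpochMilli factory handled. A JDK-only sketch (name illustrative):

    import java.sql.Timestamp;

    public class PreEpochSketch {
      public static void main(String[] args) {
        Timestamp preEpoch = new Timestamp(-86400000L);  // one day before the epoch
        System.out.println(preEpoch.getTime());          // -86400000
      }
    }
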
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
index 208aeb5..fe475f6 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
@@ -28,6 +28,8 @@ import java.net.URISyntaxException;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -48,9 +50,7 @@ import org.apache.hadoop.fs.*;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.common.ValidWriteIdList;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
@@ -181,8 +181,8 @@ public class TestInputOutputFormat {
       decimalValue = HiveDecimal.create(x);
       long millisUtc = x * MILLIS_IN_DAY;
       millisUtc -= LOCAL_TIMEZONE.getOffset(millisUtc);
-      dateValue = Date.ofEpochMilli(millisUtc);
-      timestampValue = Timestamp.ofEpochMilli(millisUtc);
+      dateValue = new Date(millisUtc);
+      timestampValue = new Timestamp(millisUtc);
     }
 
     @Override

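The unchanged LOCAL_TIMEZONE.getOffset() line in the hunk above shifts a UTC day boundary so that the constructed values land on local midnight of the intended day; the revert only swaps the V2 ofEpochMilli factories for java.sql constructors on the adjusted millis. A JDK-only sketch of the shift (names illustrative; DST edge cases ignored):

    import java.util.TimeZone;

    public class LocalMidnightSketch {
      public static void main(String[] args) {
        long millisInDay = 24L * 60 * 60 * 1000;
        long day = 17707;                        // 2018-06-25 as an epoch day
        long millisUtc = day * millisInDay;      // UTC midnight of that day
        millisUtc -= TimeZone.getDefault().getOffset(millisUtc);
        // java.sql.Date.toString() renders in the default zone, so this prints
        // the intended calendar day regardless of where the JVM runs.
        System.out.println(new java.sql.Date(millisUtc));  // 2018-06-25
      }
    }
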
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
index 97d4fc6..dc0da9c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
@@ -25,8 +25,11 @@ import static junit.framework.Assert.assertTrue;
 
 import java.io.File;
 import java.io.IOException;
+import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.nio.ByteBuffer;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -40,18 +43,16 @@ import com.google.common.primitives.Longs;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -541,7 +542,7 @@ public class TestOrcFile {
     while (rows.hasNext()) {
       Object row = rows.next(null);
       Timestamp tlistTimestamp = tslist.get(idx++);
-      if (tlistTimestamp.getNanos() != ((TimestampWritableV2) row).getNanos()) {
+      if (tlistTimestamp.getNanos() != ((TimestampWritable) row).getNanos()) {
         assertTrue(false);
       }
     }
@@ -1311,10 +1312,10 @@ public class TestOrcFile {
     for (int year = minYear; year < maxYear; ++year) {
       for (int ms = 1000; ms < 2000; ++ms) {
         row.setFieldValue(0,
-            new TimestampWritableV2(Timestamp.valueOf(year + "-05-05 12:34:56."
+            new TimestampWritable(Timestamp.valueOf(year + "-05-05 12:34:56."
                 + ms)));
         row.setFieldValue(1,
-            new DateWritableV2(Date.of(year - 1900, 11, 25)));
+            new DateWritable(new Date(year - 1900, 11, 25)));
         writer.addRow(row);
       }
     }
@@ -1325,10 +1326,10 @@ public class TestOrcFile {
     for (int year = minYear; year < maxYear; ++year) {
       for(int ms = 1000; ms < 2000; ++ms) {
         row = (OrcStruct) rows.next(row);
-        assertEquals(new TimestampWritableV2
+        assertEquals(new TimestampWritable
                 (Timestamp.valueOf(year + "-05-05 12:34:56." + ms)),
             row.getFieldValue(0));
-        assertEquals(new DateWritableV2(Date.of(year - 1900, 11, 25)),
+        assertEquals(new DateWritable(new Date(year - 1900, 11, 25)),
             row.getFieldValue(1));
       }
     }
@@ -1382,12 +1383,12 @@ public class TestOrcFile {
     OrcStruct row = new OrcStruct(3);
     OrcUnion union = new OrcUnion();
     row.setFieldValue(1, union);
-    row.setFieldValue(0, new TimestampWritableV2(Timestamp.valueOf("2000-03-12 15:00:00")));
+    row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")));
     HiveDecimal value = HiveDecimal.create("12345678.6547456");
     row.setFieldValue(2, new HiveDecimalWritable(value));
     union.set((byte) 0, new IntWritable(42));
     writer.addRow(row);
-    row.setFieldValue(0, new TimestampWritableV2(Timestamp.valueOf("2000-03-20 12:00:00.123456789")));
+    row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")));
     union.set((byte) 1, new Text("hello"));
     value = HiveDecimal.create("-5643.234");
     row.setFieldValue(2, new HiveDecimalWritable(value));
@@ -1402,14 +1403,14 @@ public class TestOrcFile {
     union.set((byte) 1, null);
     writer.addRow(row);
     union.set((byte) 0, new IntWritable(200000));
-    row.setFieldValue(0, new TimestampWritableV2
+    row.setFieldValue(0, new TimestampWritable
         (Timestamp.valueOf("1970-01-01 00:00:00")));
     value = HiveDecimal.create("10000000000000000000");
     row.setFieldValue(2, new HiveDecimalWritable(value));
     writer.addRow(row);
     Random rand = new Random(42);
     for(int i=1970; i < 2038; ++i) {
-      row.setFieldValue(0, new TimestampWritableV2(Timestamp.valueOf(i +
+      row.setFieldValue(0, new TimestampWritable(Timestamp.valueOf(i +
           "-05-05 12:34:56." + i)));
       if ((i & 1) == 0) {
         union.set((byte) 0, new IntWritable(i*i));
@@ -1489,7 +1490,7 @@ public class TestOrcFile {
     inspector = reader.getObjectInspector();
     assertEquals("struct<time:timestamp,union:uniontype<int,string>,decimal:decimal(38,18)>",
         inspector.getTypeName());
-    assertEquals(new TimestampWritableV2(Timestamp.valueOf("2000-03-12 15:00:00")),
+    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-12 15:00:00")),
         row.getFieldValue(0));
     union = (OrcUnion) row.getFieldValue(1);
     assertEquals(0, union.getTag());
@@ -1498,7 +1499,7 @@ public class TestOrcFile {
         row.getFieldValue(2));
     row = (OrcStruct) rows.next(row);
     assertEquals(2, rows.getRowNumber());
-    assertEquals(new TimestampWritableV2(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
+    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
         row.getFieldValue(0));
     assertEquals(1, union.getTag());
     assertEquals(new Text("hello"), union.getObject());
@@ -1520,7 +1521,7 @@ public class TestOrcFile {
     assertEquals(null, union.getObject());
     assertEquals(null, row.getFieldValue(2));
     row = (OrcStruct) rows.next(row);
-    assertEquals(new TimestampWritableV2(Timestamp.valueOf("1970-01-01 00:00:00")),
+    assertEquals(new TimestampWritable(Timestamp.valueOf("1970-01-01 00:00:00")),
         row.getFieldValue(0));
     assertEquals(new IntWritable(200000), union.getObject());
     assertEquals(new HiveDecimalWritable(HiveDecimal.create("10000000000000000000")),
@@ -1528,7 +1529,7 @@ public class TestOrcFile {
     rand = new Random(42);
     for(int i=1970; i < 2038; ++i) {
       row = (OrcStruct) rows.next(row);
-      assertEquals(new TimestampWritableV2(Timestamp.valueOf(i + "-05-05 12:34:56." + i)),
+      assertEquals(new TimestampWritable(Timestamp.valueOf(i + "-05-05 12:34:56." + i)),
           row.getFieldValue(0));
       if ((i & 1) == 0) {
         assertEquals(0, union.getTag());
@@ -1555,7 +1556,7 @@ public class TestOrcFile {
     assertEquals(reader.getNumberOfRows(), rows.getRowNumber());
     rows.seekToRow(1);
     row = (OrcStruct) rows.next(row);
-    assertEquals(new TimestampWritableV2(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
+    assertEquals(new TimestampWritable(Timestamp.valueOf("2000-03-20 12:00:00.123456789")),
         row.getFieldValue(0));
     assertEquals(1, union.getTag());
     assertEquals(new Text("hello"), union.getObject());

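One subtlety in the TestOrcFile hunks above: the deprecated java.sql.Date(int, int, int) constructor interprets its arguments as year-1900 and a 0-based month, so new Date(year - 1900, 11, 25) denotes December 25 of `year`; the revert passes the constructor the same arguments the V2 Date.of(...) factory received. A JDK-only sketch of the constructor's semantics (name illustrative):

    import java.sql.Date;

    public class LegacyDateCtorSketch {
      public static void main(String[] args) {
        // Arguments are (year - 1900, 0-based month, day of month).
        Date d = new Date(2000 - 1900, 11, 25);
        System.out.println(d);  // 2000-12-25
      }
    }
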
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
index 092da69..81d2e2d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcSerDeStats.java
@@ -420,12 +420,12 @@ public class TestOrcSerDeStats {
     long rowCount = writer.getNumberOfRows();
     long rawDataSize = writer.getRawDataSize();
     assertEquals(2, rowCount);
-    assertEquals(1668, rawDataSize);
+    assertEquals(1740, rawDataSize);
     Reader reader = OrcFile.createReader(testFilePath,
         OrcFile.readerOptions(conf).filesystem(fs));
 
     assertEquals(2, reader.getNumberOfRows());
-    assertEquals(1668, reader.getRawDataSize());
+    assertEquals(1740, reader.getRawDataSize());
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1")));
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("byte1")));
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("short1")));
@@ -438,9 +438,9 @@ public class TestOrcSerDeStats {
     assertEquals(455, reader.getRawDataSizeOfColumns(Lists.newArrayList("list")));
     assertEquals(368, reader.getRawDataSizeOfColumns(Lists.newArrayList("map")));
     assertEquals(364, reader.getRawDataSizeOfColumns(Lists.newArrayList("middle")));
-    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
+    assertEquals(80, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
     assertEquals(224, reader.getRawDataSizeOfColumns(Lists.newArrayList("decimal1")));
-    assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
+    assertEquals(88, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
     assertEquals(1195,
         reader.getRawDataSizeOfColumns(Lists.newArrayList("middle", "list", "map", "float1")));
     assertEquals(185,
@@ -514,12 +514,12 @@ public class TestOrcSerDeStats {
     long rowCount = writer.getNumberOfRows();
     long rawDataSize = writer.getRawDataSize();
     assertEquals(2, rowCount);
-    assertEquals(1668, rawDataSize);
+    assertEquals(1740, rawDataSize);
     Reader reader = OrcFile.createReader(testFilePath,
         OrcFile.readerOptions(conf).filesystem(fs));
 
     assertEquals(2, reader.getNumberOfRows());
-    assertEquals(1668, reader.getRawDataSize());
+    assertEquals(1740, reader.getRawDataSize());
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1")));
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("byte1")));
     assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("short1")));
@@ -532,9 +532,9 @@ public class TestOrcSerDeStats {
     assertEquals(455, reader.getRawDataSizeOfColumns(Lists.newArrayList("list")));
     assertEquals(368, reader.getRawDataSizeOfColumns(Lists.newArrayList("map")));
     assertEquals(364, reader.getRawDataSizeOfColumns(Lists.newArrayList("middle")));
-    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
+    assertEquals(80, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
     assertEquals(224, reader.getRawDataSizeOfColumns(Lists.newArrayList("decimal1")));
-    assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
+    assertEquals(88, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
     assertEquals(1195,
         reader.getRawDataSizeOfColumns(Lists.newArrayList("middle", "list", "map", "float1")));
     assertEquals(185,

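The raw-data-size expectations in the two hunks above move in lockstep: the ts column's reported size grows from 8 to 80 bytes, and every total that includes it grows by the same 72 bytes (1740 - 1668 = 80 - 8 = 88 - 16 = 72), so the entire delta is attributable to the restored TimestampWritable's per-value accounting. A trivial check of that arithmetic (illustrative):

    public class RawSizeDeltaCheck {
      public static void main(String[] args) {
        System.out.println(1740 - 1668);  // 72
        System.out.println(80 - 8);       // 72
        System.out.println(88 - 16);      // 72
      }
    }
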
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
index 2071d13..c23f00e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hive.ql.io.orc;
 
 import java.io.File;
-import java.time.LocalDateTime;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.Calendar;
 import java.util.Random;
 
 import junit.framework.Assert;
@@ -27,9 +29,7 @@ import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -38,17 +38,18 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.orc.TypeDescription;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -133,7 +134,7 @@ public class TestVectorizedORCReader {
     for (int i = 0; i < 21000; ++i) {
       if ((i % 7) != 0) {
         writer.addRow(new MyRecord(((i % 3) == 0), (byte)(i % 5), i, (long) 200, (short) (300 + i), (double) (400 + i),
-            words[r1.nextInt(words.length)], Timestamp.valueOf(LocalDateTime.now().toString()),
+            words[r1.nextInt(words.length)], new Timestamp(Calendar.getInstance().getTime().getTime()),
             Date.valueOf(dates[i % 3]), HiveDecimal.create(decimalStrings[i % decimalStrings.length])));
       } else {
         writer.addRow(new MyRecord(null, null, i, (long) 200, null, null, null, null, null, null));
@@ -173,21 +174,19 @@ public class TestVectorizedORCReader {
             Long temp = (long) (((BooleanWritable) a).get() ? 1 : 0);
             long b = ((LongColumnVector) cv).vector[rowId];
             Assert.assertEquals(temp.toString(), Long.toString(b));
-          } else if (a instanceof TimestampWritableV2) {
+          } else if (a instanceof TimestampWritable) {
             // Timestamps are stored as long, so convert and compare
-            TimestampWritableV2 t = ((TimestampWritableV2) a);
+            TimestampWritable t = ((TimestampWritable) a);
             TimestampColumnVector tcv = ((TimestampColumnVector) cv);
-            java.sql.Timestamp ts = tcv.asScratchTimestamp(rowId);
-            Assert.assertEquals(
-                t.getTimestamp(), Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
+            Assert.assertEquals(t.getTimestamp(), tcv.asScratchTimestamp(rowId));
 
-          } else if (a instanceof DateWritableV2) {
+          } else if (a instanceof DateWritable) {
             // Dates are stored as long, so convert and compare
 
-            DateWritableV2 adt = (DateWritableV2) a;
+            DateWritable adt = (DateWritable) a;
             long b = ((LongColumnVector) cv).vector[rowId];
-            Assert.assertEquals(adt.get().toEpochMilli(),
-                DateWritableV2.daysToMillis((int) b));
+            Assert.assertEquals(adt.get().getTime(),
+                DateWritable.daysToMillis((int) b));
 
           } else if (a instanceof HiveDecimalWritable) {
             // Decimals are stored as BigInteger, so convert and compare

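The restored branches above compare timestamps as java.sql objects via the column vector's scratch accessor, and dates by converting the stored day count to epoch millis. A self-contained sketch of the scratch-timestamp path, with an illustrative value:

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

    public class ScratchTimestampDemo {
      public static void main(String[] args) {
        TimestampColumnVector tcv = new TimestampColumnVector(1);
        tcv.set(0, new Timestamp(1234L));   // 1.234 s after the epoch
        // asScratchTimestamp fills and returns a reusable Timestamp, which the
        // restored assertion compares directly to TimestampWritable.getTimestamp().
        Timestamp ts = tcv.asScratchTimestamp(0);
        System.out.println(ts.getTime());   // 1234
        System.out.println(ts.getNanos());  // 234000000
      }
    }
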
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
index 1d32afe..a230441 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
@@ -23,7 +23,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -63,6 +62,7 @@ import org.apache.parquet.io.api.Binary;
 import org.apache.parquet.schema.MessageType;
 
 import java.io.IOException;
+import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.List;
 
@@ -209,9 +209,7 @@ public class VectorizedColumnReaderTestBase {
   }
 
   protected static NanoTime getNanoTime(int index) {
-    Timestamp ts = new Timestamp();
-    ts.setTimeInMillis(index);
-    return NanoTimeUtils.getNanoTime(ts, false);
+    return NanoTimeUtils.getNanoTime(new Timestamp(index), false);
   }
 
   protected static HiveDecimal getDecimal(
@@ -378,13 +376,8 @@ public class VectorizedColumnReaderTestBase {
           if (c == nElements) {
             break;
           }
-          Timestamp expected = new Timestamp();
-          if (isDictionaryEncoding) {
-            expected.setTimeInMillis(c % UNIQUE_NUM);
-          } else {
-            expected.setTimeInMillis(c);
-          }
-          assertEquals("Not the same time at " + c, expected.toEpochMilli(), vector.getTime(i));
+          Timestamp expected = isDictionaryEncoding ? new Timestamp(c % UNIQUE_NUM) : new Timestamp(c);
+          assertEquals("Not the same time at " + c, expected.getTime(), vector.getTime(i));
           assertEquals("Not the same nano at " + c, expected.getNanos(), vector.getNanos(i));
           assertFalse(vector.isNull[i]);
           c++;
@@ -415,12 +408,8 @@ public class VectorizedColumnReaderTestBase {
             break;
           }
 
-          Timestamp expected = new Timestamp();
-          if (isDictionaryEncoding) {
-            expected.setTimeInMillis(c % UNIQUE_NUM);
-          } else {
-            expected.setTimeInMillis(c);
-          };
+          Timestamp expected = isDictionaryEncoding ? new Timestamp(c % UNIQUE_NUM) : new Timestamp(
+              c);
           String actual = new String(Arrays
               .copyOfRange(vector.vector[i], vector.start[i], vector.start[i] + vector.length[i]));
           assertEquals("Not the same time at " + c, expected.toString(), actual);

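Both forms in these hunks set the same epoch-millisecond instant; the revert simply swaps the mutable setTimeInMillis API for the java.sql.Timestamp(long) constructor. A minimal sketch of the restored form:

    import java.sql.Timestamp;

    public class MillisTimestampDemo {
      public static void main(String[] args) {
        // The millis constructor keeps the sub-second part in the nanos field.
        Timestamp expected = new Timestamp(42L);
        System.out.println(expected.getTime());   // 42
        System.out.println(expected.getNanos());  // 42000000
        System.out.println(expected);  // rendered in local time, e.g. 1970-01-01 00:00:00.042 in a UTC JVM
      }
    }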

[13/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/orc_merge6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge6.q.out b/ql/src/test/results/clientpositive/orc_merge6.q.out
index 7f3d3d3..7c429d6 100644
--- a/ql/src/test/results/clientpositive/orc_merge6.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge6.q.out
@@ -38,17 +38,17 @@ STAGE PLANS:
           TableScan
             alias: orc_merge5_n4
             filterExpr: (userid <= 13L) (type: boolean)
-            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (userid <= 13L) (type: boolean)
-              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                       output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -57,18 +57,18 @@ STAGE PLANS:
                 Select Operator
                   expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int)
                   outputColumnNames: userid, string1, subtype, decimal1, ts, year, hour
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
                     keys: year (type: string), hour (type: int)
                     mode: hash
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string), _col1 (type: int)
                       sort order: ++
                       Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
-                      Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col3 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col4 (type: struct<columntype:string,min:double,max:double,countnulls:bigint,bitvector:binary>), _col5 (type: struct<columntype:string,min:decimal(38,0),max:decimal(38,0),countnulls:bigint,bitvector:binary>), _col6 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>)
       Reduce Operator Tree:
         Group By Operator
@@ -76,14 +76,14 @@ STAGE PLANS:
           keys: KEY._col0 (type: string), KEY._col1 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col3 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col4 (type: struct<columntype:string,min:double,max:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col5 (type: struct<columntype:string,min:decimal(38,0),max:decimal(38,0),countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col6 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string), _col1 (type: int)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -156,9 +156,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-Found 1 items
+Found 3 items
 #### A masked pattern was here ####
-Found 1 items
+Found 3 items
 #### A masked pattern was here ####
 PREHOOK: query: show partitions orc_merge5a_n1
 PREHOOK: type: SHOWPARTITIONS
@@ -207,17 +207,17 @@ STAGE PLANS:
           TableScan
             alias: orc_merge5_n4
             filterExpr: (userid <= 13L) (type: boolean)
-            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (userid <= 13L) (type: boolean)
-              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                       output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -226,18 +226,18 @@ STAGE PLANS:
                 Select Operator
                   expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int)
                   outputColumnNames: userid, string1, subtype, decimal1, ts, year, hour
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
                     keys: year (type: string), hour (type: int)
                     mode: hash
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string), _col1 (type: int)
                       sort order: ++
                       Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
-                      Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col3 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col4 (type: struct<columntype:string,min:double,max:double,countnulls:bigint,bitvector:binary>), _col5 (type: struct<columntype:string,min:decimal(38,0),max:decimal(38,0),countnulls:bigint,bitvector:binary>), _col6 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>)
       Reduce Operator Tree:
         Group By Operator
@@ -245,14 +245,14 @@ STAGE PLANS:
           keys: KEY._col0 (type: string), KEY._col1 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col2 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col3 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col4 (type: struct<columntype:string,min:double,max:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col5 (type: struct<columntype:string,min:decimal(38,0),max:decimal(38,0),countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col6 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: string), _col1 (type: int)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -430,9 +430,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-Found 1 items
+Found 3 items
 #### A masked pattern was here ####
-Found 1 items
+Found 3 items
 #### A masked pattern was here ####
 PREHOOK: query: show partitions orc_merge5a_n1
 PREHOOK: type: SHOWPARTITIONS

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out b/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
index c83c416..6295714 100644
--- a/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
@@ -37,17 +37,17 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_merge5_n3
-            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (userid <= 13L) (type: boolean)
-              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                       output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -56,7 +56,7 @@ STAGE PLANS:
                 Select Operator
                   expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp)
                   outputColumnNames: userid, string1, subtype, decimal1, ts
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
                     mode: hash

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out b/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
index ef75520..95fa5ca 100644
--- a/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
@@ -37,14 +37,14 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_merge5
-            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp), subtype (type: double)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                 table:
                     input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                     output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -53,18 +53,18 @@ STAGE PLANS:
               Select Operator
                 expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(38,0)), _col4 (type: timestamp), _col5 (type: double)
                 outputColumnNames: userid, string1, subtype, decimal1, ts, st
-                Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
                   keys: st (type: double)
                   mode: hash
                   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     key expressions: _col0 (type: double)
                     sort order: +
                     Map-reduce partition columns: _col0 (type: double)
-                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>), _col3 (type: struct<columntype:string,min:double,max:double,countnulls:bigint,bitvector:binary>), _col4 (type: struct<columntype:string,min:decimal(38,0),max:decimal(38,0),countnulls:bigint,bitvector:binary>), _col5 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>)
       Reduce Operator Tree:
         Group By Operator
@@ -72,14 +72,14 @@ STAGE PLANS:
           keys: KEY._col0 (type: double)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col1 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col2 (type: struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col3 (type: struct<columntype:string,min:double,max:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col4 (type: struct<columntype:string,min:decimal(38,0),max:decimal(38,0),countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col5 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>), _col0 (type: double)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-            Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/orc_ppd_char.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_ppd_char.q.out b/ql/src/test/results/clientpositive/orc_ppd_char.q.out
index 95766b0..846de53 100644
--- a/ql/src/test/results/clientpositive/orc_ppd_char.q.out
+++ b/ql/src/test/results/clientpositive/orc_ppd_char.q.out
@@ -26,7 +26,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -35,7 +35,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c!="apple"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -44,7 +44,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c!="apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c!="apple"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -53,7 +53,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c!="apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<"hello"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -62,7 +62,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<"hello"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<"hello"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -71,7 +71,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<"hello"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<="hello"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -80,7 +80,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<="hello"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<="hello"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -89,7 +89,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<="hello"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple "
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -98,7 +98,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple "
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple "
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -107,7 +107,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple "
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "carrot")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -116,7 +116,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "carrot")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -125,7 +125,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "hello")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -134,7 +134,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "h
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "hello")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -143,7 +143,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "h
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("carrot")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -170,7 +170,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple" and "carrot"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -179,7 +179,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple" and "zombie"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -188,7 +188,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple" and "zombie"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4
@@ -197,7 +197,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n4
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "carrot" and "carrot1"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n4

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out b/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
index f1a137c..6dd6e3f 100644
--- a/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
+++ b/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
@@ -24,8 +24,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -57,8 +57,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -87,8 +87,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28789.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28788.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-              predicate: (((UDFToDouble(ctimestamp1) > -28789.0D) and (UDFToDouble(ctimestamp2) <> -28788.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                  predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 11.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val 12.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+              predicate: (((UDFToDouble(ctimestamp1) > 11.0D) and (UDFToDouble(ctimestamp2) <> 12.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
               Statistics: Num rows: 2730 Data size: 32760 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -246,8 +246,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -280,8 +280,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -355,8 +355,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -388,8 +388,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -417,8 +417,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28801.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28801.336)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-              predicate: (((UDFToDouble(ctimestamp1) > -28801.388D) and (UDFToDouble(ctimestamp2) <> -28801.336D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                  predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -1.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -1.3359999999999999)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+              predicate: (((UDFToDouble(ctimestamp1) > -1.388D) and (UDFToDouble(ctimestamp2) <> -1.3359999999999999D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
               Statistics: Num rows: 2730 Data size: 32760 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -563,8 +563,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -597,8 +597,8 @@ FROM     alltypesparquet
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16

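Every shifted literal in this file differs by exactly 28800 seconds (eight hours), consistent with the test timestamps being interpreted in a US-Pacific JVM under the restored local-time semantics rather than in UTC; the time zone itself is an inference from the numbers, not stated in the diff. A sketch of the conversion that the compiled CastTimestampToDouble / UDFToDouble expressions model:

    import java.sql.Timestamp;

    public class TimestampToDoubleDemo {
      public static void main(String[] args) {
        // java.sql.Timestamp.valueOf parses in the JVM's default time zone.
        Timestamp ts = Timestamp.valueOf("1969-12-31 16:00:00");
        // Casting a timestamp to double yields (fractional) seconds since the epoch.
        double seconds = ts.getTime() / 1000.0;
        // Prints 0.0 in an America/Los_Angeles JVM, -28800.0 in a UTC JVM.
        System.out.println(seconds);
      }
    }
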
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out b/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out
index 357d838..e33e701 100644
--- a/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out
+++ b/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out
@@ -16,11 +16,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -43,11 +43,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -74,8 +74,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28815.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
-              predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28815.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+                  predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -15.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+              predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -15.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
               Statistics: Num rows: 5461 Data size: 65532 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -155,11 +155,11 @@ PREHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -183,11 +183,11 @@ POSTHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -237,11 +237,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -264,11 +264,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -294,8 +294,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28792.315)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
-              predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28792.315D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+                  predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 7.6850000000000005)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+              predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > 7.6850000000000005D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
               Statistics: Num rows: 5461 Data size: 65532 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -369,11 +369,11 @@ PREHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -397,11 +397,11 @@ POSTHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesparquet
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out
index 5a8c069..485bfe7 100644
--- a/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out
+++ b/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out
@@ -89,13 +89,13 @@ POSTHOOK: query: SELECT cdate, cdecimal from date_decimal_test_parquet where cin
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_decimal_test_parquet
 #### A masked pattern was here ####
-1970-01-07	-7959.5837837838
-1970-01-07	-2516.4135135135
-1970-01-07	-9445.0621621622
-1970-01-07	-5713.7459459459
-1970-01-07	8963.6405405405
-1970-01-07	4193.6243243243
-1970-01-07	2964.3864864865
-1970-01-07	-4673.2540540541
-1970-01-07	-9216.8945945946
-1970-01-07	-9287.3756756757
+1970-01-06	-7959.5837837838
+1970-01-06	-2516.4135135135
+1970-01-06	-9445.0621621622
+1970-01-06	-5713.7459459459
+1970-01-06	8963.6405405405
+1970-01-06	4193.6243243243
+1970-01-06	2964.3864864865
+1970-01-06	-4673.2540540541
+1970-01-06	-9216.8945945946
+1970-01-06	-9287.3756756757
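
The one-day shift in these expected dates follows from the reverted DateWritable conversion, which maps epoch millis to days using the JVM's local offset rather than UTC. A rough JDK-only sketch of the effect, assuming America/Los_Angeles as the test zone:

    import java.util.TimeZone;
    import java.util.concurrent.TimeUnit;

    public class MillisToDaysDemo {
      public static void main(String[] args) {
        long millis = TimeUnit.DAYS.toMillis(6);                           // 1970-01-07 00:00 UTC
        long utcDays = Math.floorDiv(millis, TimeUnit.DAYS.toMillis(1));   // 6
        TimeZone tz = TimeZone.getTimeZone("America/Los_Angeles");
        long localMillis = millis + tz.getOffset(millis);                  // wall-clock millis
        long localDays = Math.floorDiv(localMillis, TimeUnit.DAYS.toMillis(1)); // 5
        System.out.println(utcDays + " vs " + localDays);                  // 01-07 vs 01-06
      }
    }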

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/partition_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/partition_timestamp.q.out b/ql/src/test/results/clientpositive/partition_timestamp.q.out
index a80ed2b..34f70a5 100644
--- a/ql/src/test/results/clientpositive/partition_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/partition_timestamp.q.out
@@ -14,79 +14,79 @@ PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-
   select * from src tablesample (10 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-01-01 01:00:00', region= '1')
   select * from src tablesample (10 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-01-01 02:00:00', region= '2')
   select * from src tablesample (5 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2
+PREHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2
 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2000-01-01 02:00:00', region= '2')
   select * from src tablesample (5 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00.0,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2000-01-01 02:00:00.0,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 01:00:00', region= '2020-20-20')
   select * from src tablesample (5 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20
+PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20
 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 01:00:00', region= '2020-20-20')
   select * from src tablesample (5 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00,region=2020-20-20).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00,region=2020-20-20).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00.0,region=2020-20-20).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 01:00:00.0,region=2020-20-20).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 02:00:00', region= '1')
   select * from src tablesample (20 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1
+PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1
 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 02:00:00', region= '1')
   select * from src tablesample (20 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 02:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 03:00:00', region= '10')
   select * from src tablesample (11 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10
+PREHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10
 POSTHOOK: query: insert overwrite table partition_timestamp_1 partition(dt='2001-01-01 03:00:00', region= '10')
   select * from src tablesample (11 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00,region=10).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00,region=10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00.0,region=10).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp_1 PARTITION(dt=2001-01-01 03:00:00.0,region=10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: select distinct dt from partition_timestamp_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_timestamp_1
-PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1
-PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20
-PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1
-PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10
+PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1
+PREHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20
+PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1
+PREHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10
 #### A masked pattern was here ####
 POSTHOOK: query: select distinct dt from partition_timestamp_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_timestamp_1
-POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00/region=1
-POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00/region=2020-20-20
-POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00/region=1
-POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00/region=10
+POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 01%3A00%3A00.0/region=1
+POSTHOOK: Input: default@partition_timestamp_1@dt=2000-01-01 02%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 01%3A00%3A00.0/region=2020-20-20
+POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 02%3A00%3A00.0/region=1
+POSTHOOK: Input: default@partition_timestamp_1@dt=2001-01-01 03%3A00%3A00.0/region=10
 #### A masked pattern was here ####
 2000-01-01 01:00:00
 2000-01-01 02:00:00
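
The ".0" suffix appearing in the partition names comes straight from java.sql.Timestamp.toString(), which always prints the fractional part even when it is zero; the Timestamp type being reverted omitted a zero fraction. For example:

    import java.sql.Timestamp;

    public class TimestampToStringDemo {
      public static void main(String[] args) {
        Timestamp ts = Timestamp.valueOf("2000-01-01 01:00:00");
        // java.sql.Timestamp always renders the fraction
        System.out.println(ts);  // 2000-01-01 01:00:00.0
      }
    }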


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
index cffd10b..7d3c3f4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
@@ -17,14 +17,15 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.common.type.Date;
+import java.sql.Date;
+
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.UDFType;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 
@@ -37,7 +38,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 @NDV(maxNdv = 1)
 public class GenericUDFCurrentDate extends GenericUDF {
 
-  protected DateWritableV2 currentDate;
+  protected DateWritable currentDate;
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments)
@@ -51,7 +52,7 @@ public class GenericUDFCurrentDate extends GenericUDF {
     if (currentDate == null) {
       Date dateVal =
           Date.valueOf(SessionState.get().getQueryCurrentTimestamp().toString().substring(0, 10));
-      currentDate = new DateWritableV2(dateVal);
+      currentDate = new DateWritable(dateVal);
     }
 
     return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
@@ -62,11 +63,11 @@ public class GenericUDFCurrentDate extends GenericUDF {
     return currentDate;
   }
 
-  public DateWritableV2 getCurrentDate() {
+  public DateWritable getCurrentDate() {
     return currentDate;
   }
 
-  public void setCurrentDate(DateWritableV2 currentDate) {
+  public void setCurrentDate(DateWritable currentDate) {
     this.currentDate = currentDate;
   }
 
@@ -82,7 +83,7 @@ public class GenericUDFCurrentDate extends GenericUDF {
     // Need to preserve currentDate
     GenericUDFCurrentDate other = (GenericUDFCurrentDate) newInstance;
     if (this.currentDate != null) {
-      other.currentDate = new DateWritableV2(this.currentDate);
+      other.currentDate = new DateWritable(this.currentDate);
     }
   }
 }
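
As the hunk above shows, the reverted UDF pins the current date once per query: it takes the yyyy-MM-dd prefix of the session's query timestamp and wraps it in a DateWritable, so repeated evaluate() calls return a consistent value. The derivation itself is plain JDK; a standalone sketch, not the UDF's actual wiring:

    import java.sql.Date;
    import java.sql.Timestamp;

    public class CurrentDateDemo {
      public static void main(String[] args) {
        Timestamp queryTs = new Timestamp(System.currentTimeMillis());
        // Timestamp.toString() starts with yyyy-MM-dd, so the prefix is the date
        Date current = Date.valueOf(queryTs.toString().substring(0, 10));
        System.out.println(current);
      }
    }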

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
index d9447f1..9da51c8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
@@ -17,14 +17,13 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.UDFType;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 
@@ -37,7 +36,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 @NDV(maxNdv = 1)
 public class GenericUDFCurrentTimestamp extends GenericUDF {
 
-  protected TimestampWritableV2 currentTimestamp;
+  protected TimestampWritable currentTimestamp;
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments)
@@ -49,9 +48,7 @@ public class GenericUDFCurrentTimestamp extends GenericUDF {
     }
 
     if (currentTimestamp == null) {
-      java.sql.Timestamp ts = SessionState.get().getQueryCurrentTimestamp();
-      currentTimestamp = new TimestampWritableV2(
-          Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
+      currentTimestamp = new TimestampWritable(SessionState.get().getQueryCurrentTimestamp());
     }
 
     return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
@@ -62,11 +59,11 @@ public class GenericUDFCurrentTimestamp extends GenericUDF {
     return currentTimestamp;
   }
 
-  public TimestampWritableV2 getCurrentTimestamp() {
+  public TimestampWritable getCurrentTimestamp() {
     return currentTimestamp;
   }
 
-  public void setCurrentTimestamp(TimestampWritableV2 currentTimestamp) {
+  public void setCurrentTimestamp(TimestampWritable currentTimestamp) {
     this.currentTimestamp = currentTimestamp;
   }
 
@@ -81,7 +78,7 @@ public class GenericUDFCurrentTimestamp extends GenericUDF {
     // Need to preserve currentTimestamp
     GenericUDFCurrentTimestamp other = (GenericUDFCurrentTimestamp) newInstance;
     if (this.currentTimestamp != null) {
-      other.currentTimestamp = new TimestampWritableV2(this.currentTimestamp);
+      other.currentTimestamp = new TimestampWritable(this.currentTimestamp);
     }
   }
 }
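
The dropped ofEpochMilli(ts.getTime(), ts.getNanos()) round trip existed because java.sql.Timestamp splits its value across two accessors: getTime() carries millisecond precision while getNanos() holds the full fraction. A small illustration (class name is illustrative):

    import java.sql.Timestamp;

    public class TimestampNanosDemo {
      public static void main(String[] args) {
        Timestamp ts = Timestamp.valueOf("2018-06-25 10:38:09.123456789");
        System.out.println(ts.getTime() % 1000);  // 123 (millis of the fraction)
        System.out.println(ts.getNanos());        // 123456789 (full fraction)
      }
    }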

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
index f5c4eb5..b73893d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -27,8 +28,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateTimestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -56,8 +57,8 @@ public class GenericUDFDate extends GenericUDF {
   private transient PrimitiveCategory inputType;
   private transient PrimitiveObjectInspector argumentOI;
   private transient DateParser dateParser = new DateParser();
-  private transient final DateWritableV2 output = new DateWritableV2();
-  private transient final Date date = new Date();
+  private transient final DateWritable output = new DateWritable();
+  private transient final Date date = new Date(0);
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
@@ -116,13 +117,13 @@ public class GenericUDFDate extends GenericUDF {
       }
       break;
     case TIMESTAMP:
-      Timestamp ts = ((TimestampWritableV2) timestampConverter.convert(arguments[0].get()))
+      Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get()))
           .getTimestamp();
-      output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
+      output.set(DateWritable.millisToDays(ts.getTime()));
       break;
     case TIMESTAMPLOCALTZ:
     case DATE:
-      DateWritableV2 dw = (DateWritableV2) dateWritableConverter.convert(arguments[0].get());
+      DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
       output.set(dw);
       break;
     default:

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
index be7bd17..8ba103b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -29,9 +30,9 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateAddColScal
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateAddScalarCol;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
@@ -64,12 +65,12 @@ import org.apache.hive.common.util.DateParser;
 @VectorizedExpressions({VectorUDFDateAddColScalar.class, VectorUDFDateAddScalarCol.class, VectorUDFDateAddColCol.class})
 public class GenericUDFDateAdd extends GenericUDF {
   private transient final DateParser dateParser = new DateParser();
-  private transient final Date dateVal = new Date();
+  private transient final Date dateVal = new Date(0);
   private transient Converter dateConverter;
   private transient Converter daysConverter;
   private transient PrimitiveCategory inputType1;
   private transient PrimitiveCategory inputType2;
-  private final DateWritableV2 output = new DateWritableV2();
+  private final DateWritable output = new DateWritable();
   protected int signModifier = 1;  // 1 for addition, -1 for subtraction
 
   @Override
@@ -162,7 +163,7 @@ public class GenericUDFDateAdd extends GenericUDF {
       return null;
     }
 
-    // Convert the first param into a DateWritableV2 value
+    // Convert the first param into a DateWritable value
     switch (inputType1) {
     case STRING:
       String dateString = dateConverter.convert(arguments[0].get()).toString();
@@ -173,12 +174,12 @@ public class GenericUDFDateAdd extends GenericUDF {
       }
       break;
     case TIMESTAMP:
-      Timestamp ts = ((TimestampWritableV2) dateConverter.convert(arguments[0].get()))
+      Timestamp ts = ((TimestampWritable) dateConverter.convert(arguments[0].get()))
         .getTimestamp();
-      output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
+      output.set(DateWritable.millisToDays(ts.getTime()));
       break;
     case DATE:
-      DateWritableV2 dw = (DateWritableV2) dateConverter.convert(arguments[0].get());
+      DateWritable dw = (DateWritable) dateConverter.convert(arguments[0].get());
       output.set(dw.getDays());
       break;
     default:

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
index 00386c8..e9cbcf7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
@@ -17,8 +17,11 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import java.sql.Timestamp;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.sql.Date;
+
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -29,9 +32,9 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateDiffColCol
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateDiffColScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateDiffScalarCol;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
@@ -40,7 +43,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.Pr
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.io.IntWritable;
 
 import javax.annotation.Nullable;
@@ -63,6 +65,7 @@ import javax.annotation.Nullable;
         + "  1")
 @VectorizedExpressions({VectorUDFDateDiffColScalar.class, VectorUDFDateDiffColCol.class, VectorUDFDateDiffScalarCol.class})
 public class GenericUDFDateDiff extends GenericUDF {
+  private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private transient Converter inputConverter1;
   private transient Converter inputConverter2;
   private IntWritable output = new IntWritable();
@@ -113,25 +116,21 @@ public class GenericUDFDateDiff extends GenericUDF {
     case CHAR:
       String dateString = converter.convert(argument.get()).toString();
       try {
-        return Date.valueOf(dateString);
-      } catch (IllegalArgumentException e) {
-        Timestamp ts = PrimitiveObjectInspectorUtils.getTimestampFromString(dateString);
-        if (ts != null) {
-          return Date.ofEpochMilli(ts.toEpochMilli());
-        }
+        return new Date(formatter.parse(dateString).getTime());
+      } catch (ParseException e) {
         return null;
       }
     case TIMESTAMP:
-      Timestamp ts = ((TimestampWritableV2) converter.convert(argument.get()))
+      Timestamp ts = ((TimestampWritable) converter.convert(argument.get()))
         .getTimestamp();
-      return Date.ofEpochMilli(ts.toEpochMilli());
+      return new Date(ts.getTime());
     case DATE:
-      DateWritableV2 dw = (DateWritableV2) converter.convert(argument.get());
+      DateWritable dw = (DateWritable) converter.convert(argument.get());
       return dw.get();
     case TIMESTAMPLOCALTZ:
       TimestampTZ tsz = ((TimestampLocalTZWritable) converter.convert(argument.get()))
           .getTimestampTZ();
-      return Date.ofEpochMilli(tsz.getEpochSecond() * 1000l);
+      return new Date(tsz.getEpochSecond() * 1000l);
     default:
       throw new UDFArgumentException(
         "TO_DATE() only takes STRING/TIMESTAMP/TIMESTAMPLOCALTZ types, got " + inputType);
@@ -176,7 +175,7 @@ public class GenericUDFDateDiff extends GenericUDF {
       return null;
     }
 
-    result.set(DateWritableV2.dateToDays(date) - DateWritableV2.dateToDays(date2));
+    result.set(DateWritable.dateToDays(date) - DateWritable.dateToDays(date2));
     return result;
   }
 }
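
Each input branch ultimately reduces to a whole-day count, and the result is a plain subtraction of days. The same arithmetic in java.time terms, as a hedged sketch rather than the class's code path:

    import java.time.LocalDate;
    import java.time.temporal.ChronoUnit;

    public class DateDiffDemo {
      public static void main(String[] args) {
        // datediff('2009-07-31', '2009-07-30') = 1
        long diff = ChronoUnit.DAYS.between(
            LocalDate.parse("2009-07-30"), LocalDate.parse("2009-07-31"));
        System.out.println(diff);  // 1
      }
    }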

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
index 6d3e86f..6b775d6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
@@ -21,10 +21,8 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
 
 import java.text.SimpleDateFormat;
-import java.util.TimeZone;
+import java.util.Date;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
@@ -46,15 +44,14 @@ import org.apache.hadoop.io.Text;
 @Description(name = "date_format", value = "_FUNC_(date/timestamp/string, fmt) - converts a date/timestamp/string "
     + "to a value of string in the format specified by the date format fmt.",
     extended = "Supported formats are SimpleDateFormat formats - "
-        + "https://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html. "
-        + "Second argument fmt should be constant.\n"
-        + "Example: > SELECT _FUNC_('2015-04-08', 'y');\n '2015'")
+    + "https://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html. "
+    + "Second argument fmt should be constant.\n"
+    + "Example: > SELECT _FUNC_('2015-04-08', 'y');\n '2015'")
 public class GenericUDFDateFormat extends GenericUDF {
   private transient Converter[] tsConverters = new Converter[2];
   private transient PrimitiveCategory[] tsInputTypes = new PrimitiveCategory[2];
   private transient Converter[] dtConverters = new Converter[2];
   private transient PrimitiveCategory[] dtInputTypes = new PrimitiveCategory[2];
-  private final java.util.Date date = new java.util.Date();
   private final Text output = new Text();
   private transient SimpleDateFormat formatter;
 
@@ -80,7 +77,6 @@ public class GenericUDFDateFormat extends GenericUDF {
       if (fmtStr != null) {
         try {
           formatter = new SimpleDateFormat(fmtStr);
-          formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
         } catch (IllegalArgumentException e) {
           // ignore
         }
@@ -101,16 +97,14 @@ public class GenericUDFDateFormat extends GenericUDF {
     }
     // the function should support both short date and full timestamp format
     // time part of the timestamp should not be skipped
-    Timestamp ts = getTimestampValue(arguments, 0, tsConverters);
-    if (ts == null) {
-      Date d = getDateValue(arguments, 0, dtInputTypes, dtConverters);
-      if (d == null) {
+    Date date = getTimestampValue(arguments, 0, tsConverters);
+    if (date == null) {
+      date = getDateValue(arguments, 0, dtInputTypes, dtConverters);
+      if (date == null) {
         return null;
       }
-      ts = Timestamp.ofEpochMilli(d.toEpochMilli());
     }
 
-    date.setTime(ts.toEpochMilli());
     String res = formatter.format(date);
     if (res == null) {
       return null;
@@ -128,4 +122,4 @@ public class GenericUDFDateFormat extends GenericUDF {
   protected String getFuncName() {
     return "date_format";
   }
-}
\ No newline at end of file
+}
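
With the UTC formatter line removed, date_format falls back to SimpleDateFormat in the JVM's default time zone, formatting whichever java.util.Date the two branches produce. A self-contained example of the usage documented in the @Description annotation:

    import java.sql.Timestamp;
    import java.text.SimpleDateFormat;

    public class DateFormatDemo {
      public static void main(String[] args) {
        SimpleDateFormat fmt = new SimpleDateFormat("y");
        Timestamp ts = Timestamp.valueOf("2015-04-08 00:00:00");
        // date_format('2015-04-08', 'y') -> '2015'
        System.out.println(fmt.format(ts));
      }
    }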

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
index bcc4114..eaab703 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
@@ -17,14 +17,14 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.text.SimpleDateFormat;
-
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateSubColCol;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateSubColScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateSubScalarCol;
 
+import java.text.SimpleDateFormat;
+
 /**
  * UDFDateSub.
  *

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
index 67aec82..8691ed1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
@@ -17,23 +17,23 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.sql.Timestamp;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
 import java.util.TimeZone;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.common.type.TimestampTZ;
-import org.apache.hadoop.hive.common.type.TimestampTZUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TextConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 @Description(name = "from_utc_timestamp",
              value = "from_utc_timestamp(timestamp, string timezone) - "
@@ -45,6 +45,7 @@ public class GenericUDFFromUtcTimestamp extends GenericUDF {
   private transient PrimitiveObjectInspector[] argumentOIs;
   private transient TimestampConverter timestampConverter;
   private transient TextConverter textConverter;
+  private transient SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
   private transient TimeZone tzUTC = TimeZone.getTimeZone("UTC");
 
   @Override
@@ -69,6 +70,26 @@ public class GenericUDFFromUtcTimestamp extends GenericUDF {
     return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
   }
 
+  /**
+   * Parse the timestamp string using the input TimeZone.
+   * This does not parse fractional seconds.
+   * @param tsString the timestamp string, in yyyy-MM-dd HH:mm:ss form
+   * @param tz the time zone in which to interpret the string
+   * @return the parsed Timestamp, or null if the string cannot be parsed
+   */
+  protected Timestamp timestampFromString(String tsString, TimeZone tz) {
+    dateFormat.setTimeZone(tz);
+    try {
+      java.util.Date date = dateFormat.parse(tsString);
+      if (date == null) {
+        return null;
+      }
+      return new Timestamp(date.getTime());
+    } catch (ParseException err) {
+      return null;
+    }
+  }
+
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
     Object o0 = arguments[0].get();
@@ -85,7 +106,7 @@ public class GenericUDFFromUtcTimestamp extends GenericUDF {
       return null;
     }
 
-    Timestamp inputTs = ((TimestampWritableV2) converted_o0).getTimestamp();
+    Timestamp inputTs = ((TimestampWritable) converted_o0).getTimestamp();
 
     String tzStr = textConverter.convert(o1).toString();
     TimeZone timezone = TimeZone.getTimeZone(tzStr);
@@ -102,15 +123,21 @@ public class GenericUDFFromUtcTimestamp extends GenericUDF {
 
     // inputTs is the year/month/day/hour/minute/second in the local timezone.
     // For this UDF we want it in the timezone represented by fromTz
-    TimestampTZ fromTs = TimestampTZUtil.parse(inputTs.toString(), fromTz.toZoneId());
+    Timestamp fromTs = timestampFromString(inputTs.toString(), fromTz);
     if (fromTs == null) {
       return null;
     }
 
     // Now output this timestamp's millis value to the equivalent toTz.
-    Timestamp result = Timestamp.valueOf(
-        fromTs.getZonedDateTime().withZoneSameInstant(toTz.toZoneId()).toLocalDateTime().toString());
+    dateFormat.setTimeZone(toTz);
+    Timestamp result = Timestamp.valueOf(dateFormat.format(fromTs));
+
+    if (inputTs.getNanos() != 0) {
+      result.setNanos(inputTs.getNanos());
+    }
+
     return result;
+
   }
 
   @Override
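
The reverted conversion is a two-step SimpleDateFormat exercise: parse the wall clock in the source zone to obtain an instant, then format that instant in the target zone. A standalone sketch of the same steps (the zone choice is illustrative):

    import java.sql.Timestamp;
    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.TimeZone;

    public class FromUtcTimestampDemo {
      public static void main(String[] args) throws ParseException {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
        // Step 1: interpret the wall clock as UTC
        Timestamp fromTs = new Timestamp(fmt.parse("2018-06-25 10:38:09").getTime());
        // Step 2: render the same instant in the target zone (UTC+9 here)
        fmt.setTimeZone(TimeZone.getTimeZone("Asia/Seoul"));
        System.out.println(Timestamp.valueOf(fmt.format(fromTs)));  // 2018-06-25 19:38:09.0
      }
    }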

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java
index 733fe63..d739af9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java
@@ -20,14 +20,13 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorInBloomFilterColDynamicValue;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -38,8 +37,10 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hive.common.util.BloomKFilter;
 
+import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.sql.Timestamp;
 
 /**
  * GenericUDF to lookup a value in BloomFilter
@@ -146,13 +147,13 @@ public class GenericUDFInBloomFilter extends GenericUDF {
         int startIdx = vDecimal.toBytes(scratchBuffer);
         return bloomFilter.testBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
       case DATE:
-        DateWritableV2 vDate = ((DateObjectInspector) valObjectInspector).
+        DateWritable vDate = ((DateObjectInspector) valObjectInspector).
                 getPrimitiveWritableObject(arguments[0].get());
         return bloomFilter.testLong(vDate.getDays());
       case TIMESTAMP:
         Timestamp vTimeStamp = ((TimestampObjectInspector) valObjectInspector).
                 getPrimitiveJavaObject(arguments[0].get());
-        return bloomFilter.testLong(vTimeStamp.toEpochMilli());
+        return bloomFilter.testLong(vTimeStamp.getTime());
       case CHAR:
         Text vChar = ((HiveCharObjectInspector) valObjectInspector).
                 getPrimitiveWritableObject(arguments[0].get()).getStrippedValue();

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java
index dceace5..238eff9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java
@@ -20,8 +20,9 @@ package org.apache.hadoop.hive.ql.udf.generic;
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.DATE_GROUP;
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP;
+import java.util.Calendar;
+import java.util.Date;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -30,6 +31,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.C
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
+import org.apache.hive.common.util.DateUtils;
 
 /**
  * GenericUDFLastDay.
@@ -46,7 +48,7 @@ import org.apache.hadoop.io.Text;
 public class GenericUDFLastDay extends GenericUDF {
   private transient Converter[] converters = new Converter[1];
   private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[1];
-  private final Date date = new Date();
+  private final Calendar calendar = Calendar.getInstance();
   private final Text output = new Text();
 
   @Override
@@ -65,13 +67,14 @@ public class GenericUDFLastDay extends GenericUDF {
 
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    Date d = getDateValue(arguments, 0, inputTypes, converters);
-    if (d == null) {
+    Date date = getDateValue(arguments, 0, inputTypes, converters);
+    if (date == null) {
       return null;
     }
 
-    lastDay(d);
-    output.set(date.toString());
+    lastDay(date);
+    Date newDate = calendar.getTime();
+    output.set(DateUtils.getDateFormat().format(newDate));
     return output;
   }
 
@@ -85,9 +88,10 @@ public class GenericUDFLastDay extends GenericUDF {
     return "last_day";
   }
 
-  protected Date lastDay(Date d) {
-    date.setTimeInDays(d.toEpochDay());
-    date.setDayOfMonth(date.lengthOfMonth());
-    return date;
+  protected Calendar lastDay(Date d) {
+    calendar.setTime(d);
+    int maxDd = calendar.getActualMaximum(Calendar.DAY_OF_MONTH);
+    calendar.set(Calendar.DAY_OF_MONTH, maxDd);
+    return calendar;
   }
 }
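
Restated, last_day simply clamps the day-of-month to the calendar's actual maximum for that month. Equivalent standalone Calendar code (class name is illustrative):

    import java.util.Calendar;

    public class LastDayDemo {
      public static void main(String[] args) {
        Calendar cal = Calendar.getInstance();
        cal.set(2009, Calendar.JANUARY, 12);
        cal.set(Calendar.DAY_OF_MONTH, cal.getActualMaximum(Calendar.DAY_OF_MONTH));
        System.out.printf("%1$tY-%1$tm-%1$td%n", cal);  // 2009-01-31
      }
    }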

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java
index 27c3bf8..bf2ec82 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java
@@ -19,7 +19,8 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 
-import org.apache.hadoop.hive.common.type.Date;
+import java.sql.Date;
+
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
@@ -217,12 +218,11 @@ class MaskTransformer extends AbstractTransformer {
 
   @Override
   Date transform(final Date value) {
-    int actualMonthValue = maskedMonthValue + 1;
     int year  = maskedYearValue  == UNMASKED_VAL ? value.getYear()  : maskedYearValue;
-    int month = maskedMonthValue == UNMASKED_VAL ? value.getMonth() : actualMonthValue;
-    int day   = maskedDayValue   == UNMASKED_VAL ? value.getDay()  : maskedDayValue;
+    int month = maskedMonthValue == UNMASKED_VAL ? value.getMonth() : maskedMonthValue;
+    int day   = maskedDayValue   == UNMASKED_VAL ? value.getDate()  : maskedDayValue;
 
-    return Date.of(year, month, day);
+    return new Date(year, month, day);
   }
 
   protected int transformChar(final int c) {
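
The "+ 1" month adjustment disappears above because the deprecated java.sql.Date constructor mirrors Date.getYear()/getMonth(): year is an offset from 1900 and month is 0-based. A quick illustration with hypothetical mask values:

    import java.sql.Date;

    public class MaskDateDemo {
      @SuppressWarnings("deprecation")
      public static void main(String[] args) {
        // year 101 = 2001, month 0 = January
        Date masked = new Date(101, 0, 1);
        System.out.println(masked);  // 2001-01-01
      }
    }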

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java
index a068541..8b1e988 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java
@@ -18,8 +18,9 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.sql.Date;
+
 import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
index e0db417..d04e135 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
@@ -29,10 +29,8 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO
 
 import java.math.BigDecimal;
 import java.util.Calendar;
-import java.util.TimeZone;
+import java.util.Date;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -61,13 +59,12 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
     + " Example:\n"
     + "  > SELECT _FUNC_('1997-02-28 10:30:00', '1996-10-30');\n 3.94959677")
 public class GenericUDFMonthsBetween extends GenericUDF {
-
   private transient Converter[] tsConverters = new Converter[2];
   private transient PrimitiveCategory[] tsInputTypes = new PrimitiveCategory[2];
   private transient Converter[] dtConverters = new Converter[2];
   private transient PrimitiveCategory[] dtInputTypes = new PrimitiveCategory[2];
-  private final Calendar cal1 = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
-  private final Calendar cal2 = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  private final Calendar cal1 = Calendar.getInstance();
+  private final Calendar cal2 = Calendar.getInstance();
   private final DoubleWritable output = new DoubleWritable();
   private boolean isRoundOffNeeded = true;
 
@@ -106,33 +103,31 @@ public class GenericUDFMonthsBetween extends GenericUDF {
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
     // the function should support both short date and full timestamp format
     // time part of the timestamp should not be skipped
-    Timestamp date1 = getTimestampValue(arguments, 0, tsConverters);
+    Date date1 = getTimestampValue(arguments, 0, tsConverters);
     if (date1 == null) {
-      Date date = getDateValue(arguments, 0, dtInputTypes, dtConverters);
-      if (date == null) {
+      date1 = getDateValue(arguments, 0, dtInputTypes, dtConverters);
+      if (date1 == null) {
         return null;
       }
-      date1 = Timestamp.ofEpochMilli(date.toEpochMilli());
     }
 
-    Timestamp date2 = getTimestampValue(arguments, 1, tsConverters);
+    Date date2 = getTimestampValue(arguments, 1, tsConverters);
     if (date2 == null) {
-      Date date = getDateValue(arguments, 1, dtInputTypes, dtConverters);
-      if (date == null) {
+      date2 = getDateValue(arguments, 1, dtInputTypes, dtConverters);
+      if (date2 == null) {
         return null;
       }
-      date2 = Timestamp.ofEpochMilli(date.toEpochMilli());
     }
 
-    cal1.setTimeInMillis(date1.toEpochMilli());
-    cal2.setTimeInMillis(date2.toEpochMilli());
+    cal1.setTime(date1);
+    cal2.setTime(date2);
 
     // skip day/time part if both dates are end of the month
     // or the same day of the month
     int monDiffInt = (cal1.get(YEAR) - cal2.get(YEAR)) * 12 + (cal1.get(MONTH) - cal2.get(MONTH));
     if (cal1.get(DATE) == cal2.get(DATE)
         || (cal1.get(DATE) == cal1.getActualMaximum(DATE) && cal2.get(DATE) == cal2
-        .getActualMaximum(DATE))) {
+            .getActualMaximum(DATE))) {
       output.set(monDiffInt);
       return output;
     }
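
The whole-month delta above is just the year difference times 12 plus the month difference, with the day/time fraction handled separately afterwards. Replaying the UDF's documented example as a standalone sketch:

    import java.util.Calendar;

    public class MonthsBetweenDemo {
      public static void main(String[] args) {
        Calendar cal1 = Calendar.getInstance();
        Calendar cal2 = Calendar.getInstance();
        cal1.set(1997, Calendar.FEBRUARY, 28);  // months_between('1997-02-28 10:30:00',
        cal2.set(1996, Calendar.OCTOBER, 30);   //                 '1996-10-30') ~ 3.9496
        int monDiff = (cal1.get(Calendar.YEAR) - cal2.get(Calendar.YEAR)) * 12
            + (cal1.get(Calendar.MONTH) - cal2.get(Calendar.MONTH));
        System.out.println(monDiff);  // 4, before the fractional-day adjustment
      }
    }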

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java
index c700797..e74bae3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java
@@ -28,7 +28,9 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP;
 
-import org.apache.hadoop.hive.common.type.Date;
+import java.util.Calendar;
+import java.util.Date;
+
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -38,8 +40,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.C
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
-
-import java.util.Calendar;
+import org.apache.hive.common.util.DateUtils;
 
 /**
  * GenericUDFNextDay.
@@ -54,10 +55,9 @@ import java.util.Calendar;
         + " 'yyyy-MM-dd'. day_of_week is day of the week (e.g. Mo, tue, FRIDAY)."
         + "Example:\n " + " > SELECT _FUNC_('2015-01-14', 'TU') FROM src LIMIT 1;\n" + " '2015-01-20'")
 public class GenericUDFNextDay extends GenericUDF {
-
   private transient Converter[] converters = new Converter[2];
   private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[2];
-  private final Date date = new Date();
+  private final Calendar calendar = Calendar.getInstance();
   private final Text output = new Text();
   private transient int dayOfWeekIntConst;
   private transient boolean isDayOfWeekConst;
@@ -98,13 +98,14 @@ public class GenericUDFNextDay extends GenericUDF {
       return null;
     }
 
-    Date d = getDateValue(arguments, 0, inputTypes, converters);
-    if (d == null) {
+    Date date = getDateValue(arguments, 0, inputTypes, converters);
+    if (date == null) {
       return null;
     }
 
-    nextDay(d, dayOfWeekInt);
-    output.set(date.toString());
+    nextDay(date, dayOfWeekInt);
+    Date newDate = calendar.getTime();
+    output.set(DateUtils.getDateFormat().format(newDate));
     return output;
   }
 
@@ -118,10 +119,10 @@ public class GenericUDFNextDay extends GenericUDF {
     return "next_day";
   }
 
-  protected Date nextDay(Date d, int dayOfWeek) {
-    date.setTimeInDays(d.toEpochDay());
+  protected Calendar nextDay(Date date, int dayOfWeek) {
+    calendar.setTime(date);
 
-    int currDayOfWeek = date.getDayOfWeek();
+    int currDayOfWeek = calendar.get(Calendar.DAY_OF_WEEK);
 
     int daysToAdd;
     if (currDayOfWeek < dayOfWeek) {
@@ -130,9 +131,9 @@ public class GenericUDFNextDay extends GenericUDF {
       daysToAdd = 7 - currDayOfWeek + dayOfWeek;
     }
 
-    date.setTimeInDays(date.toEpochDay() + daysToAdd);
+    calendar.add(Calendar.DATE, daysToAdd);
 
-    return date;
+    return calendar;
   }
 
   protected int getIntDayOfWeek(String dayOfWeek) throws UDFArgumentException {
@@ -163,7 +164,6 @@ public class GenericUDFNextDay extends GenericUDF {
     return -1;
   }
 
-
   public static enum DayOfWeek {
     MON("MO", "MON", "MONDAY"), TUE("TU", "TUE", "TUESDAY"), WED("WE", "WED", "WEDNESDAY"), THU(
         "TH", "THU", "THURSDAY"), FRI("FR", "FRI", "FRIDAY"), SAT("SA", "SAT", "SATURDAY"), SUN(
@@ -201,5 +201,4 @@ public class GenericUDFNextDay extends GenericUDF {
       return fullName.equalsIgnoreCase(dayOfWeek);
     }
   }
-
 }
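
next_day's arithmetic: read the current DAY_OF_WEEK, then add the forward distance to the requested day, wrapping by 7 when the target has already passed this week. Replaying the UDF's documented example with plain Calendar code (class name is illustrative):

    import java.util.Calendar;

    public class NextDayDemo {
      public static void main(String[] args) {
        Calendar cal = Calendar.getInstance();
        cal.set(2015, Calendar.JANUARY, 14);        // a Wednesday (DAY_OF_WEEK = 4)
        int curr = cal.get(Calendar.DAY_OF_WEEK);
        int target = Calendar.TUESDAY;              // 3
        int daysToAdd = curr < target ? target - curr : 7 - curr + target;
        cal.add(Calendar.DATE, daysToAdd);
        System.out.printf("%1$tY-%1$tm-%1$td%n", cal);  // 2015-01-20
      }
    }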

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
index 076ca51..e1673b2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
@@ -18,23 +18,23 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -58,8 +58,8 @@ public class GenericUDFOPDTIMinus extends GenericUDFBaseDTI {
   protected transient Converter dt1Converter;
   protected transient Converter dt2Converter;
 
-  protected transient DateWritableV2 dateResult = new DateWritableV2();
-  protected transient TimestampWritableV2 timestampResult = new TimestampWritableV2();
+  protected transient DateWritable dateResult = new DateWritable();
+  protected transient TimestampWritable timestampResult = new TimestampWritable();
   protected transient HiveIntervalYearMonthWritable intervalYearMonthResult =
       new HiveIntervalYearMonthWritable();
   protected transient HiveIntervalDayTimeWritable intervalDayTimeResult =
@@ -222,7 +222,7 @@ public class GenericUDFOPDTIMinus extends GenericUDFBaseDTI {
     }
   }
 
-  protected DateWritableV2 handleDateResult(Date result) {
+  protected DateWritable handleDateResult(Date result) {
     if (result == null) {
       return null;
     }
@@ -230,7 +230,7 @@ public class GenericUDFOPDTIMinus extends GenericUDFBaseDTI {
     return dateResult;
   }
 
-  protected TimestampWritableV2 handleTimestampResult(Timestamp result) {
+  protected TimestampWritable handleTimestampResult(Timestamp result) {
     if (result == null) {
       return null;
     }

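Worth noting in this hunk and the matching one in GenericUDFOPDTIPlus below: handleDateResult()/handleTimestampResult() mutate a single transient writable per result type rather than allocating a new object per row, which keeps per-row garbage near zero on large scans. A minimal sketch of that reuse pattern, using the DateWritable restored above (the holder class and main() are illustrative, not Hive API):

    import java.sql.Date;

    import org.apache.hadoop.hive.serde2.io.DateWritable;

    // Sketch of the reuse-one-writable-per-result pattern.
    public class DateResultHolder {
      // One mutable result object, reused for every row evaluated.
      private final DateWritable dateResult = new DateWritable();

      DateWritable handleDateResult(Date result) {
        if (result == null) {
          return null;          // SQL NULL propagates as a Java null
        }
        dateResult.set(result); // mutate in place; no per-row allocation
        return dateResult;
      }

      public static void main(String[] args) {
        DateResultHolder h = new DateResultHolder();
        System.out.println(h.handleDateResult(Date.valueOf("2018-06-25")));
        System.out.println(h.handleDateResult(null));
      }
    }

The trade-off is the usual one for Hadoop writables: callers must copy the result if they hold onto it across rows, since the next evaluation overwrites it.
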
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
index 9295c8f..a57b373 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
@@ -18,23 +18,23 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -56,8 +56,8 @@ public class GenericUDFOPDTIPlus extends GenericUDFBaseDTI {
   protected transient int dtArgIdx;
   protected transient Converter dtConverter;
 
-  protected transient TimestampWritableV2 timestampResult = new TimestampWritableV2();
-  protected transient DateWritableV2 dateResult = new DateWritableV2();
+  protected transient TimestampWritable timestampResult = new TimestampWritable();
+  protected transient DateWritable dateResult = new DateWritable();
   protected transient HiveIntervalDayTimeWritable intervalDayTimeResult =
       new HiveIntervalDayTimeWritable();
   protected transient HiveIntervalYearMonthWritable intervalYearMonthResult =
@@ -217,7 +217,7 @@ public class GenericUDFOPDTIPlus extends GenericUDFBaseDTI {
     }
   }
 
-  protected DateWritableV2 handleDateResult(Date result) {
+  protected DateWritable handleDateResult(Date result) {
     if (result == null) {
       return null;
     }
@@ -225,7 +225,7 @@ public class GenericUDFOPDTIPlus extends GenericUDFBaseDTI {
     return dateResult;
   }
 
-  protected TimestampWritableV2 handleTimestampResult(Timestamp result) {
+  protected TimestampWritable handleTimestampResult(Timestamp result) {
     if (result == null) {
       return null;
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java
index cf8c26a..2406868 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java
@@ -21,7 +21,9 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP;
 
-import org.apache.hadoop.hive.common.type.Date;
+import java.util.Calendar;
+import java.util.Date;
+
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -43,6 +45,7 @@ import org.apache.hadoop.io.IntWritable;
 public class GenericUDFQuarter extends GenericUDF {
   private transient Converter[] converters = new Converter[1];
   private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[1];
+  private final Calendar calendar = Calendar.getInstance();
   private final IntWritable output = new IntWritable();
 
   @Override
@@ -62,7 +65,8 @@ public class GenericUDFQuarter extends GenericUDF {
     if (date == null) {
       return null;
     }
-    int month = date.getMonth() - 1;
+    calendar.setTime(date);
+    int month = calendar.get(Calendar.MONTH);
     int quarter = (month + 3) / 3;
 
     output.set(quarter);

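Since Calendar.MONTH is 0-based (JANUARY == 0), the restored expression (month + 3) / 3 is integer shorthand for month / 3 + 1: months 0-2 map to quarter 1, months 3-5 to quarter 2, and so on. A standalone sketch (class name illustrative):

    import java.util.Calendar;
    import java.util.Date;

    // Sketch of the quarter computation restored above.
    public class QuarterSketch {
      static int quarter(Date date) {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        int month = calendar.get(Calendar.MONTH); // 0-based: JANUARY == 0
        return (month + 3) / 3;                   // equivalent to month / 3 + 1
      }

      public static void main(String[] args) {
        System.out.println(quarter(new Date())); // 1..4
      }
    }
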
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
index 4fca5d5..f0fcf69 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
+import java.sql.Timestamp;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -163,7 +163,7 @@ public class GenericUDFReflect2 extends AbstractGenericUDFReflect {
         ((Text)returnObj).set((String)result);
         return returnObj;
       case TIMESTAMP:
-        ((TimestampWritableV2)returnObj).set((Timestamp)result);
+        ((TimestampWritable)returnObj).set((Timestamp)result);
         return returnObj;
       case BINARY:
         ((BytesWritable)returnObj).set((byte[])result, 0, ((byte[]) result).length);

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
index 4d0e85d..5b55402 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
  * Example usage:
  * ... CAST(<Timestamp string> as TIMESTAMP) ...
  *
- * Creates a TimestampWritableV2 object using PrimitiveObjectInspectorConverter
+ * Creates a TimestampWritable object using PrimitiveObjectInspectorConverter
  *
  */
 @Description(name = "timestamp",

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
index 3c3796e..53dfae2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
@@ -18,12 +18,12 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.sql.Timestamp;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
-import java.util.TimeZone;
 
+import org.apache.calcite.util.TimestampWithTimeZoneString;
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -41,10 +41,12 @@ import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
 
 /**
  * deterministic version of UDFUnixTimeStamp. enforces argument
@@ -82,8 +84,6 @@ public class GenericUDFToUnixTimeStamp extends GenericUDF {
       }
     }
 
-    formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
-
     PrimitiveObjectInspector arg1OI = (PrimitiveObjectInspector) arguments[0];
     switch (arg1OI.getPrimitiveCategory()) {
       case CHAR:
@@ -171,7 +171,7 @@ public class GenericUDFToUnixTimeStamp extends GenericUDF {
   }
 
   protected static void setValueFromTs(LongWritable value, Timestamp timestamp) {
-    value.set(timestamp.toEpochSecond());
+    value.set(timestamp.getTime() / 1000);
   }
 
   @Override

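Two behavioral points in this hunk: dropping formatter.setTimeZone(TimeZone.getTimeZone("UTC")) means string inputs are once again parsed in the JVM's default time zone, and getTime() / 1000 converts milliseconds to whole seconds by integer division, which truncates toward zero (so pre-1970 instants round toward zero rather than flooring). A minimal sketch of the restored conversion (format string and input value are illustrative):

    import java.sql.Timestamp;
    import java.text.SimpleDateFormat;

    // Sketch: seconds-since-epoch from a java.sql.Timestamp.
    public class ToUnixTimestampSketch {
      public static void main(String[] args) throws Exception {
        // With no explicit setTimeZone(...), parsing uses the JVM default zone.
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Timestamp ts =
            new Timestamp(formatter.parse("2018-06-25 10:38:09").getTime());
        long unixTime = ts.getTime() / 1000; // millis -> whole seconds
        System.out.println(unixTime);
      }
    }
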
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
index 7a7d13e..372db36 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
@@ -20,21 +20,24 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 import java.math.BigDecimal;
+import java.sql.Timestamp;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -74,6 +77,7 @@ import org.apache.hadoop.io.Text;
         + " > SELECT _FUNC_(1234567891.1234567891);\n" + "OK\n" + " 1234567891")
 public class GenericUDFTrunc extends GenericUDF {
 
+  private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private transient TimestampConverter timestampConverter;
   private transient Converter textConverter1;
   private transient Converter textConverter2;
@@ -84,7 +88,7 @@ public class GenericUDFTrunc extends GenericUDF {
   private transient Converter longConverter;
   private transient PrimitiveCategory inputType1;
   private transient PrimitiveCategory inputType2;
-  private final Date date = new Date();
+  private final Calendar calendar = Calendar.getInstance();
   private final Text output = new Text();
   private transient String fmtInput;
   private transient PrimitiveObjectInspector inputOI;
@@ -293,35 +297,36 @@ public class GenericUDFTrunc extends GenericUDF {
       fmtInput = textConverter2.convert(arguments[1].get()).toString();
     }
 
-    Date d;
+    Date date;
     switch (inputType1) {
     case STRING:
       String dateString = textConverter1.convert(arguments[0].get()).toString();
       try {
-        d = Date.valueOf(dateString.toString());
-      } catch (IllegalArgumentException e) {
+        date = formatter.parse(dateString.toString());
+      } catch (ParseException e) {
         return null;
       }
       break;
     case TIMESTAMP:
       Timestamp ts =
-          ((TimestampWritableV2) timestampConverter.convert(arguments[0].get())).getTimestamp();
-      d = Date.ofEpochMilli(ts.toEpochMilli());
+          ((TimestampWritable) timestampConverter.convert(arguments[0].get())).getTimestamp();
+      date = ts;
       break;
     case DATE:
-      DateWritableV2 dw = (DateWritableV2) dateWritableConverter.convert(arguments[0].get());
-      d = dw.get();
+      DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
+      date = dw.get();
       break;
     default:
       throw new UDFArgumentTypeException(0,
           "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
     }
 
-    if (evalDate(d) == null) {
+    if (evalDate(date) == null) {
       return null;
     }
 
-    output.set(date.toString());
+    Date newDate = calendar.getTime();
+    output.set(formatter.format(newDate));
     return output;
   }
 
@@ -422,22 +427,22 @@ public class GenericUDFTrunc extends GenericUDF {
     return getStandardDisplayString("trunc", children);
   }
 
-  private Date evalDate(Date d) throws UDFArgumentException {
-    date.setTimeInDays(d.toEpochDay());
+  private Calendar evalDate(Date d) throws UDFArgumentException {
+    calendar.setTime(d);
     if ("MONTH".equals(fmtInput) || "MON".equals(fmtInput) || "MM".equals(fmtInput)) {
-      date.setDayOfMonth(1);
-      return date;
+      calendar.set(Calendar.DAY_OF_MONTH, 1);
+      return calendar;
     } else if ("QUARTER".equals(fmtInput) || "Q".equals(fmtInput)) {
-      int month = date.getMonth() - 1;
+      int month = calendar.get(Calendar.MONTH);
       int quarter = month / 3;
-      int monthToSet = quarter * 3 + 1;
-      date.setMonth(monthToSet);
-      date.setDayOfMonth(1);
-      return date;
+      int monthToSet = quarter * 3;
+      calendar.set(Calendar.MONTH, monthToSet);
+      calendar.set(Calendar.DAY_OF_MONTH, 1);
+      return calendar;
     } else if ("YEAR".equals(fmtInput) || "YYYY".equals(fmtInput) || "YY".equals(fmtInput)) {
-      date.setMonth(1);
-      date.setDayOfMonth(1);
-      return date;
+      calendar.set(Calendar.MONTH, 0);
+      calendar.set(Calendar.DAY_OF_MONTH, 1);
+      return calendar;
     } else {
       return null;
     }
@@ -480,5 +485,5 @@ public class GenericUDFTrunc extends GenericUDF {
     }
     return output;
   }
-
+  
 }

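The restored evalDate() truncates by pinning Calendar fields: DAY_OF_MONTH to 1 for month granularity; MONTH to the quarter's first month (quarter * 3, 0-based) plus DAY_OF_MONTH to 1 for quarter; MONTH to 0 plus DAY_OF_MONTH to 1 for year. Time-of-day fields are left untouched, exactly as in the UDF; they drop out when the result is formatted as yyyy-MM-dd. A compact standalone sketch reduced to those three cases (class name illustrative; the real UDF also accepts the MONTH/MON, QUARTER, and YEAR/YY spellings):

    import java.text.SimpleDateFormat;
    import java.util.Calendar;
    import java.util.Date;

    // Sketch of the Calendar-based trunc() restored above.
    public class TruncSketch {
      static Date trunc(Date d, String fmt) {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(d);
        if ("MM".equals(fmt)) {
          calendar.set(Calendar.DAY_OF_MONTH, 1);
        } else if ("Q".equals(fmt)) {
          int quarter = calendar.get(Calendar.MONTH) / 3; // 0-based months
          calendar.set(Calendar.MONTH, quarter * 3);      // quarter's first month
          calendar.set(Calendar.DAY_OF_MONTH, 1);
        } else if ("YYYY".equals(fmt)) {
          calendar.set(Calendar.MONTH, 0);
          calendar.set(Calendar.DAY_OF_MONTH, 1);
        } else {
          return null; // unsupported format
        }
        return calendar.getTime();
      }

      public static void main(String[] args) {
        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
        System.out.println(f.format(trunc(new Date(), "Q")));
      }
    }
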
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
index 557ab79..8329831 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -45,7 +44,7 @@ public class GenericUDFUnixTimeStamp extends GenericUDFToUnixTimeStamp {
     } else {
       if (currentTimestamp == null) {
         currentTimestamp = new LongWritable(0);
-        setValueFromTs(currentTimestamp, Timestamp.ofEpochMilli(SessionState.get().getQueryCurrentTimestamp().getTime()));
+        setValueFromTs(currentTimestamp, SessionState.get().getQueryCurrentTimestamp());
         String msg = "unix_timestamp(void) is deprecated. Use current_timestamp instead.";
         SessionState.getConsole().printInfo(msg, false);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
index b34c4d6..b440d8d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
@@ -18,9 +18,10 @@
 
 package org.apache.hadoop.hive.ql.udf.ptf;
 
-import org.apache.hadoop.hive.common.type.Date;
+import java.sql.Timestamp;
+import java.util.Date;
+
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.ql.exec.PTFPartition;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -558,7 +559,7 @@ class DateValueBoundaryScanner extends SingleValueBoundaryScanner {
     Date l2 = PrimitiveObjectInspectorUtils.getDate(v2,
         (PrimitiveObjectInspector) expressionDef.getOI());
     if (l1 != null && l2 != null) {
-        return (double)(l1.toEpochMilli() - l2.toEpochMilli())/1000 > (long)amt * 24 * 3600; // Converts amt days to milliseconds
+        return (double)(l1.getTime() - l2.getTime())/1000 > (long)amt * 24 * 3600; // Converts amt days to milliseconds
     }
     return l1 != l2; // True if only one date is null
   }
@@ -582,9 +583,9 @@ class TimestampValueBoundaryScanner extends SingleValueBoundaryScanner {
   public boolean isDistanceGreater(Object v1, Object v2, int amt) {
     if (v1 != null && v2 != null) {
       long l1 = PrimitiveObjectInspectorUtils.getTimestamp(v1,
-          (PrimitiveObjectInspector) expressionDef.getOI()).toEpochMilli();
+          (PrimitiveObjectInspector) expressionDef.getOI()).getTime();
       long l2 = PrimitiveObjectInspectorUtils.getTimestamp(v2,
-          (PrimitiveObjectInspector) expressionDef.getOI()).toEpochMilli();
+          (PrimitiveObjectInspector) expressionDef.getOI()).getTime();
       return (double)(l1-l2)/1000 > amt; // TODO: lossy conversion, distance is considered in seconds
     }
     return v1 != null || v2 != null; // True if only one value is null

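Both scanners compare a millisecond difference scaled to seconds against the window amount; for dates, amt days is expanded to amt * 24 * 3600 seconds (despite the inline comment, the compared quantity is seconds, not milliseconds). A sketch of the date-distance predicate (class and main() illustrative):

    import java.util.Date;

    // Sketch of the date-range distance check restored above:
    // true when l1 lies more than amt whole days after l2.
    public class DateDistanceSketch {
      static boolean isDistanceGreater(Date l1, Date l2, int amt) {
        if (l1 != null && l2 != null) {
          // millis difference in seconds vs. amt days in seconds
          return (double) (l1.getTime() - l2.getTime()) / 1000
              > (long) amt * 24 * 3600;
        }
        return l1 != l2; // true if exactly one side is null
      }

      public static void main(String[] args) {
        Date now = new Date();
        Date twoDaysAgo = new Date(now.getTime() - 2L * 24 * 3600 * 1000);
        System.out.println(isDistanceGreater(now, twoDaysAgo, 1)); // true
      }
    }
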
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
index 16babbf..9a097af 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
@@ -17,17 +17,17 @@
  */
 package org.apache.hadoop.hive.ql.util;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hive.common.util.DateUtils;
-
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.Calendar;
 import java.util.TimeZone;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hive.common.util.DateUtils;
+
 
 public class DateTimeMath {
 
@@ -49,6 +49,7 @@ public class DateTimeMath {
   }
 
   protected Calendar calUtc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  protected Calendar calLocal = Calendar.getInstance();
   protected NanosResult nanosResult = new NanosResult();
 
   //
@@ -61,22 +62,39 @@ public class DateTimeMath {
    * @param months
    * @return
    */
-  public long addMonthsToMillis(long millis, int months) {
+  public long addMonthsToMillisUtc(long millis, int months) {
     calUtc.setTimeInMillis(millis);
     calUtc.add(Calendar.MONTH, months);
     return calUtc.getTimeInMillis();
   }
 
-  public long addMonthsToNanos(long nanos, int months) {
-    long result = addMonthsToMillis(nanos / 1000000, months) * 1000000 + (nanos % 1000000);
+  /**
+   * Perform month arithmetic to millis value using local time zone.
+   * @param millis
+   * @param months
+   * @return
+   */
+  public long addMonthsToMillisLocal(long millis, int months) {
+    calLocal.setTimeInMillis(millis);
+    calLocal.add(Calendar.MONTH, months);
+    return calLocal.getTimeInMillis();
+  }
+
+  public long addMonthsToNanosUtc(long nanos, int months) {
+    long result = addMonthsToMillisUtc(nanos / 1000000, months) * 1000000 + (nanos % 1000000);
+    return result;
+  }
+
+  public long addMonthsToNanosLocal(long nanos, int months) {
+    long result = addMonthsToMillisLocal(nanos / 1000000, months) * 1000000 + (nanos % 1000000);
     return result;
   }
 
   public long addMonthsToDays(long days, int months) {
-    long millis = DateWritableV2.daysToMillis((int) days);
-    millis = addMonthsToMillis(millis, months);
+    long millis = DateWritable.daysToMillis((int) days);
+    millis = addMonthsToMillisLocal(millis, months);
     // Convert millis result back to days
-    return DateWritableV2.millisToDays(millis);
+    return DateWritable.millisToDays(millis);
   }
 
   public Timestamp add(Timestamp ts, HiveIntervalYearMonth interval) {
@@ -84,19 +102,7 @@ public class DateTimeMath {
       return null;
     }
 
-    Timestamp tsResult = new Timestamp();
-    add(ts, interval, tsResult);
-
-    return tsResult;
-  }
-
-  @Deprecated
-  public java.sql.Timestamp add(java.sql.Timestamp ts, HiveIntervalYearMonth interval) {
-    if (ts == null || interval == null) {
-      return null;
-    }
-
-    java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
+    Timestamp tsResult = new Timestamp(0);
     add(ts, interval, tsResult);
 
     return tsResult;
@@ -107,21 +113,9 @@ public class DateTimeMath {
       return false;
     }
 
-    long resultMillis = addMonthsToMillis(ts.toEpochMilli(), interval.getTotalMonths());
-    result.setTimeInMillis(resultMillis, ts.getNanos());
-
-    return true;
-  }
-
-  @Deprecated
-  public boolean add(java.sql.Timestamp ts, HiveIntervalYearMonth interval, java.sql.Timestamp result) {
-    if (ts == null || interval == null) {
-      return false;
-    }
-
     // Attempt to match Oracle semantics for timestamp arithmetic,
     // where timestamp arithmetic is done in UTC, then converted back to local timezone
-    long resultMillis = addMonthsToMillis(ts.getTime(), interval.getTotalMonths());
+    long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths());
     result.setTime(resultMillis);
     result.setNanos(ts.getNanos());
 
@@ -133,19 +127,7 @@ public class DateTimeMath {
       return null;
     }
 
-    Timestamp tsResult = new Timestamp();
-    add(interval, ts, tsResult);
-
-    return tsResult;
-  }
-
-  @Deprecated
-  public java.sql.Timestamp add(HiveIntervalYearMonth interval, java.sql.Timestamp ts) {
-    if (ts == null || interval == null) {
-      return null;
-    }
-
-    java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
+    Timestamp tsResult = new Timestamp(0);
     add(interval, ts, tsResult);
 
     return tsResult;
@@ -156,19 +138,9 @@ public class DateTimeMath {
       return false;
     }
 
-    long resultMillis = addMonthsToMillis(ts.toEpochMilli(), interval.getTotalMonths());
-    result.setTimeInMillis(resultMillis, ts.getNanos());
-
-    return true;
-  }
-
-  @Deprecated
-  public boolean add(HiveIntervalYearMonth interval, java.sql.Timestamp ts, java.sql.Timestamp result) {
-    if (ts == null || interval == null) {
-      return false;
-    }
-
-    long resultMillis = addMonthsToMillis(ts.getTime(), interval.getTotalMonths());
+    // Attempt to match Oracle semantics for timestamp arithmetic,
+    // where timestamp arithmetic is done in UTC, then converted back to local timezone
+    long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths());
     result.setTime(resultMillis);
     result.setNanos(ts.getNanos());
 
@@ -180,19 +152,7 @@ public class DateTimeMath {
       return null;
     }
 
-    Date dtResult = new Date();
-    add(dt, interval, dtResult);
-
-    return dtResult;
-  }
-
-  @Deprecated
-  public java.sql.Date add(java.sql.Date dt, HiveIntervalYearMonth interval) {
-    if (dt == null || interval == null) {
-      return null;
-    }
-
-    java.sql.Date dtResult = new java.sql.Date(0);
+    Date dtResult = new Date(0);
     add(dt, interval, dtResult);
 
     return dtResult;
@@ -203,18 +163,9 @@ public class DateTimeMath {
       return false;
     }
 
-    long resultMillis = addMonthsToMillis(dt.toEpochMilli(), interval.getTotalMonths());
-    result.setTimeInMillis(resultMillis);
-    return true;
-  }
-
-  @Deprecated
-  public boolean add(java.sql.Date dt, HiveIntervalYearMonth interval, java.sql.Date result) {
-    if (dt == null || interval == null) {
-      return false;
-    }
-
-    long resultMillis = addMonthsToMillis(dt.getTime(), interval.getTotalMonths());
+    // Since Date millis value is in local timezone representation, do date arithmetic
+    // using local timezone so the time remains at the start of the day.
+    long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths());
     result.setTime(resultMillis);
     return true;
   }
@@ -224,19 +175,7 @@ public class DateTimeMath {
       return null;
     }
 
-    Date dtResult = new Date();
-    add(interval, dt, dtResult);
-
-    return dtResult;
-  }
-
-  @Deprecated
-  public java.sql.Date add(HiveIntervalYearMonth interval, java.sql.Date dt) {
-    if (dt == null || interval == null) {
-      return null;
-    }
-
-    java.sql.Date dtResult = new java.sql.Date(0);
+    Date dtResult = new Date(0);
     add(interval, dt, dtResult);
 
     return dtResult;
@@ -247,18 +186,9 @@ public class DateTimeMath {
       return false;
     }
 
-    long resultMillis = addMonthsToMillis(dt.toEpochMilli(), interval.getTotalMonths());
-    result.setTimeInMillis(resultMillis);
-    return true;
-  }
-
-  @Deprecated
-  public boolean add(HiveIntervalYearMonth interval, java.sql.Date dt, java.sql.Date result) {
-    if (dt == null || interval == null) {
-      return false;
-    }
-
-    long resultMillis = addMonthsToMillis(dt.getTime(), interval.getTotalMonths());
+    // Since Date millis value is in local timezone representation, do date arithmetic
+    // using local timezone so the time remains at the start of the day.
+    long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths());
     result.setTime(resultMillis);
     return true;
   }
@@ -278,19 +208,7 @@ public class DateTimeMath {
       return null;
     }
 
-    Timestamp tsResult = new Timestamp();
-    subtract(left, right, tsResult);
-
-    return tsResult;
-  }
-
-  @Deprecated
-  public java.sql.Timestamp subtract(java.sql.Timestamp left, HiveIntervalYearMonth right) {
-    if (left == null || right == null) {
-      return null;
-    }
-
-    java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
+    Timestamp tsResult = new Timestamp(0);
     subtract(left, right, tsResult);
 
     return tsResult;
@@ -303,32 +221,12 @@ public class DateTimeMath {
     return add(left, right.negate(), result);
   }
 
-  @Deprecated
-  public boolean subtract(java.sql.Timestamp left, HiveIntervalYearMonth right, java.sql.Timestamp result) {
-    if (left == null || right == null) {
-      return false;
-    }
-    return add(left, right.negate(), result);
-  }
-
   public Date subtract(Date left, HiveIntervalYearMonth right) {
     if (left == null || right == null) {
       return null;
     }
 
-    Date dtResult = new Date();
-    subtract(left, right, dtResult);
-
-    return dtResult;
-  }
-
-  @Deprecated
-  public java.sql.Date subtract(java.sql.Date left, HiveIntervalYearMonth right) {
-    if (left == null || right == null) {
-      return null;
-    }
-
-    java.sql.Date dtResult = new java.sql.Date(0);
+    Date dtResult = new Date(0);
     subtract(left, right, dtResult);
 
     return dtResult;
@@ -341,14 +239,6 @@ public class DateTimeMath {
     return add(left, right.negate(), result);
   }
 
-  @Deprecated
-  public boolean subtract(java.sql.Date left, HiveIntervalYearMonth right, java.sql.Date result) {
-    if (left == null || right == null) {
-      return false;
-    }
-    return add(left, right.negate(), result);
-  }
-
   public HiveIntervalYearMonth subtract(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
     if (left == null || right == null) {
       return null;
@@ -365,19 +255,7 @@ public class DateTimeMath {
       return null;
     }
 
-    Timestamp tsResult = new Timestamp();
-    add(ts, interval, tsResult);
-
-    return tsResult;
-  }
-
-  @Deprecated
-  public java.sql.Timestamp add(java.sql.Timestamp ts, HiveIntervalDayTime interval) {
-    if (ts == null || interval == null) {
-      return null;
-    }
-
-    java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
+    Timestamp tsResult = new Timestamp(0);
     add(ts, interval, tsResult);
 
     return tsResult;
@@ -391,21 +269,6 @@ public class DateTimeMath {
 
     nanosResult.addNanos(ts.getNanos(), interval.getNanos());
 
-    long newMillis = ts.toEpochMilli()
-        + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
-    result.setTimeInMillis(newMillis, nanosResult.nanos);
-    return true;
-  }
-
-  @Deprecated
-  public boolean add(java.sql.Timestamp ts, HiveIntervalDayTime interval,
-      java.sql.Timestamp result) {
-    if (ts == null || interval == null) {
-      return false;
-    }
-
-    nanosResult.addNanos(ts.getNanos(), interval.getNanos());
-
     long newMillis = ts.getTime()
         + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
     result.setTime(newMillis);
@@ -418,18 +281,7 @@ public class DateTimeMath {
       return null;
     }
 
-    Timestamp tsResult = new Timestamp();
-    add(interval, ts, tsResult);
-    return tsResult;
-  }
-
-  @Deprecated
-  public java.sql.Timestamp add(HiveIntervalDayTime interval, java.sql.Timestamp ts) {
-    if (ts == null || interval == null) {
-      return null;
-    }
-
-    java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
+    Timestamp tsResult = new Timestamp(0);
     add(interval, ts, tsResult);
     return tsResult;
   }
@@ -442,21 +294,6 @@ public class DateTimeMath {
 
     nanosResult.addNanos(ts.getNanos(), interval.getNanos());
 
-    long newMillis = ts.toEpochMilli()
-        + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
-    result.setTimeInMillis(newMillis, nanosResult.nanos);
-    return true;
-  }
-
-  @Deprecated
-  public boolean add(HiveIntervalDayTime interval, java.sql.Timestamp ts,
-      java.sql.Timestamp result) {
-    if (ts == null || interval == null) {
-      return false;
-    }
-
-    nanosResult.addNanos(ts.getNanos(), interval.getNanos());
-
     long newMillis = ts.getTime()
         + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
     result.setTime(newMillis);
@@ -495,14 +332,6 @@ public class DateTimeMath {
     return add(left, right.negate());
   }
 
-  @Deprecated
-  public java.sql.Timestamp subtract(java.sql.Timestamp left, HiveIntervalDayTime right) {
-    if (left == null || right == null) {
-      return null;
-    }
-    return add(left, right.negate());
-  }
-
   public boolean subtract(Timestamp left, HiveIntervalDayTime right, Timestamp result) {
     if (left == null || right == null) {
       return false;
@@ -510,14 +339,6 @@ public class DateTimeMath {
     return add(left, right.negate(), result);
   }
 
-  @Deprecated
-  public boolean subtract(java.sql.Timestamp left, HiveIntervalDayTime right, java.sql.Timestamp result) {
-    if (left == null || right == null) {
-      return false;
-    }
-    return add(left, right.negate(), result);
-  }
-
   public HiveIntervalDayTime subtract(HiveIntervalDayTime left, HiveIntervalDayTime right) {
     if (left == null || right == null) {
       return null;
@@ -544,18 +365,6 @@ public class DateTimeMath {
     return result;
   }
 
-  @Deprecated
-  public HiveIntervalDayTime subtract(java.sql.Timestamp left, java.sql.Timestamp right) {
-    if (left == null || right == null) {
-      return null;
-    }
-
-    HiveIntervalDayTime result = new HiveIntervalDayTime();
-    subtract(left, right, result);
-
-    return result;
-  }
-
   public boolean subtract(Timestamp left, Timestamp right,
       HiveIntervalDayTime result) {
     if (left == null || right == null) {
@@ -564,21 +373,6 @@ public class DateTimeMath {
 
     nanosResult.addNanos(left.getNanos(), -(right.getNanos()));
 
-    long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.toEpochMilli())
-        - TimeUnit.MILLISECONDS.toSeconds(right.toEpochMilli()) + nanosResult.seconds;
-    result.set(totalSeconds, nanosResult.nanos);
-    return true;
-  }
-
-  @Deprecated
-  public boolean subtract(java.sql.Timestamp left, java.sql.Timestamp right,
-      HiveIntervalDayTime result) {
-    if (left == null || right == null) {
-      return false;
-    }
-
-    nanosResult.addNanos(left.getNanos(), -(right.getNanos()));
-
     long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.getTime())
         - TimeUnit.MILLISECONDS.toSeconds(right.getTime()) + nanosResult.seconds;
     result.set(totalSeconds, nanosResult.nanos);

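This file carries the semantic core of the revert: month arithmetic on timestamps goes back to a UTC calendar (the Oracle-style compute-in-UTC, read-back-in-local-time behavior the comments describe), while month arithmetic on dates uses the local calendar so the result stays at local midnight. The two calendars produce millis values that differ by the zone's UTC-offset change whenever a DST transition falls between the input and the result. A runnable sketch of the difference (standalone, not the Hive API):

    import java.util.Calendar;
    import java.util.TimeZone;

    // Sketch: addMonths via a UTC calendar vs. the local calendar, mirroring
    // addMonthsToMillisUtc / addMonthsToMillisLocal restored above.
    public class MonthMathSketch {
      static long addMonths(long millis, int months, Calendar cal) {
        cal.setTimeInMillis(millis);
        cal.add(Calendar.MONTH, months);
        return cal.getTimeInMillis();
      }

      public static void main(String[] args) {
        Calendar utc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        Calendar local = Calendar.getInstance(); // JVM default zone
        long now = System.currentTimeMillis();
        long a = addMonths(now, 3, utc);
        long b = addMonths(now, 3, local);
        // 0 in most cases; otherwise the DST offset the local zone gained or
        // lost between the two instants (e.g. 3600000 ms across a transition).
        System.out.println(a - b);
      }
    }

Using the local calendar for DATE values matters because a Date's millis encode local midnight; shifting months through UTC could land the result an hour before or after midnight and thus on the wrong day.
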
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
index 40d60f3..613d7a8 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
@@ -28,19 +28,21 @@ import junit.framework.TestCase;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionType;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.udf.UDFAscii;
 import org.apache.hadoop.hive.ql.udf.UDFLn;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -60,10 +62,10 @@ public class TestFunctionRegistry extends TestCase {
     public void one(IntWritable x, HiveDecimalWritable y) {}
     public void one(IntWritable x, DoubleWritable y) {}
     public void one(IntWritable x, IntWritable y) {}
-    public void mismatch(DateWritableV2 x, HiveDecimalWritable y) {}
-    public void mismatch(TimestampWritableV2 x, HiveDecimalWritable y) {}
+    public void mismatch(DateWritable x, HiveDecimalWritable y) {}
+    public void mismatch(TimestampWritable x, HiveDecimalWritable y) {}
     public void mismatch(BytesWritable x, DoubleWritable y) {}
-    public void typeaffinity1(DateWritableV2 x) {}
+    public void typeaffinity1(DateWritable x) {}
     public void typeaffinity1(DoubleWritable x) {};
     public void typeaffinity1(Text x) {}
     public void typeaffinity2(IntWritable x) {}
@@ -156,8 +158,8 @@ public class TestFunctionRegistry extends TestCase {
     typeAffinity("typeaffinity1", TypeInfoFactory.floatTypeInfo, 1, DoubleWritable.class);
 
     // Prefer date type arguments over other method signatures
-    typeAffinity("typeaffinity1", TypeInfoFactory.dateTypeInfo, 1, DateWritableV2.class);
-    typeAffinity("typeaffinity1", TypeInfoFactory.timestampTypeInfo, 1, DateWritableV2.class);
+    typeAffinity("typeaffinity1", TypeInfoFactory.dateTypeInfo, 1, DateWritable.class);
+    typeAffinity("typeaffinity1", TypeInfoFactory.timestampTypeInfo, 1, DateWritable.class);
 
     // String type affinity
     typeAffinity("typeaffinity1", TypeInfoFactory.stringTypeInfo, 1, Text.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
index 90eb45b..fdc268c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
@@ -36,6 +36,7 @@ import static org.mockito.Mockito.when;
 import java.io.File;
 import java.io.IOException;
 import java.io.Serializable;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
@@ -54,7 +55,6 @@ import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.Context;
@@ -132,7 +132,8 @@ public class TestUtilities {
 
   @Test
   public void testSerializeTimestamp() {
-    Timestamp ts = Timestamp.ofEpochMilli(1374554702000L, 123456);
+    Timestamp ts = new Timestamp(1374554702000L);
+    ts.setNanos(123456);
     ExprNodeConstantDesc constant = new ExprNodeConstantDesc(ts);
     List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
     children.add(constant);

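A subtlety in the restored test: java.sql.Timestamp keeps sub-second precision in a separate nanos field, and setNanos() replaces the entire fractional second, including any milliseconds supplied through the constructor. Here the constructor value ends in whole seconds, so the timestamp comes out as ...702 seconds plus 123456 nanoseconds. A sketch (values taken from the test above):

    import java.sql.Timestamp;

    // Sketch: setNanos() replaces the whole fractional second.
    public class TimestampNanosSketch {
      public static void main(String[] args) {
        Timestamp ts = new Timestamp(1374554702000L); // .000 fractional part
        ts.setNanos(123456);                          // fraction = 0.000123456 s
        System.out.println(ts.getTime());  // 1374554702000 (nanos < 1 ms)
        System.out.println(ts.getNanos()); // 123456
      }
    }
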
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java
index 7e5e5fb..c8ae73a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java
@@ -19,17 +19,17 @@ package org.apache.hadoop.hive.ql.exec.persistence;
 import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
@@ -55,7 +55,7 @@ public class TestRowContainer {
       ObjectInspectorUtils.getStandardObjectInspector(serde.getObjectInspector()));
     result.setTableDesc(
       PTFRowContainer.createTableDesc((StructObjectInspector) serde.getObjectInspector()));
-    TimestampWritableV2 key = new TimestampWritableV2(Timestamp.ofEpochMilli(10));
+    TimestampWritable key = new TimestampWritable(new Timestamp(10));
     result.setKeyObject(Lists.newArrayList(key));
     List<Writable> row;
     // will trigger 2 spills


[10/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out
index c2687ca..9209d48 100644
--- a/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorization_decimal_date.q.out
@@ -12,9 +12,9 @@ POSTHOOK: Lineage: date_decimal_test.cdate EXPRESSION [(alltypesorc)alltypesorc.
 POSTHOOK: Lineage: date_decimal_test.cdecimal EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
 POSTHOOK: Lineage: date_decimal_test.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
 POSTHOOK: Lineage: date_decimal_test.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
-PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
   enabled: true
@@ -44,12 +44,12 @@ STAGE PLANS:
                     predicate: (cdouble is not null and cint is not null) (type: boolean)
                     Statistics: Num rows: 12288 Data size: 1651260 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: cdate (type: date), cint (type: int), cdecimal (type: decimal(20,10))
-                      outputColumnNames: _col0, _col1, _col2
+                      expressions: cdate (type: date), cdecimal (type: decimal(20,10))
+                      outputColumnNames: _col0, _col1
                       Select Vectorization:
                           className: VectorSelectOperator
                           native: true
-                          projectedOutputColumnNums: [2, 0, 3]
+                          projectedOutputColumnNums: [2, 3]
                       Statistics: Num rows: 12288 Data size: 1651260 Basic stats: COMPLETE Column stats: NONE
                       Limit
                         Number of rows: 10
@@ -84,21 +84,21 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+PREHOOK: query: SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@date_decimal_test
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+POSTHOOK: query: SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_decimal_test
 #### A masked pattern was here ####
-1970-01-07	528534767	-7959.5837837838
-1970-01-07	528534767	-2516.4135135135
-1970-01-07	528534767	-9445.0621621622
-1970-01-07	528534767	-5713.7459459459
-1970-01-07	528534767	8963.6405405405
-1970-01-07	528534767	4193.6243243243
-1970-01-07	528534767	2964.3864864865
-1970-01-07	528534767	-4673.2540540541
-1970-01-07	528534767	-9216.8945945946
-1970-01-07	528534767	-9287.3756756757
+1970-01-06	-7959.5837837838
+1970-01-06	-2516.4135135135
+1970-01-06	-9445.0621621622
+1970-01-06	-5713.7459459459
+1970-01-06	8963.6405405405
+1970-01-06	4193.6243243243
+1970-01-06	2964.3864864865
+1970-01-06	-4673.2540540541
+1970-01-06	-9216.8945945946
+1970-01-06	-9287.3756756757

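The expected dates shift from 1970-01-07 back to 1970-01-06 because, after the revert, DateWritable's days-to-millis conversion is local-time based again: the same epoch-day count renders as the previous calendar day in zones west of UTC. (The one-day shift in these rows is consistent with a US Pacific default zone on the machine that generated them; that zone is an assumption, not something the diff states.) A pure-JDK sketch of the effect:

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    // Sketch: one instant, two calendar days, depending on the render zone.
    public class EpochDaySketch {
      public static void main(String[] args) {
        long millis = 6L * 24 * 3600 * 1000; // 6 days after epoch, UTC midnight
        SimpleDateFormat utc = new SimpleDateFormat("yyyy-MM-dd");
        utc.setTimeZone(TimeZone.getTimeZone("UTC"));
        SimpleDateFormat pacific = new SimpleDateFormat("yyyy-MM-dd");
        pacific.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles"));
        System.out.println(utc.format(new Date(millis)));     // 1970-01-07
        System.out.println(pacific.format(new Date(millis))); // 1970-01-06
      }
    }
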
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
index 76a9593..9879e22 100644
--- a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
@@ -263,7 +263,7 @@ WHERE  ((762 = cbigint)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
--1.2803533196894065E7	-1.2807261196894065E7	1.2807261196894065E7	-1.2807261196894065E7	1.6402593936546838E14	-275125.557	1.2803533196894065E7	6.102557176084042E8	-2.1007230485194618E21	9480.304481867239	-6.102557176084042E8	6.230629788052982E8	3.8022774524605715E17	3.7261870682317882E17	-11.503947368421052	-3.7261870682317882E17	3.7261870682317882E17	1083935.5552547143	6.104250214589658E8	-1083935.5552547143	46.53705506862114	-51	1029	-4.705076768887381E-5	-46.53705506862114
+1.6000018929276082E8	1.5999646129276082E8	-1.5999646129276082E8	1.5999646129276082E8	2.5598867626205912E16	-8706342.964000002	-1.6000018929276082E8	5.481251832900263E8	4.095728233294762E24	8549.657499338193	-5.481251832900263E8	3.8812872199726546E8	2.12743126884874784E17	3.0054786945575117E17	-5.700752675298234	-3.0054786945575117E17	3.0054786945575117E17	973579.3664121248	5.482224634724039E8	-973579.3664121248	-18.377427808018613	-64	2044	-6.573680812059058E-5	18.377427808018613
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION
 SELECT MAX(cint),
        (MAX(cint) / -3728),
@@ -979,7 +979,7 @@ WHERE  (((ctimestamp2 <= ctimestamp1)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-17.0	6998.0	7015.0	1942088700	412.6470588235294	-6998.0	1.7455632335840696E8	17.0	2.9018961928004512E16	1.0774839990192407E18	-1942088700	-11.125857045077739	17.0	-2.8316279494225646E19
+-0.5934409161894847	6980.406559083811	6979.813118167622	2141851355	-11761.597368421053	-6980.406559083811	1.5852855222071928E8	-0.5934409161894847	2.5099887741860824E16	1.52140608502098611E18	-2141851355	-13.510823917813244	79.553	-3.998255191435152E19
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION
 SELECT cint,
        cdouble,
@@ -3697,7 +3697,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -3705,7 +3705,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: []
-                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       Group By Vectorization:
@@ -3808,7 +3808,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -3818,7 +3818,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [0]
-                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(ctinyint)
                       Group By Vectorization:
@@ -3921,7 +3921,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -3931,7 +3931,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [2]
-                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cint)
                       Group By Vectorization:
@@ -4034,7 +4034,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -4044,7 +4044,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [4]
-                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cfloat)
                       Group By Vectorization:
@@ -4147,7 +4147,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -4157,7 +4157,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [6]
-                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cstring1)
                       Group By Vectorization:
@@ -4260,7 +4260,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -4270,7 +4270,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [10]
-                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cboolean1)
                       Group By Vectorization:

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
index 1791c89..1827f67 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
@@ -262,13 +262,13 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: to_unix_timestamp(ctimestamp1) (type: bigint), year(ctimestamp1) (type: int), month(ctimestamp1) (type: int), day(ctimestamp1) (type: int), weekofyear(ctimestamp1) (type: int), hour(ctimestamp1) (type: int), minute(ctimestamp1) (type: int), second(ctimestamp1) (type: int), cboolean1 (type: boolean), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), if(cboolean1, ctimestamp1, TIMESTAMP'1319-02-02 16:31:57.778') (type: timestamp), if(cboolean1, TIMESTAMP'2000-12-18 08:42:30.0005', ctimestamp1) (type: timestamp), if(cboolean1, ctimestamp1, ctimestamp2) (type: timestamp), if(cboolean1, ctimestamp1, null) (type: timestamp), if(cboolean1, null, ctimestamp2) (type: timestamp)
-                    outputColumnNames: _col0, _col1, _col2, _col3, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
+                    expressions: to_unix_timestamp(ctimestamp1) (type: bigint), year(ctimestamp1) (type: int), month(ctimestamp1) (type: int), day(ctimestamp1) (type: int), dayofmonth(ctimestamp1) (type: int), weekofyear(ctimestamp1) (type: int), hour(ctimestamp1) (type: int), minute(ctimestamp1) (type: int), second(ctimestamp1) (type: int), cboolean1 (type: boolean), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), if(cboolean1, ctimestamp1, TIMESTAMP'1319-02-02 16:31:57.778') (type: timestamp), if(cboolean1, TIMESTAMP'2000-12-18 08:42:30.0005', ctimestamp1) (type: timestamp), if(cboolean1, ctimestamp1, ctimestamp2) (type: timestamp), if(cboolean1, ctimestamp1, null) (type: timestamp), if(cboolean1, null, ctimestamp2) (type: timestamp)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
-                        projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 0, 1, 3, 13, 14, 15, 16, 17]
-                        selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 9:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 10:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 11:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 12:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-01-25 08:31:57.778) -> 13:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 00:42:30.0005, col 1:timestamp) -> 14:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 15:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 16:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 17:timestamp
+                        projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13, 0, 1, 3, 14, 15, 16, 17, 18]
+                        selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 9:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 10:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 11:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 12:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 13:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-02-02 16:31:57.778) -> 14:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 08:42:30.0005, col 1:timestamp) -> 15:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 16:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 17:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 18:timestamp
                     Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: bigint)
@@ -278,7 +278,7 @@ STAGE PLANS:
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp)
+                      value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp)
             Execution mode: vectorized
             Map Vectorization:
                 enabled: true
@@ -299,12 +299,12 @@ STAGE PLANS:
                 vectorized: true
             Reduce Operator Tree:
               Select Operator
-                expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: boolean), VALUE._col8 (type: timestamp), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp), VALUE._col12 (type: timestamp), VALUE._col13 (type: timestamp), VALUE._col14 (type: timestamp)
+                expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: int), VALUE._col8 (type: boolean), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp), VALUE._col12 (type: timestamp), VALUE._col13 (type: timestamp), VALUE._col14 (type: timestamp), VALUE._col15 (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
                 Select Vectorization:
                     className: VectorSelectOperator
                     native: true
-                    projectedOutputColumnNums: [0, 1, 2, 3, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
+                    projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
                 Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
@@ -369,14 +369,14 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
--45479202281	528	10	25	25	43	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
-1632453512	2021	9	24	24	38	3	18	32	NULL	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	NULL	1974-10-04 17:21:03.989
-1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	NULL	1999-10-03 16:59:10.396903939
-1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	NULL	2010-04-08 02:43:35.861742727
-1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	NULL	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	NULL	NULL	NULL
-163809583224	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	NULL	1966-08-16 13:36:50.183
-163809583224	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	NULL	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	NULL	NULL	NULL
-490699811	1985	7	20	20	29	9	30	11	true	1985-07-20 09:30:11	1319-02-02 16:31:57.778	1985-07-20 09:30:11	2000-12-18 08:42:30.0005	1985-07-20 09:30:11	1985-07-20 09:30:11	NULL
+-45479000681	528	10	27	27	43	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
+1632478712	2021	9	24	24	38	3	18	32	NULL	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	NULL	1974-10-04 17:21:03.989
+1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	NULL	1999-10-03 16:59:10.396903939
+1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	NULL	2010-04-08 02:43:35.861742727
+1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	NULL	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	NULL	NULL	NULL
+163809612024	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	NULL	1966-08-16 13:36:50.183
+163809612024	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	NULL	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	NULL	NULL	NULL
+490725011	1985	7	20	20	29	9	30	11	true	1985-07-20 09:30:11	1319-02-02 16:31:57.778	1985-07-20 09:30:11	2000-12-18 08:42:30.0005	1985-07-20 09:30:11	1985-07-20 09:30:11	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1319-02-02 16:31:57.778	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:47.183	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:47.183	NULL	1969-12-31 15:59:47.183
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:52.843	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:52.843	NULL	1969-12-31 15:59:52.843
@@ -470,7 +470,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), day(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
+                    expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), dayofmonth(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -561,14 +561,14 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
--2736272726	1883	4	17	17	16	4	14	34
--62018199211	4	9	24	22	39	18	26	29
-1365554626	2013	4	10	10	15	0	43	46
-206730996125	8521	1	16	16	3	20	42	5
-271176065	1978	8	5	5	31	14	41	5
-501179874	1985	11	18	18	47	16	37	54
-501179874	1985	11	18	18	47	16	37	54
-94573819855	4966	12	4	4	49	9	30	55
+-2736243926	1883	4	17	17	16	4	14	34
+-62018170411	4	9	22	22	39	18	26	29
+1365579826	2013	4	10	10	15	0	43	46
+206731024925	8521	1	16	16	3	20	42	5
+271201265	1978	8	5	5	31	14	41	5
+501208674	1985	11	18	18	47	16	37	54
+501208674	1985	11	18	18	47	16	37	54
+94573848655	4966	12	4	4	49	9	30	55
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
@@ -662,7 +662,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: (to_unix_timestamp(ctimestamp1) = to_unix_timestamp(stimestamp1)) (type: boolean), (year(ctimestamp1) = year(CAST( stimestamp1 AS DATE))) (type: boolean), (month(ctimestamp1) = month(CAST( stimestamp1 AS DATE))) (type: boolean), (day(ctimestamp1) = day(CAST( stimestamp1 AS DATE))) (type: boolean), (day(ctimestamp1) = day(stimestamp1)) (type: boolean), (weekofyear(ctimestamp1) = weekofyear(CAST( stimestamp1 AS DATE))) (type: boolean), (hour(ctimestamp1) = hour(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (minute(ctimestamp1) = minute(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (second(ctimestamp1) = second(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean)
+                    expressions: (to_unix_timestamp(ctimestamp1) = to_unix_timestamp(stimestamp1)) (type: boolean), (year(ctimestamp1) = year(CAST( stimestamp1 AS DATE))) (type: boolean), (month(ctimestamp1) = month(CAST( stimestamp1 AS DATE))) (type: boolean), (day(ctimestamp1) = day(CAST( stimestamp1 AS DATE))) (type: boolean), (dayofmonth(ctimestamp1) = dayofmonth(stimestamp1)) (type: boolean), (weekofyear(ctimestamp1) = weekofyear(CAST( stimestamp1 AS DATE))) (type: boolean), (hour(ctimestamp1) = hour(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (minute(ctimestamp1) = minute(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (second(ctimestamp1) = second(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -854,7 +854,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), day(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
+                    expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), dayofmonth(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -945,7 +945,7 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_wrong
 #### A masked pattern was here ####
-NULL	2	12	2	NULL	49	4	40	39
+NULL	2	11	30	NULL	48	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
@@ -1206,7 +1206,7 @@ FROM alltypesorc_string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-2.89160478029166E11
+2.89160863229166E11
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   round(avg(ctimestamp1), 0),
   variance(ctimestamp1) between 8.97077295279421E19 and 8.97077295279422E19,
@@ -1363,4 +1363,4 @@ FROM alltypesorc_string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-3.6145059754E10	false	false	false	7.5245178084814E10	7.5245178084814E10	7.5245178084814E10	8.0440478971476E10
+3.6145107904E10	false	false	false	7.5245155692476E10	7.5245155692476E10	7.5245155692476E10	8.0440455033059E10

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/tez/acid_vectorization_original.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/acid_vectorization_original.q.out b/ql/src/test/results/clientpositive/tez/acid_vectorization_original.q.out
deleted file mode 100644
index 5fb5762..0000000
--- a/ql/src/test/results/clientpositive/tez/acid_vectorization_original.q.out
+++ /dev/null
@@ -1,740 +0,0 @@
-PREHOOK: query: CREATE TEMPORARY FUNCTION runWorker AS 'org.apache.hadoop.hive.ql.udf.UDFRunWorker'
-PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: runworker
-POSTHOOK: query: CREATE TEMPORARY FUNCTION runWorker AS 'org.apache.hadoop.hive.ql.udf.UDFRunWorker'
-POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: runworker
-PREHOOK: query: create table mydual(a int)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@mydual
-POSTHOOK: query: create table mydual(a int)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@mydual
-PREHOOK: query: insert into mydual values(1)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@mydual
-POSTHOOK: query: insert into mydual values(1)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@mydual
-POSTHOOK: Lineage: mydual.a SCRIPT []
-PREHOOK: query: CREATE TABLE over10k_n2(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           `dec` decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over10k_n2
-POSTHOOK: query: CREATE TABLE over10k_n2(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           `dec` decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over10k_n2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over10k_n2
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over10k_n2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over10k_n2
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over10k_n2
-PREHOOK: query: CREATE TABLE over10k_orc_bucketed(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           `dec` decimal(4,2),
-           bin binary) CLUSTERED BY(si) INTO 4 BUCKETS STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over10k_orc_bucketed
-POSTHOOK: query: CREATE TABLE over10k_orc_bucketed(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           `dec` decimal(4,2),
-           bin binary) CLUSTERED BY(si) INTO 4 BUCKETS STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over10k_orc_bucketed
-PREHOOK: query: select distinct si, si%4 from over10k_n2 order by si
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k_n2
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select distinct si, si%4 from over10k_n2 order by si
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k_n2
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-NULL	NULL
-256	0
-257	1
-258	2
-259	3
-260	0
-261	1
-262	2
-263	3
-264	0
-265	1
-266	2
-267	3
-268	0
-269	1
-270	2
-271	3
-272	0
-273	1
-274	2
-275	3
-276	0
-277	1
-278	2
-279	3
-280	0
-281	1
-282	2
-283	3
-284	0
-285	1
-286	2
-287	3
-288	0
-289	1
-290	2
-291	3
-292	0
-293	1
-294	2
-295	3
-296	0
-297	1
-298	2
-299	3
-300	0
-301	1
-302	2
-303	3
-304	0
-305	1
-306	2
-307	3
-308	0
-309	1
-310	2
-311	3
-312	0
-313	1
-314	2
-315	3
-316	0
-317	1
-318	2
-319	3
-320	0
-321	1
-322	2
-323	3
-324	0
-325	1
-326	2
-327	3
-328	0
-329	1
-330	2
-331	3
-332	0
-333	1
-334	2
-335	3
-336	0
-337	1
-338	2
-339	3
-340	0
-341	1
-342	2
-343	3
-344	0
-345	1
-346	2
-347	3
-348	0
-349	1
-350	2
-351	3
-352	0
-353	1
-354	2
-355	3
-356	0
-357	1
-358	2
-359	3
-360	0
-361	1
-362	2
-363	3
-364	0
-365	1
-366	2
-367	3
-368	0
-370	2
-371	3
-372	0
-373	1
-374	2
-375	3
-376	0
-377	1
-378	2
-379	3
-380	0
-381	1
-382	2
-383	3
-384	0
-385	1
-386	2
-387	3
-388	0
-389	1
-390	2
-391	3
-392	0
-393	1
-394	2
-395	3
-396	0
-397	1
-398	2
-399	3
-400	0
-401	1
-402	2
-403	3
-404	0
-405	1
-406	2
-407	3
-408	0
-409	1
-410	2
-411	3
-413	1
-414	2
-415	3
-417	1
-418	2
-419	3
-420	0
-421	1
-422	2
-423	3
-424	0
-425	1
-426	2
-427	3
-428	0
-429	1
-430	2
-431	3
-432	0
-433	1
-434	2
-435	3
-436	0
-437	1
-438	2
-439	3
-440	0
-441	1
-442	2
-443	3
-444	0
-445	1
-446	2
-447	3
-448	0
-449	1
-450	2
-451	3
-452	0
-453	1
-454	2
-455	3
-456	0
-457	1
-458	2
-459	3
-460	0
-461	1
-462	2
-463	3
-464	0
-465	1
-466	2
-467	3
-468	0
-469	1
-471	3
-472	0
-473	1
-474	2
-475	3
-476	0
-477	1
-478	2
-479	3
-480	0
-481	1
-482	2
-483	3
-484	0
-485	1
-486	2
-487	3
-488	0
-489	1
-490	2
-491	3
-492	0
-493	1
-494	2
-495	3
-496	0
-497	1
-498	2
-499	3
-500	0
-501	1
-502	2
-503	3
-504	0
-505	1
-506	2
-507	3
-508	0
-509	1
-510	2
-511	3
-PREHOOK: query: insert into over10k_orc_bucketed select * from over10k_n2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k_n2
-PREHOOK: Output: default@over10k_orc_bucketed
-POSTHOOK: query: insert into over10k_orc_bucketed select * from over10k_n2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k_n2
-POSTHOOK: Output: default@over10k_orc_bucketed
-POSTHOOK: Lineage: over10k_orc_bucketed.b SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.bin SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.bo SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.d SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.dec SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.f SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.i SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.s SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.si SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.t SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.ts SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
-Found 4 items
--rw-rw-rw-   3 ### USER ### ### GROUP ###       8903 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7698 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7273 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7180 ### HDFS DATE ### hdfs://### HDFS PATH ###
-PREHOOK: query: insert into over10k_orc_bucketed select * from over10k_n2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k_n2
-PREHOOK: Output: default@over10k_orc_bucketed
-POSTHOOK: query: insert into over10k_orc_bucketed select * from over10k_n2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k_n2
-POSTHOOK: Output: default@over10k_orc_bucketed
-POSTHOOK: Lineage: over10k_orc_bucketed.b SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.bin SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.bo SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.d SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.dec SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.f SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.i SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.s SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.si SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.t SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: over10k_orc_bucketed.ts SIMPLE [(over10k_n2)over10k_n2.FieldSchema(name:ts, type:timestamp, comment:null), ]
-Found 8 items
--rw-rw-rw-   3 ### USER ### ### GROUP ###       8903 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       8903 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7698 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7698 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7273 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7273 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7180 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7180 ### HDFS DATE ### hdfs://### HDFS PATH ###
-PREHOOK: query: select distinct 7 as seven, INPUT__FILE__NAME from over10k_orc_bucketed
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k_orc_bucketed
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select distinct 7 as seven, INPUT__FILE__NAME from over10k_orc_bucketed
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k_orc_bucketed
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-7	hdfs://### HDFS PATH ###
-7	hdfs://### HDFS PATH ###
-7	hdfs://### HDFS PATH ###
-7	hdfs://### HDFS PATH ###
-7	hdfs://### HDFS PATH ###
-7	hdfs://### HDFS PATH ###
-7	hdfs://### HDFS PATH ###
-7	hdfs://### HDFS PATH ###
-PREHOOK: query: alter table over10k_orc_bucketed set TBLPROPERTIES ('transactional'='true')
-PREHOOK: type: ALTERTABLE_PROPERTIES
-PREHOOK: Input: default@over10k_orc_bucketed
-PREHOOK: Output: default@over10k_orc_bucketed
-POSTHOOK: query: alter table over10k_orc_bucketed set TBLPROPERTIES ('transactional'='true')
-POSTHOOK: type: ALTERTABLE_PROPERTIES
-POSTHOOK: Input: default@over10k_orc_bucketed
-POSTHOOK: Output: default@over10k_orc_bucketed
-PREHOOK: query: explain select t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by t, si, i
-PREHOOK: type: QUERY
-POSTHOOK: query: explain select t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by t, si, i
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: over10k_orc_bucketed
-                  filterExpr: ((b = 4294967363L) and (t < 100Y)) (type: boolean)
-                  Statistics: Num rows: 2098 Data size: 41920 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: ((b = 4294967363L) and (t < 100Y)) (type: boolean)
-                    Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: t (type: tinyint), si (type: smallint), i (type: int)
-                      outputColumnNames: _col0, _col1, _col2
-                      Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int)
-                        sort order: +++
-                        Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
-            Execution mode: vectorized
-        Reducer 2 
-            Execution mode: vectorized
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: int)
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by  t, si, i
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k_orc_bucketed
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by  t, si, i
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k_orc_bucketed
-POSTHOOK: Output: hdfs://### HDFS PATH ###
--3	344	65733
--3	344	65733
-5	501	65585
-5	501	65585
-35	463	65646
-35	463	65646
-PREHOOK: query: explain select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
-PREHOOK: type: QUERY
-POSTHOOK: query: explain select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: over10k_orc_bucketed
-                  filterExpr: ((b = 4294967363L) and (t < 100Y)) (type: boolean)
-                  Statistics: Num rows: 2098 Data size: 41920 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: ((b = 4294967363L) and (t < 100Y)) (type: boolean)
-                    Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>), t (type: tinyint), si (type: smallint), i (type: int)
-                      outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 2 Data size: 176 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                        sort order: +
-                        Statistics: Num rows: 2 Data size: 176 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col1 (type: tinyint), _col2 (type: smallint), _col3 (type: int)
-            Execution mode: vectorized
-        Reducer 2 
-            Execution mode: vectorized
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>), VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: int)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 2 Data size: 176 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 2 Data size: 176 Basic stats: COMPLETE Column stats: COMPLETE
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k_orc_bucketed
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k_orc_bucketed
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-{"writeid":0,"bucketid":536870912,"rowid":104}	5	501	65585
-{"writeid":0,"bucketid":536870912,"rowid":420}	5	501	65585
-{"writeid":0,"bucketid":536936448,"rowid":37}	-3	344	65733
-{"writeid":0,"bucketid":536936448,"rowid":295}	-3	344	65733
-{"writeid":0,"bucketid":537067520,"rowid":173}	35	463	65646
-{"writeid":0,"bucketid":537067520,"rowid":406}	35	463	65646
-PREHOOK: query: explain update over10k_orc_bucketed set i = 0 where b = 4294967363 and t < 100
-PREHOOK: type: QUERY
-POSTHOOK: query: explain update over10k_orc_bucketed set i = 0 where b = 4294967363 and t < 100
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: over10k_orc_bucketed
-                  filterExpr: ((b = 4294967363L) and (t < 100Y)) (type: boolean)
-                  Statistics: Num rows: 2098 Data size: 706986 Basic stats: COMPLETE Column stats: COMPLETE
-                  Filter Operator
-                    predicate: ((b = 4294967363L) and (t < 100Y)) (type: boolean)
-                    Statistics: Num rows: 2 Data size: 674 Basic stats: COMPLETE Column stats: COMPLETE
-                    Select Operator
-                      expressions: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>), t (type: tinyint), si (type: smallint), f (type: float), d (type: double), bo (type: boolean), s (type: string), ts (type: timestamp), dec (type: decimal(4,2)), bin (type: binary)
-                      outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col8, _col9, _col10, _col11
-                      Statistics: Num rows: 2 Data size: 834 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                        sort order: +
-                        Map-reduce partition columns: UDFToInteger(_col0) (type: int)
-                        Statistics: Num rows: 2 Data size: 834 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col1 (type: tinyint), _col2 (type: smallint), _col5 (type: float), _col6 (type: double), _col7 (type: boolean), _col8 (type: string), _col9 (type: timestamp), _col10 (type: decimal(4,2)), _col11 (type: binary)
-            Execution mode: vectorized
-        Reducer 2 
-            Execution mode: vectorized
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>), VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), 0 (type: int), 4294967363L (type: bigint), VALUE._col3 (type: float), VALUE._col4 (type: double), VALUE._col5 (type: boolean), VALUE._col6 (type: string), VALUE._col7 (type: timestamp), VALUE._col8 (type: decimal(4,2)), VALUE._col9 (type: binary)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
-                Statistics: Num rows: 2 Data size: 834 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 2 Data size: 834 Basic stats: COMPLETE Column stats: COMPLETE
-                  table:
-                      input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.over10k_orc_bucketed
-                  Write Type: UPDATE
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.over10k_orc_bucketed
-          Write Type: UPDATE
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-
-PREHOOK: query: update over10k_orc_bucketed set i = 0 where b = 4294967363 and t < 100
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k_orc_bucketed
-PREHOOK: Output: default@over10k_orc_bucketed
-POSTHOOK: query: update over10k_orc_bucketed set i = 0 where b = 4294967363 and t < 100
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k_orc_bucketed
-POSTHOOK: Output: default@over10k_orc_bucketed
-PREHOOK: query: select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k_orc_bucketed
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select ROW__ID, t, si, i from over10k_orc_bucketed where b = 4294967363 and t < 100 order by ROW__ID
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k_orc_bucketed
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-{"writeid":### Masked writeid ###,"bucketid":536870912,"rowid":0}	5	501	0
-{"writeid":### Masked writeid ###,"bucketid":536870912,"rowid":1}	5	501	0
-{"writeid":### Masked writeid ###,"bucketid":536936448,"rowid":0}	-3	344	0
-{"writeid":### Masked writeid ###,"bucketid":536936448,"rowid":1}	-3	344	0
-{"writeid":### Masked writeid ###,"bucketid":537067520,"rowid":0}	35	463	0
-{"writeid":### Masked writeid ###,"bucketid":537067520,"rowid":1}	35	463	0
-PREHOOK: query: explain select ROW__ID, count(*) from over10k_orc_bucketed group by ROW__ID having count(*) > 1
-PREHOOK: type: QUERY
-POSTHOOK: query: explain select ROW__ID, count(*) from over10k_orc_bucketed group by ROW__ID having count(*) > 1
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: over10k_orc_bucketed
-                  Statistics: Num rows: 1234 Data size: 706090 Basic stats: COMPLETE Column stats: COMPLETE
-                  Select Operator
-                    expressions: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                    outputColumnNames: ROW__ID
-                    Statistics: Num rows: 1234 Data size: 706090 Basic stats: COMPLETE Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: count()
-                      keys: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                      mode: hash
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                        Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col1 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                keys: KEY._col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
-                Filter Operator
-                  predicate: (_col1 > 1L) (type: boolean)
-                  Statistics: Num rows: 205 Data size: 17220 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 205 Data size: 17220 Basic stats: COMPLETE Column stats: COMPLETE
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select ROW__ID, count(*) from over10k_orc_bucketed group by ROW__ID having count(*) > 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k_orc_bucketed
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select ROW__ID, count(*) from over10k_orc_bucketed group by ROW__ID having count(*) > 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k_orc_bucketed
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-PREHOOK: query: select ROW__ID, * from over10k_orc_bucketed where ROW__ID is null
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over10k_orc_bucketed
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select ROW__ID, * from over10k_orc_bucketed where ROW__ID is null
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over10k_orc_bucketed
-POSTHOOK: Output: hdfs://### HDFS PATH ###

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
index 2531f4a..88499fd 100644
--- a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
+++ b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
@@ -370,10 +370,10 @@ POSTHOOK: Lineage: over10k_orc_bucketed_n0.si SIMPLE [(over10k_n9)over10k_n9.Fie
 POSTHOOK: Lineage: over10k_orc_bucketed_n0.t SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:t, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: over10k_orc_bucketed_n0.ts SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:ts, type:timestamp, comment:null), ]
 Found 4 items
--rw-rw-rw-   3 ### USER ### ### GROUP ###       8903 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7698 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7273 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7180 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       8914 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7709 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7284 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7190 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: insert into over10k_orc_bucketed_n0 select * from over10k_n9
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over10k_n9
@@ -394,14 +394,14 @@ POSTHOOK: Lineage: over10k_orc_bucketed_n0.si SIMPLE [(over10k_n9)over10k_n9.Fie
 POSTHOOK: Lineage: over10k_orc_bucketed_n0.t SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:t, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: over10k_orc_bucketed_n0.ts SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:ts, type:timestamp, comment:null), ]
 Found 8 items
--rw-rw-rw-   3 ### USER ### ### GROUP ###       8903 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       8903 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7698 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7698 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7273 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7273 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7180 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw-   3 ### USER ### ### GROUP ###       7180 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       8914 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       8914 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7709 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7709 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7284 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7284 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7190 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw-   3 ### USER ### ### GROUP ###       7190 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select distinct 7 as seven, INPUT__FILE__NAME from over10k_orc_bucketed_n0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over10k_orc_bucketed_n0
@@ -680,22 +680,22 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: over10k_orc_bucketed_n0
-                  Statistics: Num rows: 1234 Data size: 706090 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1237 Data size: 707670 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                     outputColumnNames: ROW__ID
-                    Statistics: Num rows: 1234 Data size: 706090 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1237 Data size: 707670 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       keys: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                       mode: hash
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                        Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint)
         Reducer 2 
             Reduce Operator Tree:
@@ -704,13 +704,13 @@ STAGE PLANS:
                 keys: KEY._col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
                 Filter Operator
                   predicate: (_col1 > 1L) (type: boolean)
-                  Statistics: Num rows: 205 Data size: 17220 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 205 Data size: 17220 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/results_cache_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/results_cache_2.q.out b/ql/src/test/results/clientpositive/llap/results_cache_2.q.out
index edce6c3..25b8dc3 100644
--- a/ql/src/test/results/clientpositive/llap/results_cache_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/results_cache_2.q.out
@@ -103,7 +103,7 @@ group by c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-2012-01-01 09:02:03	10
+2012-01-01 01:02:03	10
 test.comment=Queries using non-deterministic functions should not use results cache
 PREHOOK: query: explain
 select c1, count(*)
@@ -139,7 +139,7 @@ STAGE PLANS:
                       Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
                       Group By Operator
                         aggregations: count()
-                        keys: TIMESTAMP'2012-01-01 09:02:03' (type: timestamp)
+                        keys: TIMESTAMP'2012-01-01 01:02:03.0' (type: timestamp)
                         mode: hash
                         outputColumnNames: _col0, _col1
                         Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
@@ -161,7 +161,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
                 Select Operator
-                  expressions: TIMESTAMP'2012-01-01 09:02:03' (type: timestamp), _col1 (type: bigint)
+                  expressions: TIMESTAMP'2012-01-01 01:02:03.0' (type: timestamp), _col1 (type: bigint)
                   outputColumnNames: _col0, _col1
                   Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part.q.out
index 97752f3..7b09a1a 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part.q.out
@@ -393,11 +393,11 @@ POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n9
 POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n9@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	1	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	1	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	1	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	1	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	1	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	1	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp_n9
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_llap_io.q.out
index 23c33a3..442bd88 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_llap_io.q.out
@@ -393,11 +393,11 @@ POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n5
 POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n5@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	1	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	1	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	1	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	1	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	1	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	1	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp_n5
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table.q.out
index a043b67..8c25d3a 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table.q.out
@@ -410,11 +410,11 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n6
 #### A masked pattern was here ####
 insert_num	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table table_change_date_group_string_group_date_group_n6
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_llap_io.q.out
index 35c1fae..4e95bd8 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_llap_io.q.out
@@ -410,11 +410,11 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n0
 #### A masked pattern was here ####
 insert_num	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table table_change_date_group_string_group_date_group_n0
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out
index 7e1cce3..f58338f 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out
@@ -594,11 +594,11 @@ POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n6
 POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n6@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	1	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	1	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	1	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	1	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	1	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	1	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp_n6
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out
index 219ad7a..9dddc12 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out
@@ -603,11 +603,11 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table_change_date_group_string_group_date_group
 #### A masked pattern was here ####
 insert_num	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table table_change_date_group_string_group_date_group
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out
index ce9fe84..0f3c600 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out
@@ -607,11 +607,11 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n12
 #### A masked pattern was here ####
 insert_num	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table table_change_date_group_string_group_date_group_n12
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part.q.out
index e02a1e2..91ec73b 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part.q.out
@@ -558,11 +558,11 @@ POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n7
 POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n7@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	1	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	1	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	1	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	1	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	1	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	1	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp_n7
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex.q.out
index 30c0f85..d292e54 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex.q.out
@@ -188,10 +188,10 @@ POSTHOOK: Input: default@part_change_various_various_struct1_n2@part=1
 POSTHOOK: Input: default@part_change_various_various_struct1_n2@part=2
 #### A masked pattern was here ####
 insert_num	part	s1	b
-1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}	original
-2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"}	original
-3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}	original
-4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}	original
+1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}	original
+2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}	original
+3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}	original
+4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}	original
 5	2	{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}	new
 6	1	{"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}	new
 PREHOOK: query: drop table part_change_various_various_struct1_n2
@@ -468,10 +468,10 @@ POSTHOOK: Input: default@part_add_various_various_struct2_n2@part=2
 insert_num	part	b	s2
 1	1	original	NULL
 2	1	original	NULL
-3	1	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
-4	1	new	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"}
-5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
-6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
+3	1	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
+4	1	new	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}
+5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
+6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
 7	2	new	{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}
 8	1	new	{"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}
 PREHOOK: query: drop table part_add_various_various_struct2_n2

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex_llap_io.q.out
index 83e0fe4..d7ee10b 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex_llap_io.q.out
@@ -189,10 +189,10 @@ POSTHOOK: Input: default@part_change_various_various_struct1_n5@part=1
 POSTHOOK: Input: default@part_change_various_various_struct1_n5@part=2
 #### A masked pattern was here ####
 insert_num	part	s1	b
-1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}	original
-2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"}	original
-3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}	original
-4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}	original
+1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}	original
+2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}	original
+3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}	original
+4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}	original
 5	2	{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}	new
 6	1	{"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}	new
 PREHOOK: query: drop table part_change_various_various_struct1_n5
@@ -470,10 +470,10 @@ POSTHOOK: Input: default@part_add_various_various_struct2_n5@part=2
 insert_num	part	b	s2
 1	1	original	NULL
 2	1	original	NULL
-3	1	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
-4	1	new	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"}
-5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
-6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
+3	1	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
+4	1	new	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}
+5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
+6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
 7	2	new	{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}
 8	1	new	{"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}
 PREHOOK: query: drop table part_add_various_various_struct2_n5

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive.q.out
index f70efd6..d6382c6 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive.q.out
@@ -297,16 +297,16 @@ POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n6
 POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n6@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	c34	c35	c36	c37	c38	c39	c40	c41	c42	c43	c44	c45	c46	c47	c48	c49	c50	c51	c52	c53	b
-101	1	true	NULL	true	NULL	NULL	NULL	true	NULL	true	1	NULL	NULL	NULL	NULL	NULL	NULL	-128	-128	-128	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	NULL	1	-128	NULL	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	134416464868	original
+101	1	true	NULL	true	NULL	NULL	NULL	true	NULL	true	1	NULL	NULL	NULL	NULL	NULL	NULL	-128	-128	-128	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	NULL	1	-128	NULL	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	134416490068	original
 101	1	true	true	true	true	true	true	true	true	true	-128	-128	-128	-128	-128	-128	-128	-128	-128	-128	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 102	1	false	false	false	false	false	false	false	false	false	127	127	127	127	127	127	127	127	127	127	127	32767	32767	32767	32767	32767	32767	32767	32767	32767	32767	32767	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	new
-102	1	true	true	true	true	NULL	NULL	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	127	127	127	NULL	0	127	NULL	NULL	NULL	NULL	NULL	32767	32767	32767	NULL	0	127	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	NULL	0	127	32767	2147483647	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117919850	original
+102	1	true	true	true	true	NULL	NULL	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	127	127	127	NULL	0	127	NULL	NULL	NULL	NULL	NULL	32767	32767	32767	NULL	0	127	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	NULL	0	127	32767	2147483647	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117945050	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	true	true	true	true	true	true	true	NULL	true	1	NULL	NULL	NULL	-100	30	NULL	23	23	23	NULL	1	23	NULL	NULL	-100	30	NULL	834	834	834	NULL	1	23	834	NULL	-100	30	66475	203332	203332	203332	270887654	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270887654	original
+104	1	true	true	true	true	true	true	true	NULL	true	1	NULL	NULL	NULL	-100	30	NULL	23	23	23	NULL	1	23	NULL	NULL	-100	30	NULL	834	834	834	NULL	1	23	834	NULL	-100	30	66475	203332	203332	203332	270912854	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270912854	original
 104	1	true	true	true	true	true	true	true	true	true	23	23	23	23	23	23	23	23	23	23	23	834	834	834	834	834	834	834	834	834	834	834	203332	203332	203332	203332	203332	203332	203332	203332	203332	203332	203332	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	new
 105	1	false	false	false	false	false	false	false	false	false	-99	-99	-99	-99	-99	-99	-99	-99	-99	-99	-99	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	new
-105	1	true	true	true	true	NULL	true	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	-99	-99	-99	NULL	0	-99	NULL	NULL	NULL	NULL	NULL	-28300	-28300	-28300	NULL	0	-99	-28300	NULL	NULL	46114	9250340	-999992	-999992	-999992	663178839	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663178839	original
+105	1	true	true	true	true	NULL	true	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	-99	-99	-99	NULL	0	-99	NULL	NULL	NULL	NULL	NULL	-28300	-28300	-28300	NULL	0	-99	-28300	NULL	NULL	46114	9250340	-999992	-999992	-999992	663207639	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663207639	original
 PREHOOK: query: drop table part_change_various_various_boolean_to_bigint_n6
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_boolean_to_bigint_n6
@@ -522,11 +522,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n6
 POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n6@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	b
-101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416464868.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.34416466E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441646486897012E11	original
-102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117919850.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117921E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117919850597E11	original
+101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416490068.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.3441649E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441649006897012E11	original
+102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117945050.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117945E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117945050597E11	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270887654.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70887648E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35978	30.774	30.774	30.774	2.70887654E8	original
-105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663178839.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6317882E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.631788397203685E8	original
+104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270912854.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70912864E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35978	30.774	30.774	30.774	2.70912854E8	original
+105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663207639.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6320762E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.632076397203685E8	original
 111	1	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	new
 PREHOOK: query: drop table part_change_various_various_decimal_to_double_n6
 PREHOOK: type: DROPTABLE
@@ -669,11 +669,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n6
 POSTHOOK: Input: default@part_change_various_various_timestamp_n6@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	b
-101	1	1970-01-01 00:00:00.001	1969-12-31 23:59:59.872	NULL	1969-12-07 03:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 09:54:28.970117179	6229-06-28 09:54:28.97011	6229-06-28 09:54:28.97011	1950-12-18 08:00:00	original
-102	1	1970-01-01 00:00:00	1970-01-01 00:00:00.127	1970-01-01 00:00:32.767	1970-01-25 20:31:23.647	NULL	NULL	1970-01-01 00:00:00	NULL	5966-07-09 10:30:50.597	5966-07-09 10:30:50.597	5966-07-09 10:30:50.597	2049-12-18 08:00:00	original
+101	1	1969-12-31 16:00:00.001	1969-12-31 15:59:59.872	NULL	1969-12-06 19:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
+102	1	1969-12-31 16:00:00	1969-12-31 16:00:00.127	1969-12-31 16:00:32.767	1970-01-25 12:31:23.647	NULL	NULL	1969-12-31 16:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1970-01-01 00:00:00.001	1970-01-01 00:00:00.023	1970-01-01 00:00:00.834	1970-01-01 00:03:23.332	NULL	1969-12-31 23:58:19.640220643	1970-01-01 00:00:30.774	1970-01-01 18:27:55.561431	1978-08-02 13:34:14	1978-08-02 13:34:14	1978-08-02 13:34:14	2021-09-24 07:00:00	original
-105	1	1970-01-01 00:00:00	1969-12-31 23:59:59.901	1969-12-31 23:59:31.7	1969-12-31 23:43:20.008	1962-12-16 06:57:26.267	NULL	1970-01-01 12:48:34.28	1970-04-18 01:32:20.75	1991-01-07 00:20:39.72036854	1991-01-07 00:20:39.72036	1991-01-07 00:20:39.72036	2024-11-11 08:00:00	original
+104	1	1969-12-31 16:00:00.001	1969-12-31 16:00:00.023	1969-12-31 16:00:00.834	1969-12-31 16:03:23.332	NULL	1969-12-31 15:58:19.640220643	1969-12-31 16:00:30.774	1970-01-01 10:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
+105	1	1969-12-31 16:00:00	1969-12-31 15:59:59.901	1969-12-31 15:59:31.7	1969-12-31 15:43:20.008	1962-12-15 22:57:26.267	NULL	1970-01-01 04:48:34.28	1970-04-17 17:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_various_various_timestamp_n6
 PREHOOK: type: DROPTABLE
@@ -800,10 +800,10 @@ POSTHOOK: Input: default@part_change_various_various_date_n6
 POSTHOOK: Input: default@part_change_various_various_date_n6@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	b
-101	1	1950-12-18	1950-12-18	1950-12-18	6229-06-27	original
-102	1	2049-12-18	2049-12-18	2049-12-18	5966-07-08	original
+101	1	1950-12-18	1950-12-18	1950-12-18	6229-06-28	original
+102	1	2049-12-18	2049-12-18	2049-12-18	5966-07-09	original
 103	1	NULL	NULL	NULL	NULL	original
-104	1	2021-09-24	2021-09-24	2021-09-24	1978-08-01	original
+104	1	2021-09-24	2021-09-24	2021-09-24	1978-08-02	original
 105	1	2024-11-11	2024-11-11	2024-11-11	1991-01-06	original
 111	1	1964-01-24	1964-01-24	1964-01-24	1964-01-24	new
 PREHOOK: query: drop table part_change_various_various_date_n6

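(The bigint columns in the boolean_to_bigint hunks above make the offset directly measurable: a timestamp cast to bigint yields epoch seconds, so each '-'/'+' pair differs by the zone offset in seconds. A quick check, under the same US/Pacific assumption, using the c53 values copied from the hunk:

    public class EpochShiftCheck {
      public static void main(String[] args) {
        // Row 101 (a June date, so PDT applies):
        long removed = 134416464868L; // value on the '-' line
        long added   = 134416490068L; // value on the '+' line
        System.out.println(added - removed); // 25200 s = 7 h (PDT)

        // Row 105 (a January date, so PST applies):
        System.out.println(663207639L - 663178839L); // 28800 s = 8 h (PST)
      }
    }

Rows 102 and 104 (July and August dates) show the same 25200-second PDT delta, consistent with the rendering difference sketched earlier.)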
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive_llap_io.q.out
index 351d201..f22c79a 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive_llap_io.q.out
@@ -298,16 +298,16 @@ POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n5
 POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n5@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	c34	c35	c36	c37	c38	c39	c40	c41	c42	c43	c44	c45	c46	c47	c48	c49	c50	c51	c52	c53	b
-101	1	true	NULL	true	NULL	NULL	NULL	true	NULL	true	1	NULL	NULL	NULL	NULL	NULL	NULL	-128	-128	-128	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	NULL	1	-128	NULL	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	134416464868	original
+101	1	true	NULL	true	NULL	NULL	NULL	true	NULL	true	1	NULL	NULL	NULL	NULL	NULL	NULL	-128	-128	-128	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1	-128	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	NULL	1	-128	NULL	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	134416490068	original
 101	1	true	true	true	true	true	true	true	true	true	-128	-128	-128	-128	-128	-128	-128	-128	-128	-128	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 102	1	false	false	false	false	false	false	false	false	false	127	127	127	127	127	127	127	127	127	127	127	32767	32767	32767	32767	32767	32767	32767	32767	32767	32767	32767	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	new
-102	1	true	true	true	true	NULL	NULL	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	127	127	127	NULL	0	127	NULL	NULL	NULL	NULL	NULL	32767	32767	32767	NULL	0	127	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	NULL	0	127	32767	2147483647	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117919850	original
+102	1	true	true	true	true	NULL	NULL	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	127	127	127	NULL	0	127	NULL	NULL	NULL	NULL	NULL	32767	32767	32767	NULL	0	127	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	NULL	0	127	32767	2147483647	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117945050	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	true	true	true	true	true	true	true	NULL	true	1	NULL	NULL	NULL	-100	30	NULL	23	23	23	NULL	1	23	NULL	NULL	-100	30	NULL	834	834	834	NULL	1	23	834	NULL	-100	30	66475	203332	203332	203332	270887654	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270887654	original
+104	1	true	true	true	true	true	true	true	NULL	true	1	NULL	NULL	NULL	-100	30	NULL	23	23	23	NULL	1	23	NULL	NULL	-100	30	NULL	834	834	834	NULL	1	23	834	NULL	-100	30	66475	203332	203332	203332	270912854	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270912854	original
 104	1	true	true	true	true	true	true	true	true	true	23	23	23	23	23	23	23	23	23	23	23	834	834	834	834	834	834	834	834	834	834	834	203332	203332	203332	203332	203332	203332	203332	203332	203332	203332	203332	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	new
 105	1	false	false	false	false	false	false	false	false	false	-99	-99	-99	-99	-99	-99	-99	-99	-99	-99	-99	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-999992	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	new
-105	1	true	true	true	true	NULL	true	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	-99	-99	-99	NULL	0	-99	NULL	NULL	NULL	NULL	NULL	-28300	-28300	-28300	NULL	0	-99	-28300	NULL	NULL	46114	9250340	-999992	-999992	-999992	663178839	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663178839	original
+105	1	true	true	true	true	NULL	true	true	NULL	true	0	NULL	NULL	NULL	NULL	NULL	NULL	-99	-99	-99	NULL	0	-99	NULL	NULL	NULL	NULL	NULL	-28300	-28300	-28300	NULL	0	-99	-28300	NULL	NULL	46114	9250340	-999992	-999992	-999992	663207639	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663207639	original
 PREHOOK: query: drop table part_change_various_various_boolean_to_bigint_n5
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_boolean_to_bigint_n5
@@ -524,11 +524,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n5
 POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n5@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	b
-101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416464868.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.34416466E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441646486897012E11	original
-102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117919850.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117921E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117919850597E11	original
+101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416490068.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.3441649E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441649006897012E11	original
+102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117945050.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117945E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117945050597E11	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270887654.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70887648E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35978	30.774	30.774	30.774	2.70887654E8	original
-105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663178839.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6317882E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.631788397203685E8	original
+104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270912854.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70912864E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35978	30.774	30.774	30.774	2.70912854E8	original
+105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663207639.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6320762E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.632076397203685E8	original
 111	1	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	new
 PREHOOK: query: drop table part_change_various_various_decimal_to_double_n5
 PREHOOK: type: DROPTABLE
@@ -672,11 +672,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n5
 POSTHOOK: Input: default@part_change_various_various_timestamp_n5@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	b
-101	1	1970-01-01 00:00:00.001	1969-12-31 23:59:59.872	NULL	1969-12-07 03:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 09:54:28.970117179	6229-06-28 09:54:28.97011	6229-06-28 09:54:28.97011	1950-12-18 08:00:00	original
-102	1	1970-01-01 00:00:00	1970-01-01 00:00:00.127	1970-01-01 00:00:32.767	1970-01-25 20:31:23.647	NULL	NULL	1970-01-01 00:00:00	NULL	5966-07-09 10:30:50.597	5966-07-09 10:30:50.597	5966-07-09 10:30:50.597	2049-12-18 08:00:00	original
+101	1	1969-12-31 16:00:00.001	1969-12-31 15:59:59.872	NULL	1969-12-06 19:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
+102	1	1969-12-31 16:00:00	1969-12-31 16:00:00.127	1969-12-31 16:00:32.767	1970-01-25 12:31:23.647	NULL	NULL	1969-12-31 16:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1970-01-01 00:00:00.001	1970-01-01 00:00:00.023	1970-01-01 00:00:00.834	1970-01-01 00:03:23.332	NULL	1969-12-31 23:58:19.640220643	1970-01-01 00:00:30.774	1970-01-01 18:27:55.561431	1978-08-02 13:34:14	1978-08-02 13:34:14	1978-08-02 13:34:14	2021-09-24 07:00:00	original
-105	1	1970-01-01 00:00:00	1969-12-31 23:59:59.901	1969-12-31 23:59:31.7	1969-12-31 23:43:20.008	1962-12-16 06:57:26.267	NULL	1970-01-01 12:48:34.28	1970-04-18 01:32:20.75	1991-01-07 00:20:39.72036854	1991-01-07 00:20:39.72036	1991-01-07 00:20:39.72036	2024-11-11 08:00:00	original
+104	1	1969-12-31 16:00:00.001	1969-12-31 16:00:00.023	1969-12-31 16:00:00.834	1969-12-31 16:03:23.332	NULL	1969-12-31 15:58:19.640220643	1969-12-31 16:00:30.774	1970-01-01 10:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
+105	1	1969-12-31 16:00:00	1969-12-31 15:59:59.901	1969-12-31 15:59:31.7	1969-12-31 15:43:20.008	1962-12-15 22:57:26.267	NULL	1970-01-01 04:48:34.28	1970-04-17 17:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_various_various_timestamp_n5
 PREHOOK: type: DROPTABLE
@@ -804,10 +804,10 @@ POSTHOOK: Input: default@part_change_various_various_date_n5
 POSTHOOK: Input: default@part_change_various_various_date_n5@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	b
-101	1	1950-12-18	1950-12-18	1950-12-18	6229-06-27	original
-102	1	2049-12-18	2049-12-18	2049-12-18	5966-07-08	original
+101	1	1950-12-18	1950-12-18	1950-12-18	6229-06-28	original
+102	1	2049-12-18	2049-12-18	2049-12-18	5966-07-09	original
 103	1	NULL	NULL	NULL	NULL	original
-104	1	2021-09-24	2021-09-24	2021-09-24	1978-08-01	original
+104	1	2021-09-24	2021-09-24	2021-09-24	1978-08-02	original
 105	1	2024-11-11	2024-11-11	2024-11-11	1991-01-06	original
 111	1	1964-01-24	1964-01-24	1964-01-24	1964-01-24	new
 PREHOOK: query: drop table part_change_various_various_date_n5
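
Note on the golden-file shifts above: this revert restores rendering of the same underlying instants in the session's local time zone rather than UTC, so timestamps move by 8 hours (PST) or 7 hours (PDT), and derived dates can slip a calendar day (6229-06-27 back to 6229-06-28 above). A minimal sketch of the two renderings of one instant (cf. the timestamp rows above), assuming the qtest zone is America/Los_Angeles:

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;

public class ZoneRenderingSketch {
  public static void main(String[] args) {
    Instant t = Instant.ofEpochMilli(1L);  // cf. row 101, column c1 in the timestamp table above
    // UTC rendering (the behavior this commit reverts):
    System.out.println(t.atZone(ZoneOffset.UTC).toLocalDateTime());
    // -> 1970-01-01T00:00:00.001
    // Local rendering (the behavior this revert restores); the zone is an assumption:
    System.out.println(t.atZone(ZoneId.of("America/Los_Angeles")).toLocalDateTime());
    // -> 1969-12-31T16:00:00.001
  }
}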

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_llap_io.q.out
index 603c669..e363776 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_llap_io.q.out
@@ -562,11 +562,11 @@ POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n2
 POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n2@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	1	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	1	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	1	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	1	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	1	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	1	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp_n2
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out
index 454bf26..8696b9b 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out
@@ -531,11 +531,11 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n10
 #### A masked pattern was here ####
 insert_num	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table table_change_date_group_string_group_date_group_n10
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table_llap_io.q.out
index 7ccda3d..198dc28 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table_llap_io.q.out
@@ -534,11 +534,11 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n7
 #### A masked pattern was here ####
 insert_num	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table table_change_date_group_string_group_date_group_n7
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out
index e5aa7f4..69167ef 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out
@@ -662,11 +662,11 @@ POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n4
 POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n4@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	b
-101	1	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-27 19:54:28.970117179	6229-06-27 19:54:28.970117179                     	6229-06-27 19:5	6229-06-27 19:54:28.970117179	6229-06-27 19:5	original
-102	1	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-08 20:30:50.597	5966-07-08 20:30:50.597                           	5966-07-08 20:3	5966-07-08 20:30:50.597	5966-07-08 20:3	original
+101	1	1950-12-18	1950-12-18                                        	1950-12-18     	1950-12-18	1950-12-18	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.970117179                     	6229-06-28 02:5	6229-06-28 02:54:28.970117179	6229-06-28 02:5	original
+102	1	2049-12-18	2049-12-18                                        	2049-12-18     	2049-12-18	2049-12-18	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597                           	5966-07-09 03:3	5966-07-09 03:30:50.597	5966-07-09 03:3	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-01 23:34:14.0	1978-08-01 23:34:14.0                             	1978-08-01 23:3	1978-08-01 23:34:14.0	1978-08-01 23:3	original
-105	1	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 08:20:39.72036854	1991-01-06 08:20:39.72036854                      	1991-01-06 08:2	1991-01-06 08:20:39.72036854	1991-01-06 08:2	original
+104	1	2021-09-24	2021-09-24                                        	2021-09-24     	2021-09-24	2021-09-24	1978-08-02 06:34:14.0	1978-08-02 06:34:14.0                             	1978-08-02 06:3	1978-08-02 06:34:14.0	1978-08-02 06:3	original
+105	1	2024-11-11	2024-11-11                                        	2024-11-11     	2024-11-11	2024-11-11	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036854                      	1991-01-06 16:2	1991-01-06 16:20:39.72036854	1991-01-06 16:2	original
 111	1	filler	filler                                            	filler         	filler	filler	filler	filler                                            	filler         	filler	filler	new
 PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp_n4
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out
index 74c4b39..78b5231 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out
@@ -214,10 +214,10 @@ POSTHOOK: Input: default@part_change_various_various_struct1_n8@part=1
 POSTHOOK: Input: default@part_change_various_various_struct1_n8@part=2
 #### A masked pattern was here ####
 insert_num	part	s1	b
-1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}	original
-2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"}	original
-3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}	original
-4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}	original
+1	1	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}	original
+2	1	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}	original
+3	1	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}	original
+4	1	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}	original
 5	2	{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}	new
 6	1	{"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}	new
 PREHOOK: query: drop table part_change_various_various_struct1_n8
@@ -520,10 +520,10 @@ POSTHOOK: Input: default@part_add_various_various_struct2_n8@part=2
 insert_num	part	b	s2
 1	1	original	NULL
 2	1	original	NULL
-3	1	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
-4	1	new	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"}
-5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
-6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
+3	1	new	{"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile  ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
+4	1	new	{"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":"  baffling","c10":"  baffling    ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}
+5	2	new	{"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
+6	2	new	{"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
 7	2	new	{"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}
 8	1	new	{"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}
 PREHOOK: query: drop table part_add_various_various_struct2_n8


[30/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
index cedbba2..68b038f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
@@ -21,10 +21,12 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 import java.sql.Timestamp;
 import java.util.Arrays;
 
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 // A type date (LongColumnVector storing epoch days) minus a type date produces a
 // type interval_day_time (TimestampColumnVector storing nanosecond interval in 2 longs).
@@ -42,7 +44,7 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
     super(outputColumnNum);
     this.colNum = colNum;
     this.value = new Timestamp(0);
-    this.value.setTime(DateWritableV2.daysToMillis((int) value));
+    this.value.setTime(DateWritable.daysToMillis((int) value));
   }
 
   public DateScalarSubtractDateColumn() {
@@ -89,7 +91,7 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
     if (inputColVector2.isRepeating) {
       if (inputColVector2.noNulls || !inputIsNull[0]) {
         outputIsNull[0] = false;
-        scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
+        scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
         dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
         outputColVector.setFromScratchIntervalDayTime(0);
       } else {
@@ -106,14 +108,14 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           outputIsNull[i] = false;
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
         Arrays.fill(outputIsNull, 0, n, false);
         for(int i = 0; i != n; i++) {
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
@@ -127,14 +129,14 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           outputIsNull[i] = inputIsNull[i];
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
         for(int i = 0; i != n; i++) {
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
@@ -146,7 +148,7 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return "val " + org.apache.hadoop.hive.common.type.Date.ofEpochMilli(value.getTime()) + ", " + getColumnParamString(1, colNum);
+    return "val " + value + ", " + getColumnParamString(1, colNum);
   }
 
   @Override
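
The header comment in this file (a date column is a LongColumnVector of epoch days; date minus date yields an interval_day_time) boils down to: convert both sides to milliseconds, subtract, and split into seconds and nanoseconds. A rough sketch with hypothetical day values, skipping the local-zone correction that DateWritable.daysToMillis applies:

import java.sql.Timestamp;

public final class DateMinusDateSketch {
  private static final long MILLIS_PER_DAY = 86_400_000L;

  public static void main(String[] args) {
    Timestamp left = new Timestamp(17_000L * MILLIS_PER_DAY);   // hypothetical epoch days
    Timestamp right = new Timestamp(16_990L * MILLIS_PER_DAY);
    long diffMillis = left.getTime() - right.getTime();
    long seconds = diffMillis / 1000L;                   // interval seconds part
    int nanos = (int) (diffMillis % 1000L) * 1_000_000;  // interval nanos part
    System.out.println(seconds + " s, " + nanos + " ns"); // 864000 s, 0 ns (10 days)
  }
}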

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
index e444b68..e150789 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descript
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 import java.util.Arrays;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java
index eff20c9..252a816 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.DynamicValue;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -181,9 +181,9 @@ public class DynamicValueVectorExpression extends VectorExpression {
         decimalValue = PrimitiveObjectInspectorUtils.getHiveDecimal(val, poi);
         break;
       case DATE:
-        longValue = DateWritableV2.dateToDays(PrimitiveObjectInspectorUtils.getDate(val, poi));
+        longValue = DateWritable.dateToDays(PrimitiveObjectInspectorUtils.getDate(val, poi));
       case TIMESTAMP:
-        timestampValue = PrimitiveObjectInspectorUtils.getTimestamp(val, poi).toSqlTimestamp();
+        timestampValue = PrimitiveObjectInspectorUtils.getTimestamp(val, poi);
         break;
       case INTERVAL_YEAR_MONTH:
         longValue = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(val, poi).getTotalMonths();
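
In the hunk above, the DATE branch reduces the runtime value to epoch days via DateWritable.dateToDays and, as restored, falls through into the TIMESTAMP branch (there is no break). A minimal sketch of the day reduction, with floor division standing in for dateToDays (the real method also corrects for the local time zone):

import java.sql.Date;

public final class DateToDaysSketch {
  static int dateToDays(Date d) {
    return (int) Math.floorDiv(d.getTime(), 86_400_000L); // whole days since epoch
  }

  public static void main(String[] args) {
    // Prints 10 when the JVM time zone is UTC; local zones can shift it.
    System.out.println(dateToDays(Date.valueOf("1970-01-11")));
  }
}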

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
index f924e2e..0d9f9f7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 abstract public class TimestampToStringUnaryUDF extends VectorExpression {
   private static final long serialVersionUID = 1L;
 
-  protected final int inputColumn;
+  private final int inputColumn;
 
   public TimestampToStringUnaryUDF(int inputColumn, int outputColumnNum) {
     super(outputColumnNum);

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
index 929f845..48638b7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
 /**
@@ -41,7 +41,7 @@ public interface VectorExpressionWriter {
   Object writeValue(byte[] value, int start, int length) throws HiveException;
   Object writeValue(HiveDecimalWritable value) throws HiveException;
   Object writeValue(HiveDecimal value) throws HiveException;
-  Object writeValue(TimestampWritableV2 value) throws HiveException;
+  Object writeValue(TimestampWritable value) throws HiveException;
   Object writeValue(Timestamp value) throws HiveException;
   Object writeValue(HiveIntervalDayTimeWritable value) throws HiveException;
   Object writeValue(HiveIntervalDayTime value) throws HiveException;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
index 6a87927..a086535 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
@@ -70,6 +70,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
 import org.apache.hadoop.io.Text;
+import org.apache.hive.common.util.DateUtils;
 
 /**
  * VectorExpressionWriterFactory helper class for generating VectorExpressionWriter objects.
@@ -184,14 +185,14 @@ public final class VectorExpressionWriterFactory {
      * The base implementation must be overridden by the Timestamp specialization
      */
     @Override
-    public Object writeValue(TimestampWritableV2 value) throws HiveException {
+    public Object writeValue(TimestampWritable value) throws HiveException {
       throw new HiveException("Internal error: should not reach here");
     }
 
     /**
      * The base implementation must be overridden by the Timestamp specialization
      */
-    public Object setValue(Object field, TimestampWritableV2 value) throws HiveException {
+    public Object setValue(Object field, TimestampWritable value) throws HiveException {
       throw new HiveException("Internal error: should not reach here");
     }
 
@@ -470,9 +471,9 @@ public final class VectorExpressionWriterFactory {
     @Override
     public Object writeValue(ColumnVector column, int row) throws HiveException {
       TimestampColumnVector dcv = (TimestampColumnVector) column;
-      TimestampWritableV2 timestampWritable = (TimestampWritableV2) dcv.getScratchWritable();
+      TimestampWritable timestampWritable = (TimestampWritable) dcv.getScratchWritable();
       if (timestampWritable == null) {
-        timestampWritable = new TimestampWritableV2();
+        timestampWritable = new TimestampWritable();
         dcv.setScratchWritable(timestampWritable);
       }
       if (dcv.noNulls && !dcv.isRepeating) {
@@ -497,9 +498,9 @@ public final class VectorExpressionWriterFactory {
     @Override
     public Object setValue(Object field, ColumnVector column, int row) throws HiveException {
       TimestampColumnVector dcv = (TimestampColumnVector) column;
-      TimestampWritableV2 timestampWritable = (TimestampWritableV2) dcv.getScratchWritable();
+      TimestampWritable timestampWritable = (TimestampWritable) dcv.getScratchWritable();
       if (timestampWritable == null) {
-        timestampWritable = new TimestampWritableV2();
+        timestampWritable = new TimestampWritable();
         dcv.setScratchWritable(timestampWritable);
       }
       if (dcv.noNulls && !dcv.isRepeating) {
@@ -785,7 +786,7 @@ public final class VectorExpressionWriterFactory {
       }
 
       @Override
-      public Object setValue(Object field, TimestampWritableV2 value) {
+      public Object setValue(Object field, TimestampWritable value) {
         if (null == field) {
           field = initValue(null);
         }
@@ -823,7 +824,7 @@ public final class VectorExpressionWriterFactory {
 
       @Override
       public Object writeValue(long value) {
-        dt.setTime(DateWritableV2.daysToMillis((int) value));
+        dt.setTime(DateWritable.daysToMillis((int) value));
         ((SettableDateObjectInspector) this.objectInspector).set(obj, dt);
         return obj;
       }
@@ -833,7 +834,7 @@ public final class VectorExpressionWriterFactory {
         if (null == field) {
           field = initValue(null);
         }
-        dt.setTime(DateWritableV2.daysToMillis((int) value));
+        dt.setTime(DateWritable.daysToMillis((int) value));
         ((SettableDateObjectInspector) this.objectInspector).set(field, dt);
         return field;
       }
@@ -859,7 +860,7 @@ public final class VectorExpressionWriterFactory {
       }
 
       @Override
-      public Object writeValue(TimestampWritableV2 value) throws HiveException {
+      public Object writeValue(TimestampWritable value) throws HiveException {
         return ((SettableTimestampObjectInspector) this.objectInspector).set(obj, value);
       }
 
@@ -879,7 +880,7 @@ public final class VectorExpressionWriterFactory {
       }
 
       @Override
-      public Object setValue(Object field, TimestampWritableV2 value) {
+      public Object setValue(Object field, TimestampWritable value) {
         if (null == field) {
           field = initValue(null);
         }
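
The writeValue/setValue hunks above both use the column vector's scratch writable: allocate one TimestampWritable lazily, stash it on the vector, and reuse it for every row instead of allocating per call. A sketch of that pattern (names real, surrounding wiring simplified):

import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;

public final class ScratchWritableSketch {
  static TimestampWritable readRow(TimestampColumnVector dcv, int row) {
    TimestampWritable w = (TimestampWritable) dcv.getScratchWritable();
    if (w == null) {
      w = new TimestampWritable();        // allocated once per column vector
      dcv.setScratchWritable(w);
    }
    w.set(dcv.asScratchTimestamp(row));   // copy the row value into the scratch
    return w;
  }
}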

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
index 998b6b1..0c83494 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -26,12 +25,15 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.io.Text;
 import org.apache.hive.common.util.DateParser;
 
+import java.util.Arrays;
+import java.sql.Date;
+
 public class VectorUDFDateAddColCol extends VectorExpression {
   private static final long serialVersionUID = 1L;
 
@@ -41,6 +43,7 @@ public class VectorUDFDateAddColCol extends VectorExpression {
   protected boolean isPositive = true;
 
   private transient final Text text = new Text();
+  private transient final Date date = new Date(0);
   private transient final DateParser dateParser = new DateParser();
 
   // Transient members initialized by transientInit method.
@@ -258,7 +261,7 @@ public class VectorUDFDateAddColCol extends VectorExpression {
   protected long evaluateTimestamp(ColumnVector columnVector, int index, long numDays) {
     TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
     // Convert to date value (in days)
-    long days = DateWritableV2.millisToDays(tcv.getTime(index));
+    long days = DateWritable.millisToDays(tcv.getTime(index));
     if (isPositive) {
       days += numDays;
     } else {
@@ -278,7 +281,7 @@ public class VectorUDFDateAddColCol extends VectorExpression {
     }
     TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
     // Convert to date value (in days)
-    long days = DateWritableV2.millisToDays(tcv.getTime(0));
+    long days = DateWritable.millisToDays(tcv.getTime(0));
 
     evaluateRepeatedCommon(days, vector2, outputVector, selectedInUse, selected, n);
   }
@@ -289,14 +292,13 @@ public class VectorUDFDateAddColCol extends VectorExpression {
       outputVector.isNull[index] = true;
     } else {
       text.set(inputColumnVector1.vector[index], inputColumnVector1.start[index], inputColumnVector1.length[index]);
-      Date hDate = new Date();
-      boolean parsed = dateParser.parseDate(text.toString(), hDate);
+      boolean parsed = dateParser.parseDate(text.toString(), date);
       if (!parsed) {
         outputVector.noNulls = false;
         outputVector.isNull[index] = true;
         return;
       }
-      long days = DateWritableV2.millisToDays(hDate.toEpochMilli());
+      long days = DateWritable.millisToDays(date.getTime());
       if (isPositive) {
         days += numDays;
       } else {
@@ -317,7 +319,6 @@ public class VectorUDFDateAddColCol extends VectorExpression {
     }
     text.set(
         inputColumnVector1.vector[0], inputColumnVector1.start[0], inputColumnVector1.length[0]);
-    Date date = new Date();
     boolean parsed = dateParser.parseDate(text.toString(), date);
     if (!parsed) {
       outputVector.noNulls = false;
@@ -325,7 +326,7 @@ public class VectorUDFDateAddColCol extends VectorExpression {
       outputVector.isRepeating = true;
       return;
     }
-    long days = DateWritableV2.millisToDays(date.toEpochMilli());
+    long days = DateWritable.millisToDays(date.getTime());
 
     evaluateRepeatedCommon(days, vector2, outputVector, selectedInUse, selected, n);
   }
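
evaluateTimestamp above implements date_add/date_sub over a timestamp column: collapse the timestamp to whole epoch days, then apply the day delta. A sketch of the arithmetic, with floor division standing in for DateWritable.millisToDays (which additionally corrects for the local zone):

public final class DateAddSketch {
  private static final long MILLIS_PER_DAY = 86_400_000L;

  static long dateAddDays(long epochMillis, long numDays, boolean isPositive) {
    long days = Math.floorDiv(epochMillis, MILLIS_PER_DAY); // ~millisToDays
    return isPositive ? days + numDays : days - numDays;    // date_add vs date_sub
  }

  public static void main(String[] args) {
    System.out.println(dateAddDays(864_000_000L, 5, true)); // 10 days + 5 = 15
  }
}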

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
index 30b20c8..a73d2e6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
@@ -25,12 +25,13 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.io.Text;
 import org.apache.hive.common.util.DateParser;
 
+import java.sql.Date;
 import java.util.Arrays;
 
 public class VectorUDFDateAddColScalar extends VectorExpression {
@@ -43,6 +44,7 @@ public class VectorUDFDateAddColScalar extends VectorExpression {
 
   private transient final Text text = new Text();
   private transient final DateParser dateParser = new DateParser();
+  private transient final Date date = new Date(0);
 
   // Transient members initialized by transientInit method.
   private transient PrimitiveCategory primitiveCategory;
@@ -303,7 +305,7 @@ public class VectorUDFDateAddColScalar extends VectorExpression {
   protected long evaluateTimestamp(ColumnVector columnVector, int index) {
     TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
     // Convert to date value (in days)
-    long days = DateWritableV2.millisToDays(tcv.getTime(index));
+    long days = DateWritable.millisToDays(tcv.getTime(index));
     if (isPositive) {
       days += numDays;
     } else {
@@ -326,14 +328,13 @@ public class VectorUDFDateAddColScalar extends VectorExpression {
   protected void evaluateString(ColumnVector columnVector, LongColumnVector outputVector, int i) {
     BytesColumnVector bcv = (BytesColumnVector) columnVector;
     text.set(bcv.vector[i], bcv.start[i], bcv.length[i]);
-    org.apache.hadoop.hive.common.type.Date hDate = new org.apache.hadoop.hive.common.type.Date();
-    boolean parsed = dateParser.parseDate(text.toString(), hDate);
+    boolean parsed = dateParser.parseDate(text.toString(), date);
     if (!parsed) {
       outputVector.noNulls = false;
       outputVector.isNull[i] = true;
       return;
     }
-    long days = DateWritableV2.millisToDays(hDate.toEpochMilli());
+    long days = DateWritable.millisToDays(date.getTime());
     if (isPositive) {
       days += numDays;
     } else {
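
Both date_add variants now parse string inputs into a single preallocated java.sql.Date through DateParser instead of allocating a Date per row. A sketch of that reuse, with floor division again standing in for DateWritable.millisToDays:

import java.sql.Date;
import org.apache.hive.common.util.DateParser;

public final class DateParseSketch {
  private final DateParser parser = new DateParser();
  private final Date scratch = new Date(0);   // reused across calls

  Long toEpochDays(String s) {
    if (!parser.parseDate(s, scratch)) {
      return null;                            // unparsable input -> NULL
    }
    return Math.floorDiv(scratch.getTime(), 86_400_000L);
  }
}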

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
index 669ca55..c861321 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
@@ -18,17 +18,17 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hive.common.util.DateParser;
 
 import java.nio.charset.StandardCharsets;
+import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.Arrays;
 
@@ -46,7 +46,7 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
   protected boolean isPositive = true;
 
   private transient final DateParser dateParser = new DateParser();
-  private transient final Date baseDate = new Date();
+  private transient final Date baseDate = new Date(0);
 
   // Transient members initialized by transientInit method.
   private transient PrimitiveCategory primitiveCategory;
@@ -99,11 +99,11 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
 
     switch (primitiveCategory) {
       case DATE:
-        baseDate.setTimeInMillis(DateWritableV2.daysToMillis((int) longValue));
+        baseDate.setTime(DateWritable.daysToMillis((int) longValue));
         break;
 
       case TIMESTAMP:
-        baseDate.setTimeInMillis(timestampValue.getTime());
+        baseDate.setTime(timestampValue.getTime());
         break;
 
       case STRING:
@@ -137,7 +137,7 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
     // We do not need to do a column reset since we are carefully changing the output.
     outputColVector.isRepeating = false;
 
-    long baseDateDays = DateWritableV2.millisToDays(baseDate.toEpochMilli());
+    long baseDateDays = DateWritable.millisToDays(baseDate.getTime());
     if (inputCol.isRepeating) {
       if (inputCol.noNulls || !inputCol.isNull[0]) {
         outputColVector.isNull[0] = false;
@@ -245,12 +245,11 @@ public class VectorUDFDateAddScalarCol extends VectorExpression {
   public String vectorExpressionParameters() {
     String value;
     if (object instanceof Long) {
-      Date tempDate = new Date();
-      tempDate.setTimeInMillis(DateWritableV2.daysToMillis((int) longValue));
+      Date tempDate = new Date(0);
+      tempDate.setTime(DateWritable.daysToMillis((int) longValue));
       value = tempDate.toString();
     } else if (object instanceof Timestamp) {
-      value = org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
-          timestampValue.getTime(), timestampValue.getNanos()).toString();
+      value = this.timestampValue.toString();
     } else if (object instanceof byte []) {
       value = new String(this.stringValue, StandardCharsets.UTF_8);
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
index ff29ee3..bb12fcb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
@@ -20,12 +20,13 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -216,7 +217,7 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
         String string = new String(input.vector[0], input.start[0], input.length[0]);
         try {
           date.setTime(formatter.parse(string).getTime());
-          output.vector[0] = DateWritableV2.dateToDays(date);
+          output.vector[0] = DateWritable.dateToDays(date);
           output.isNull[0] = false;
         } catch (ParseException e) {
           output.isNull[0] = true;
@@ -295,7 +296,7 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
     String string = new String(input.vector[i], input.start[i], input.length[i]);
     try {
       date.setTime(formatter.parse(string).getTime());
-      output.vector[i] = DateWritableV2.dateToDays(date);
+      output.vector[i] = DateWritable.dateToDays(date);
     } catch (ParseException e) {
       output.isNull[i] = true;
       output.noNulls = false;
@@ -313,7 +314,7 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
     if (input.isRepeating) {
       if (input.noNulls || !input.isNull[0]) {
         date.setTime(input.getTime(0));
-        output.vector[0] = DateWritableV2.dateToDays(date);
+        output.vector[0] = DateWritable.dateToDays(date);
         output.isNull[0] = false;
       } else {
         output.isNull[0] = true;
@@ -333,14 +334,14 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
           for (int j=0; j < size; j++) {
             int i = sel[j];
             date.setTime(input.getTime(i));
-            output.vector[i] = DateWritableV2.dateToDays(date);
+            output.vector[i] = DateWritable.dateToDays(date);
             output.isNull[i] = false;
           }
         } else {
           for (int j=0; j < size; j++) {
             int i = sel[j];
             date.setTime(input.getTime(i));
-            output.vector[i] = DateWritableV2.dateToDays(date);
+            output.vector[i] = DateWritable.dateToDays(date);
           }
         }
       } else {
@@ -351,7 +352,7 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
         }
         for (int i = 0; i < size; i++) {
           date.setTime(input.getTime(i));
-          output.vector[i] = DateWritableV2.dateToDays(date);
+          output.vector[i] = DateWritable.dateToDays(date);
         }
       }
     } else /* there are nulls in our column */ {
@@ -372,14 +373,14 @@ public class VectorUDFDateDiffColCol extends VectorExpression {
           int i = sel[j];
           if (!input.isNull[i]) {
             date.setTime(input.getTime(i));
-            output.vector[i] = DateWritableV2.dateToDays(date);
+            output.vector[i] = DateWritable.dateToDays(date);
           }
         }
       } else {
         for (int i = 0; i < size; i++) {
           if (!input.isNull[i]) {
             date.setTime(input.getTime(i));
-            output.vector[i] = DateWritableV2.dateToDays(date);
+            output.vector[i] = DateWritable.dateToDays(date);
           }
         }
       }
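
Aside (illustration only, not part of the reverted patch): every hunk in this file reduces datediff to a subtraction of epoch-day counts, so the behavioral question is entirely how dateToDays() maps a Date to a day number. A minimal standalone sketch of that arithmetic in the pre-V2, default-time-zone style, using plain JDK types:

    import java.sql.Date;
    import java.util.TimeZone;

    public class DateToDaysSketch {
      // Illustrative only: mirrors the idea behind DateWritable.dateToDays().
      // The local-midnight millis are shifted by the zone offset so the floor
      // division lands on the intended calendar day (floorDiv also handles
      // pre-1970 dates correctly).
      static int dateToDays(Date d) {
        long localMillis = d.getTime(); // midnight in the JVM default zone
        long offset = TimeZone.getDefault().getOffset(localMillis);
        return (int) Math.floorDiv(localMillis + offset, 86_400_000L);
      }

      public static void main(String[] args) {
        Date end = Date.valueOf("1999-06-07");
        Date start = Date.valueOf("1999-06-01");
        System.out.println(dateToDays(end) - dateToDays(start)); // 6
      }
    }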

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
index caedc80..55af413 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.metastore.parser.ExpressionTree.Operator;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -25,11 +26,12 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.io.Text;
 
+import java.nio.charset.StandardCharsets;
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.text.ParseException;
@@ -104,7 +106,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
 
       case TIMESTAMP:
         date.setTime(timestampValue.getTime());
-        baseDate = DateWritableV2.dateToDays(date);
+        baseDate = DateWritable.dateToDays(date);
         break;
 
       case STRING:
@@ -112,7 +114,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
       case VARCHAR:
         try {
           date.setTime(formatter.parse(new String(bytesValue, "UTF-8")).getTime());
-          baseDate = DateWritableV2.dateToDays(date);
+          baseDate = DateWritable.dateToDays(date);
           break;
         } catch (Exception e) {
           outputColVector.noNulls = false;
@@ -344,7 +346,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
   protected int evaluateTimestamp(ColumnVector columnVector, int index) {
     TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
     date.setTime(tcv.getTime(index));
-    return DateWritableV2.dateToDays(date) - baseDate;
+    return DateWritable.dateToDays(date) - baseDate;
   }
 
   protected int evaluateDate(ColumnVector columnVector, int index) {
@@ -357,7 +359,7 @@ public class VectorUDFDateDiffColScalar extends VectorExpression {
     text.set(bcv.vector[i], bcv.start[i], bcv.length[i]);
     try {
       date.setTime(formatter.parse(text.toString()).getTime());
-      output.vector[i] = DateWritableV2.dateToDays(date) - baseDate;
+      output.vector[i] = DateWritable.dateToDays(date) - baseDate;
     } catch (ParseException e) {
       output.vector[i] = 1;
       output.isNull[i] = true;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
index 28addf7..c51d3cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.io.Text;
@@ -105,7 +105,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
 
       case TIMESTAMP:
         date.setTime(timestampValue.getTime());
-        baseDate = DateWritableV2.dateToDays(date);
+        baseDate = DateWritable.dateToDays(date);
         break;
 
       case STRING:
@@ -113,7 +113,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
       case VARCHAR:
         try {
           date.setTime(formatter.parse(new String(stringValue, "UTF-8")).getTime());
-          baseDate = DateWritableV2.dateToDays(date);
+          baseDate = DateWritable.dateToDays(date);
           break;
         } catch (Exception e) {
           outputColVector.noNulls = false;
@@ -346,7 +346,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
   protected int evaluateTimestamp(ColumnVector columnVector, int index) {
     TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
     date.setTime(tcv.getTime(index));
-    return baseDate - DateWritableV2.dateToDays(date);
+    return baseDate - DateWritable.dateToDays(date);
   }
 
   protected int evaluateDate(ColumnVector columnVector, int index) {
@@ -359,7 +359,7 @@ public class VectorUDFDateDiffScalarCol extends VectorExpression {
     text.set(bcv.vector[i], bcv.start[i], bcv.length[i]);
     try {
       date.setTime(formatter.parse(text.toString()).getTime());
-      output.vector[i] = baseDate - DateWritableV2.dateToDays(date);
+      output.vector[i] = baseDate - DateWritable.dateToDays(date);
     } catch (ParseException e) {
       output.vector[i] = 1;
       output.isNull[i] = true;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
index 8e5f9da..202f5d8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
@@ -18,6 +18,14 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+import java.io.UnsupportedEncodingException;
+import java.sql.Date;
+import java.text.SimpleDateFormat;
+
 /**
  * Vectorized version of TO_DATE(TIMESTAMP)/TO_DATE(DATE).
  * As TO_DATE() now returns DATE type, this should be the same behavior as the DATE cast operator.

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java
index accf32c..8d87ef7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java
@@ -18,6 +18,17 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorGroupByOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hive.common.util.DateParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import java.sql.Date;
+
 /**
  * Vectorized version of TO_DATE(STRING)
  * As TO_DATE() now returns DATE type, this should be the same behavior as the DATE cast operator.

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
index 5b497e5..7e7080c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
@@ -18,6 +18,15 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
+import java.io.UnsupportedEncodingException;
+import java.sql.Date;
+import java.text.SimpleDateFormat;
+
 /**
  * Vectorized version of TO_DATE(timestamp).
  * As TO_DATE() now returns DATE type, this should be the same behavior as the DATE cast operator.

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekString.java
index e8f1454..a701d70 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDayOfWeekString.java
@@ -23,7 +23,6 @@ import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
-import java.util.TimeZone;
 
 import org.apache.hadoop.io.Text;
 
@@ -36,8 +35,7 @@ public final class VectorUDFDayOfWeekString extends VectorUDFTimestampFieldStrin
   private static final long serialVersionUID = 1L;
 
   private transient final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
-  private transient final Calendar calendar = Calendar.getInstance(
-      TimeZone.getTimeZone("UTC"));
+  private transient final Calendar calendar = Calendar.getInstance();
 
   public VectorUDFDayOfWeekString(int colNum, int outputColumnNum) {
     super(colNum, outputColumnNum, -1, -1);
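
Aside (illustration only, not part of the patch): the substantive change in this and the following field-extraction classes is Calendar.getInstance() losing its UTC pin, so fields are computed in the JVM default zone. A small JDK-only demonstration that the same instant can yield different field values per zone (the zone IDs are arbitrary examples):

    import java.util.Calendar;
    import java.util.TimeZone;

    public class CalendarZoneSketch {
      public static void main(String[] args) {
        long t = 1534204800000L; // 2018-08-14 00:00:00 UTC, a Tuesday
        Calendar utc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        Calendar la = Calendar.getInstance(TimeZone.getTimeZone("America/Los_Angeles"));
        utc.setTimeInMillis(t);
        la.setTimeInMillis(t);
        // Same instant, different calendar day, hence a different field value.
        System.out.println(utc.get(Calendar.DAY_OF_WEEK)); // 3 (Tuesday)
        System.out.println(la.get(Calendar.DAY_OF_WEEK));  // 2 (still Monday, Aug 13, locally)
      }
    }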

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
index 837de9d..a1167b2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
@@ -20,14 +20,13 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.util.Arrays;
 import java.util.Calendar;
-import java.util.TimeZone;
 
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hive.common.util.DateUtils;
@@ -44,8 +43,7 @@ public abstract class VectorUDFTimestampFieldDate extends VectorExpression {
   protected final int colNum;
   protected final int field;
 
-  protected transient final Calendar calendar = Calendar.getInstance(
-      TimeZone.getTimeZone("UTC"));
+  protected transient final Calendar calendar = Calendar.getInstance();
 
   public VectorUDFTimestampFieldDate(int field, int colNum, int outputColumnNum) {
     super(outputColumnNum);
@@ -71,7 +69,7 @@ public abstract class VectorUDFTimestampFieldDate extends VectorExpression {
   }
 
   protected long getDateField(long days) {
-    calendar.setTimeInMillis(DateWritableV2.daysToMillis((int) days));
+    calendar.setTimeInMillis(DateWritable.daysToMillis((int) days));
     return calendar.get(field);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
index 9acfa86..931cc98 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 import java.text.ParseException;
 import java.util.Calendar;
-import java.util.TimeZone;
 
 /**
  * Abstract class to return various fields from a String.
@@ -40,8 +39,7 @@ public abstract class VectorUDFTimestampFieldString extends VectorExpression {
   protected final int fieldLength;
   private static final String patternMin = "0000-00-00 00:00:00.000000000";
   private static final String patternMax = "9999-19-99 29:59:59.999999999";
-  protected transient final Calendar calendar = Calendar.getInstance(
-      TimeZone.getTimeZone("UTC"));
+  protected transient final Calendar calendar = Calendar.getInstance();
 
   public VectorUDFTimestampFieldString(int colNum, int outputColumnNum, int fieldStart, int fieldLength) {
     super(outputColumnNum);

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
index 94e8b47..a9ea1f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.util.Arrays;
 import java.util.Calendar;
-import java.util.TimeZone;
 
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -44,8 +43,7 @@ public abstract class VectorUDFTimestampFieldTimestamp extends VectorExpression
   protected final int colNum;
   protected final int field;
 
-  protected transient final Calendar calendar = Calendar.getInstance(
-      TimeZone.getTimeZone("UTC"));
+  protected transient final Calendar calendar = Calendar.getInstance();
 
   public VectorUDFTimestampFieldTimestamp(int field, int colNum, int outputColumnNum) {
     super(outputColumnNum);

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
index 1f83ead..3515329 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 /**
  * Return Unix Timestamp.
@@ -28,7 +28,7 @@ public final class VectorUDFUnixTimeStampDate extends VectorUDFTimestampFieldDat
 
   private static final long serialVersionUID = 1L;
 
-  private DateWritableV2 dateWritable;
+  private DateWritable dateWritable;
 
   @Override
   protected long getDateField(long days) {
@@ -39,7 +39,7 @@ public final class VectorUDFUnixTimeStampDate extends VectorUDFTimestampFieldDat
   public VectorUDFUnixTimeStampDate(int colNum, int outputColumnNum) {
     /* not a real field */
     super(-1, colNum, outputColumnNum);
-    dateWritable = new DateWritableV2();
+    dateWritable = new DateWritable();
   }
 
   public VectorUDFUnixTimeStampDate() {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampString.java
index 3b5b33b..aaea433 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampString.java
@@ -25,7 +25,6 @@ import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
-import java.util.TimeZone;
 
 /**
  * Return Unix Timestamp.
@@ -35,9 +34,8 @@ public final class VectorUDFUnixTimeStampString extends VectorUDFTimestampFieldS
 
   private static final long serialVersionUID = 1L;
 
-  private transient final SimpleDateFormat format = getFormatter();
-  private transient final Calendar calendar = Calendar.getInstance(
-      TimeZone.getTimeZone("UTC"));
+  private transient final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+  private transient final Calendar calendar = Calendar.getInstance();
 
   public VectorUDFUnixTimeStampString(int colNum, int outputColumnNum) {
     super(colNum, outputColumnNum, -1, -1);
@@ -58,10 +56,4 @@ public final class VectorUDFUnixTimeStampString extends VectorUDFTimestampFieldS
     calendar.setTime(date);
     return calendar.getTimeInMillis() / 1000;
   }
-
-  private static SimpleDateFormat getFormatter() {
-    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-    format.setTimeZone(TimeZone.getTimeZone("UTC"));
-    return format;
-  }
 }
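
Aside (illustration only, not part of the patch): dropping getFormatter() means the parse is no longer pinned to UTC, so the same string can map to different epoch seconds depending on the machine's zone. A JDK-only sketch of the divergence:

    import java.text.SimpleDateFormat;
    import java.util.TimeZone;

    public class UnixTimestampSketch {
      public static void main(String[] args) throws Exception {
        String s = "2000-01-01 00:00:00";
        SimpleDateFormat utcFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        utcFmt.setTimeZone(TimeZone.getTimeZone("UTC"));     // pre-revert behavior
        SimpleDateFormat localFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); // post-revert
        // The two instants differ by the local UTC offset at that date.
        System.out.println(utcFmt.parse(s).getTime() / 1000);   // 946684800
        System.out.println(localFmt.parse(s).getTime() / 1000); // zone-dependent
      }
    }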

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
index 31ef6ad..e011657 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
@@ -17,17 +17,37 @@
  */
 package org.apache.hadoop.hive.ql.exec.vector.udf;
 
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.Map;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.SettableMapObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.*;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
 
 /**
  * A VectorUDFAdaptor is a vectorized expression for invoking a custom

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
index 434a5b8..c88ee99 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.hive.llap.DebugUtils;
 
 import java.util.Arrays;
 
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -37,7 +37,6 @@ import java.util.Map;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -48,12 +47,13 @@ import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
@@ -429,18 +429,18 @@ public abstract class BatchToRowReader<StructType, UnionType>
     }
   }
 
-  public static DateWritableV2 nextDate(ColumnVector vector,
-                                        int row,
-                                        Object previous) {
+  public static DateWritable nextDate(ColumnVector vector,
+                               int row,
+                               Object previous) {
     if (vector.isRepeating) {
       row = 0;
     }
     if (vector.noNulls || !vector.isNull[row]) {
-      DateWritableV2 result;
-      if (previous == null || previous.getClass() != DateWritableV2.class) {
-        result = new DateWritableV2();
+      DateWritable result;
+      if (previous == null || previous.getClass() != DateWritable.class) {
+        result = new DateWritable();
       } else {
-        result = (DateWritableV2) previous;
+        result = (DateWritable) previous;
       }
       int date = (int) ((LongColumnVector) vector).vector[row];
       result.set(date);
@@ -450,18 +450,18 @@ public abstract class BatchToRowReader<StructType, UnionType>
     }
   }
 
-  public static TimestampWritableV2 nextTimestamp(ColumnVector vector,
-                                                  int row,
-                                                  Object previous) {
+  public static TimestampWritable nextTimestamp(ColumnVector vector,
+                                         int row,
+                                         Object previous) {
     if (vector.isRepeating) {
       row = 0;
     }
     if (vector.noNulls || !vector.isNull[row]) {
-      TimestampWritableV2 result;
-      if (previous == null || previous.getClass() != TimestampWritableV2.class) {
-        result = new TimestampWritableV2();
+      TimestampWritable result;
+      if (previous == null || previous.getClass() != TimestampWritable.class) {
+        result = new TimestampWritable();
       } else {
-        result = (TimestampWritableV2) previous;
+        result = (TimestampWritable) previous;
       }
       TimestampColumnVector tcv = (TimestampColumnVector) vector;
       result.setInternal(tcv.time[row], tcv.nanos[row]);
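
Aside (illustration only, not part of the patch): nextDate()/nextTimestamp() reuse the previous Writable when its class matches, a common Hadoop pattern for avoiding a per-row allocation. The pattern in isolation, with hypothetical names:

    // Hypothetical helper in the style of nextDate()/nextTimestamp():
    // recycle the caller-supplied object when possible, else allocate.
    static StringBuilder next(Object previous) {
      StringBuilder result;
      if (previous == null || previous.getClass() != StringBuilder.class) {
        result = new StringBuilder();
      } else {
        result = (StringBuilder) previous;
        result.setLength(0); // reset any stale state before refilling
      }
      return result;
    }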

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
index e7dfb05..56d590c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
@@ -58,7 +58,6 @@ public final class OrcFile extends org.apache.orc.OrcFile {
   public static class ReaderOptions extends org.apache.orc.OrcFile.ReaderOptions {
     public ReaderOptions(Configuration conf) {
       super(conf);
-      useUTCTimestamp(true);
     }
 
     public ReaderOptions filesystem(FileSystem fs) {
@@ -80,11 +79,6 @@ public final class OrcFile extends org.apache.orc.OrcFile {
       super.orcTail(orcTail);
       return this;
     }
-
-    public ReaderOptions useUTCTimestamp(boolean value) {
-      super.useUTCTimestamp(value);
-      return this;
-    }
   }
 
   public static ReaderOptions readerOptions(Configuration conf) {
@@ -110,7 +104,6 @@ public final class OrcFile extends org.apache.orc.OrcFile {
 
     WriterOptions(Properties tableProperties, Configuration conf) {
       super(tableProperties, conf);
-      useUTCTimestamp(true);
     }
 
    /**
@@ -282,11 +275,6 @@ public final class OrcFile extends org.apache.orc.OrcFile {
       return this;
     }
 
-    public WriterOptions useUTCTimestamp(boolean value) {
-      super.useUTCTimestamp(value);
-      return this;
-    }
-
     ObjectInspector getInspector() {
       return inspector;
     }
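
Aside (illustration only, not part of the patch): the deleted overrides only forwarded to the useUTCTimestamp flag on org.apache.orc.OrcFile's option classes, which these constructors had been defaulting to true. Assuming the bundled ORC version exposes that flag on both classes (as the deleted super calls imply), a caller could still request the old semantics explicitly:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.orc.OrcFile;

    public class OrcUtcOptionSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Illustrative: set the underlying ORC flag directly instead of
        // relying on the removed Hive-side defaults.
        OrcFile.WriterOptions w = OrcFile.writerOptions(conf).useUTCTimestamp(true);
        OrcFile.ReaderOptions r = OrcFile.readerOptions(conf).useUTCTimestamp(true);
        System.out.println(w != null && r != null); // options built
      }
    }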

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
index d177e3f..c6fe4fc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
@@ -38,19 +38,20 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.orc.OrcFile;
 import org.apache.orc.TypeDescription;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -414,18 +415,18 @@ public class RecordReaderImpl extends org.apache.orc.impl.RecordReaderImpl
     }
   }
 
-  static DateWritableV2 nextDate(ColumnVector vector,
-                                 int row,
-                                 Object previous) {
+  static DateWritable nextDate(ColumnVector vector,
+                               int row,
+                               Object previous) {
     if (vector.isRepeating) {
       row = 0;
     }
     if (vector.noNulls || !vector.isNull[row]) {
-      DateWritableV2 result;
-      if (previous == null || previous.getClass() != DateWritableV2.class) {
-        result = new DateWritableV2();
+      DateWritable result;
+      if (previous == null || previous.getClass() != DateWritable.class) {
+        result = new DateWritable();
       } else {
-        result = (DateWritableV2) previous;
+        result = (DateWritable) previous;
       }
       int date = (int) ((LongColumnVector) vector).vector[row];
       result.set(date);
@@ -435,18 +436,18 @@ public class RecordReaderImpl extends org.apache.orc.impl.RecordReaderImpl
     }
   }
 
-  static TimestampWritableV2 nextTimestamp(ColumnVector vector,
-                                           int row,
-                                           Object previous) {
+  static TimestampWritable nextTimestamp(ColumnVector vector,
+                                         int row,
+                                         Object previous) {
     if (vector.isRepeating) {
       row = 0;
     }
     if (vector.noNulls || !vector.isNull[row]) {
-      TimestampWritableV2 result;
-      if (previous == null || previous.getClass() != TimestampWritableV2.class) {
-        result = new TimestampWritableV2();
+      TimestampWritable result;
+      if (previous == null || previous.getClass() != TimestampWritable.class) {
+        result = new TimestampWritable();
       } else {
-        result = (TimestampWritableV2) previous;
+        result = (TimestampWritable) previous;
       }
       TimestampColumnVector tcv = (TimestampColumnVector) vector;
       result.setInternal(tcv.time[row], tcv.nanos[row]);

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
index 4082c61..91a01e9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
@@ -19,17 +19,19 @@
 package org.apache.hadoop.hive.ql.io.orc;
 
 import java.io.IOException;
+import java.sql.Timestamp;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ListColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -41,7 +43,6 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
@@ -62,6 +63,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectIn
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.orc.PhysicalWriter;
 import org.apache.orc.TypeDescription;
 
 /**
@@ -203,9 +206,9 @@ public class WriterImpl extends org.apache.orc.impl.WriterImpl implements Writer
             }
             case TIMESTAMP: {
               TimestampColumnVector vector = (TimestampColumnVector) column;
-              vector.setIsUTC(true);
-              vector.set(rowId, ((TimestampObjectInspector) inspector)
-                  .getPrimitiveJavaObject(obj).toSqlTimestamp());
+              Timestamp ts = ((TimestampObjectInspector) inspector)
+                  .getPrimitiveJavaObject(obj);
+              vector.set(rowId, ts);
               break;
             }
             case DATE: {
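
Aside (illustration only, not part of the patch): with setIsUTC(true) gone, the vector takes the java.sql.Timestamp as-is. For orientation, TimestampColumnVector stores each value split into a millis array and a nanos array, which is what set(rowId, ts) fills:

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

    public class TimestampVectorSketch {
      public static void main(String[] args) {
        TimestampColumnVector tcv = new TimestampColumnVector(1024);
        Timestamp ts = Timestamp.valueOf("1999-06-07 12:34:56.123456789");
        tcv.set(0, ts);
        // One instant, stored as epoch millis plus the full nano-of-second.
        System.out.println(tcv.time[0] == ts.getTime()); // true
        System.out.println(tcv.nanos[0]);                // 123456789
      }
    }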

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
index 08788cf..8be8d13 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
@@ -14,18 +14,18 @@
 package org.apache.hadoop.hive.ql.io.parquet.convert;
 
 import java.math.BigDecimal;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Map;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -340,12 +340,12 @@ public enum ETypeConverter {
       };
     }
   },
-  ETIMESTAMP_CONVERTER(TimestampWritableV2.class) {
+  ETIMESTAMP_CONVERTER(TimestampWritable.class) {
     @Override
     PrimitiveConverter getConverter(final PrimitiveType type, final int index, final ConverterParent parent, TypeInfo hiveTypeInfo) {
-      return new BinaryConverter<TimestampWritableV2>(type, parent, index) {
+      return new BinaryConverter<TimestampWritable>(type, parent, index) {
         @Override
-        protected TimestampWritableV2 convert(Binary binary) {
+        protected TimestampWritable convert(Binary binary) {
           NanoTime nt = NanoTime.fromBinary(binary);
           Map<String, String> metadata = parent.getMetadata();
           //Current Hive parquet timestamp implementation stores it in UTC, but other components do not do that.
@@ -353,18 +353,18 @@ public enum ETypeConverter {
           boolean skipConversion = Boolean.parseBoolean(
               metadata.get(HiveConf.ConfVars.HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION.varname));
           Timestamp ts = NanoTimeUtils.getTimestamp(nt, skipConversion);
-          return new TimestampWritableV2(ts);
+          return new TimestampWritable(ts);
         }
       };
     }
   },
-  EDATE_CONVERTER(DateWritableV2.class) {
+  EDATE_CONVERTER(DateWritable.class) {
     @Override
     PrimitiveConverter getConverter(final PrimitiveType type, final int index, final ConverterParent parent, TypeInfo hiveTypeInfo) {
       return new PrimitiveConverter() {
         @Override
         public void addInt(final int value) {
-          parent.set(index, new DateWritableV2(value));
+          parent.set(index, new DateWritable(value));
         }
       };
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java
index 106ace0..dead324 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/primitive/ParquetStringInspector.java
@@ -18,7 +18,7 @@ import java.nio.charset.CharacterCodingException;
 
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaStringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableStringObjectInspector;
 import org.apache.hadoop.io.BytesWritable;
@@ -29,7 +29,7 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.BooleanWritable;
 
 /**
- * The ParquetStringInspector inspects a BytesWritable, TimestampWritableV2, HiveDecimalWritable,
+ * The ParquetStringInspector inspects a BytesWritable, TimestampWritable, HiveDecimalWritable,
  * DoubleWritable, FloatWritable, LongWritable, IntWritable, and BooleanWritable to give a Text
  * or String.
  *
@@ -58,7 +58,7 @@ public class ParquetStringInspector extends JavaStringObjectInspector implements
       return new Text((String) o);
     }
 
-    if ((o instanceof TimestampWritableV2) || (o instanceof HiveDecimalWritable)
+    if ((o instanceof TimestampWritable) || (o instanceof HiveDecimalWritable)
         || (o instanceof DoubleWritable) || (o instanceof FloatWritable)
         || (o instanceof LongWritable) || (o instanceof IntWritable)
         || (o instanceof BooleanWritable)) {
@@ -90,7 +90,7 @@ public class ParquetStringInspector extends JavaStringObjectInspector implements
       return (String) o;
     }
 
-    if ((o instanceof TimestampWritableV2) || (o instanceof HiveDecimalWritable)
+    if ((o instanceof TimestampWritable) || (o instanceof HiveDecimalWritable)
         || (o instanceof DoubleWritable) || (o instanceof FloatWritable)
         || (o instanceof LongWritable) || (o instanceof IntWritable)
         || (o instanceof BooleanWritable)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
index bf78d8c..677fb53 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
@@ -13,13 +13,12 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.timestamp;
 
+import java.sql.Timestamp;
 import java.util.Calendar;
 import java.util.GregorianCalendar;
 import java.util.TimeZone;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
-
 import jodd.datetime.JDateTime;
 
 /**
@@ -59,7 +58,7 @@ public class NanoTimeUtils {
    public static NanoTime getNanoTime(Timestamp ts, boolean skipConversion) {
 
      Calendar calendar = getCalendar(skipConversion);
-     calendar.setTimeInMillis(ts.toEpochMilli());
+     calendar.setTime(ts);
      int year = calendar.get(Calendar.YEAR);
      if (calendar.get(Calendar.ERA) == GregorianCalendar.BC) {
        year = 1 - year;
@@ -107,7 +106,8 @@ public class NanoTimeUtils {
      calendar.set(Calendar.HOUR_OF_DAY, hour);
      calendar.set(Calendar.MINUTE, minutes);
      calendar.set(Calendar.SECOND, seconds);
-     Timestamp ts = Timestamp.ofEpochMilli(calendar.getTimeInMillis(), (int) nanos);
+     Timestamp ts = new Timestamp(calendar.getTimeInMillis());
+     ts.setNanos((int) nanos);
      return ts;
    }
 }
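
Aside (illustration only, not part of the patch): the two-step construction above is a classic java.sql.Timestamp subtlety: the constructor seeds whole-second millis from the calendar, and setNanos() then supplies the entire fractional part, which getTime() folds back in. A JDK-only check of that behavior:

    import java.sql.Timestamp;

    public class TimestampNanosSketch {
      public static void main(String[] args) {
        long wholeSeconds = 1234567000L;   // some whole-second instant
        Timestamp ts = new Timestamp(wholeSeconds);
        ts.setNanos(123456789);            // replaces the sub-second part
        System.out.println(ts.getTime());  // 1234567123: millis now include nanos/1e6
        System.out.println(ts.getNanos()); // 123456789
      }
    }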

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java
index 26a4511..4fd4cfd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java
@@ -19,10 +19,10 @@
 package org.apache.hadoop.hive.ql.io.parquet.vector;
 
 import org.apache.parquet.bytes.ByteBufferInputStream;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.parquet.column.Dictionary;
 
 import java.io.IOException;
+import java.sql.Timestamp;
 
 /**
  * The interface to wrap the underlying Parquet dictionary and non dictionary encoded page reader.

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
index 9170e9f..f5f19e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.io.parquet.vector;
 
 import org.apache.hadoop.hive.common.type.HiveBaseChar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
@@ -42,6 +41,7 @@ import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.sql.Timestamp;
 import java.util.Arrays;
 
 /**

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
index e4c6156..e89a736 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
@@ -13,7 +13,6 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.vector;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -388,7 +387,7 @@ public class VectorizedPrimitiveColumnReader extends BaseVectorizedColumnReader
         switch (descriptor.getType()) {
         //INT64 is not yet supported
         case INT96:
-          c.set(rowId, dataColumn.readTimestamp().toSqlTimestamp());
+          c.set(rowId, dataColumn.readTimestamp());
           break;
         default:
           throw new IOException(
@@ -511,7 +510,7 @@ public class VectorizedPrimitiveColumnReader extends BaseVectorizedColumnReader
     case TIMESTAMP:
       for (int i = rowId; i < rowId + num; ++i) {
         ((TimestampColumnVector) column)
-            .set(i, dictionary.readTimestamp((int) dictionaryIds.vector[i]).toSqlTimestamp());
+            .set(i, dictionary.readTimestamp((int) dictionaryIds.vector[i]));
       }
       break;
     case INTERVAL_DAY_TIME:

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
index 3d61c33..cf1210b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
@@ -13,12 +13,12 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.write;
 
-import org.apache.hadoop.hive.common.type.Date;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
 import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -46,9 +46,9 @@ import org.apache.parquet.io.api.RecordConsumer;
 import org.apache.parquet.schema.GroupType;
 import org.apache.parquet.schema.OriginalType;
 import org.apache.parquet.schema.Type;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.List;
 import java.util.Map;
 
@@ -550,7 +550,7 @@ public class DataWritableWriter {
     @Override
     public void write(Object value) {
       Date vDate = inspector.getPrimitiveJavaObject(value);
-      recordConsumer.addInteger(DateWritableV2.dateToDays(vDate));
+      recordConsumer.addInteger(DateWritable.dateToDays(vDate));
     }
   }
 }
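
Aside (illustration only, not part of the patch): Parquet's DATE type is an int32 count of days since 1970-01-01, which is exactly what dateToDays() hands to recordConsumer.addInteger(). Two worked values via java.time for comparison:

    import java.time.LocalDate;

    public class ParquetDateSketch {
      public static void main(String[] args) {
        // Epoch-day encoding as written into the Parquet DATE column.
        System.out.println(LocalDate.parse("1970-01-02").toEpochDay()); // 1
        System.out.println(LocalDate.parse("2018-06-25").toEpochDay()); // 17707
      }
    }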

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
index 7af6dab..44687ef 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
@@ -54,7 +54,7 @@ import org.apache.hadoop.hive.ql.metadata.UniqueConstraint.UniqueConstraintCol;
 import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo.ForeignKeyCol;
 import org.apache.hadoop.hive.ql.plan.DescTableDesc;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hive.common.util.HiveStringUtils;
 
 import com.google.common.collect.Lists;
@@ -107,7 +107,7 @@ public final class MetaDataFormatUtils {
       return "";
     }
 
-    DateWritableV2 writableValue = new DateWritableV2((int) val.getDaysSinceEpoch());
+    DateWritable writableValue = new DateWritable((int) val.getDaysSinceEpoch());
     return writableValue.toString();
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
index f3ec709..857f300 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
@@ -66,7 +66,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMin;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -180,7 +180,7 @@ public class StatsOptimizer extends Transform {
 
     enum DateSubType {
       DAYS {@Override
-        Object cast(long longValue) { return (new DateWritableV2((int)longValue)).get();}
+        Object cast(long longValue) { return (new DateWritable((int)longValue)).get();}
       };
       abstract Object cast(long longValue);
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
index d950991..12af94e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
@@ -18,6 +18,8 @@
 package org.apache.hadoop.hive.ql.optimizer.calcite.translator;
 
 import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
@@ -43,11 +45,9 @@ import org.apache.calcite.sql.type.SqlTypeUtil;
 import org.apache.calcite.util.DateString;
 import org.apache.calcite.util.TimeString;
 import org.apache.calcite.util.TimestampString;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.Hive;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
index 2ae015a..7cedab6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
@@ -46,14 +46,12 @@ import org.apache.calcite.util.ConversionUtil;
 import org.apache.calcite.util.DateString;
 import org.apache.calcite.util.NlsString;
 import org.apache.calcite.util.TimestampString;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -101,11 +99,14 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
 import java.math.BigDecimal;
 import java.math.BigInteger;
+import java.sql.Timestamp;
 import java.time.Instant;
 import java.util.ArrayList;
 import java.util.Calendar;
+import java.util.Date;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 public class RexNodeConverter {
@@ -393,6 +394,8 @@ public class RexNodeConverter {
       GenericUDF udf = func.getGenericUDF();
       if ((udf instanceof GenericUDFToChar) || (udf instanceof GenericUDFToVarchar)
           || (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate)
+          // Calcite can not specify the scale for timestamp. As a result, all
+          // the millisecond part will be lost
           || (udf instanceof GenericUDFTimestamp) || (udf instanceof GenericUDFToTimestampLocalTZ)
           || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)) {
         castExpr = cluster.getRexBuilder().makeAbstractCast(
@@ -698,9 +701,9 @@ public class RexNodeConverter {
       calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
       break;
     case DATE:
-      final Date date = (Date) value;
-      calciteLiteral = rexBuilder.makeDateLiteral(
-          DateString.fromDaysSinceEpoch(date.toEpochDay()));
+      final Calendar cal = Calendar.getInstance(Locale.getDefault());
+      cal.setTime((Date) value);
+      calciteLiteral = rexBuilder.makeDateLiteral(DateString.fromCalendarFields(cal));
       break;
     case TIMESTAMP:
       final TimestampString tsString;
@@ -708,7 +711,9 @@ public class RexNodeConverter {
         tsString = TimestampString.fromCalendarFields((Calendar) value);
       } else {
         final Timestamp ts = (Timestamp) value;
-        tsString = TimestampString.fromMillisSinceEpoch(ts.toEpochMilli()).withNanos(ts.getNanos());
+        final Calendar calt = Calendar.getInstance(Locale.getDefault());
+        calt.setTimeInMillis(ts.getTime());
+        tsString = TimestampString.fromCalendarFields(calt).withNanos(ts.getNanos());
       }
       // Must call makeLiteral, not makeTimestampLiteral
       // to have the RexBuilder.roundTime logic kick in

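The hunk above swaps zone-independent literal construction (DateString.fromDaysSinceEpoch, TimestampString.fromMillisSinceEpoch) for Calendar-based construction, whose fields are read in the calendar's time zone. A small sketch of the two styles, using Calcite's DateString plus the JDK; the sample date is arbitrary and this is not the Hive code path itself:

  import java.util.Calendar;
  import java.util.Locale;
  import org.apache.calcite.util.DateString;

  public class CalciteDateLiterals {
    public static void main(String[] args) {
      java.sql.Date d = java.sql.Date.valueOf("2024-11-11");

      // Zone-independent: the literal is derived from a UTC day count.
      DateString viaEpoch =
          DateString.fromDaysSinceEpoch((int) d.toLocalDate().toEpochDay());

      // Zone-dependent: Calendar fields are interpreted in the default zone,
      // which is the style the reverted code above goes back to.
      Calendar cal = Calendar.getInstance(Locale.getDefault());
      cal.setTime(d);
      DateString viaCalendar = DateString.fromCalendarFields(cal);

      System.out.println(viaEpoch + " / " + viaCalendar); // 2024-11-11 / 2024-11-11
      // The two diverge only when the millis were produced under a different
      // zone than the Calendar uses, which is how literal shifts creep in.
    }
  }
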

[06/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out b/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out
index ee7602a..2a390fa 100644
--- a/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out
+++ b/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out
@@ -158,7 +158,7 @@ POSTHOOK: Input: default@interval_arithmetic_1
 dateval	_c1	_c2	_c3	_c4	_c5	_c6
 0004-09-22	0002-07-22	0006-11-22	0006-11-22	0002-07-22	0002-07-22	0006-11-22
 0528-10-27	0526-08-27	0530-12-27	0530-12-27	0526-08-27	0526-08-27	0530-12-27
-1319-02-02	1316-12-03	1321-04-02	1321-04-02	1316-12-03	1316-12-03	1321-04-02
+1319-02-02	1316-12-02	1321-04-02	1321-04-02	1316-12-02	1316-12-02	1321-04-02
 1404-07-23	1402-05-23	1406-09-23	1406-09-23	1402-05-23	1402-05-23	1406-09-23
 1815-05-06	1813-03-06	1817-07-06	1817-07-06	1813-03-06	1813-03-06	1817-07-06
 1883-04-17	1881-02-17	1885-06-17	1885-06-17	1881-02-17	1881-02-17	1885-06-17
@@ -249,7 +249,7 @@ STAGE PLANS:
                   className: VectorSelectOperator
                   native: true
                   projectedOutputColumnNums: [0, 3, 4, 5]
-                  selectExpressions: DateColSubtractDateScalar(col 0:date, val 1999-06-07) -> 3:interval_day_time, DateScalarSubtractDateColumn(val 1999-06-07, col 0:date) -> 4:interval_day_time, DateColSubtractDateColumn(col 0:date, col 0:date) -> 5:interval_day_time
+                  selectExpressions: DateColSubtractDateScalar(col 0:date, val 1999-06-07 00:00:00.0) -> 3:interval_day_time, DateScalarSubtractDateColumn(val 1999-06-07 00:00:00.0, col 0:date) -> 4:interval_day_time, DateColSubtractDateColumn(col 0:date, col 0:date) -> 5:interval_day_time
               Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: date)
@@ -315,56 +315,56 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@interval_arithmetic_1
 #### A masked pattern was here ####
 dateval	_c1	_c2	_c3
-0004-09-22	-728551 00:00:00.000000000	728551 00:00:00.000000000	0 00:00:00.000000000
-0528-10-27	-537129 00:00:00.000000000	537129 00:00:00.000000000	0 00:00:00.000000000
-1319-02-02	-248490 00:00:00.000000000	248490 00:00:00.000000000	0 00:00:00.000000000
-1404-07-23	-217273 00:00:00.000000000	217273 00:00:00.000000000	0 00:00:00.000000000
-1815-05-06	-67237 00:00:00.000000000	67237 00:00:00.000000000	0 00:00:00.000000000
-1883-04-17	-42419 00:00:00.000000000	42419 00:00:00.000000000	0 00:00:00.000000000
+0004-09-22	-728552 23:00:00.000000000	728552 23:00:00.000000000	0 00:00:00.000000000
+0528-10-27	-537126 23:00:00.000000000	537126 23:00:00.000000000	0 00:00:00.000000000
+1319-02-02	-248481 23:00:00.000000000	248481 23:00:00.000000000	0 00:00:00.000000000
+1404-07-23	-217263 23:00:00.000000000	217263 23:00:00.000000000	0 00:00:00.000000000
+1815-05-06	-67236 23:00:00.000000000	67236 23:00:00.000000000	0 00:00:00.000000000
+1883-04-17	-42418 23:00:00.000000000	42418 23:00:00.000000000	0 00:00:00.000000000
 1966-08-16	-11983 00:00:00.000000000	11983 00:00:00.000000000	0 00:00:00.000000000
-1973-04-17	-9547 00:00:00.000000000	9547 00:00:00.000000000	0 00:00:00.000000000
+1973-04-17	-9546 23:00:00.000000000	9546 23:00:00.000000000	0 00:00:00.000000000
 1974-10-04	-9012 00:00:00.000000000	9012 00:00:00.000000000	0 00:00:00.000000000
-1976-03-03	-8496 00:00:00.000000000	8496 00:00:00.000000000	0 00:00:00.000000000
+1976-03-03	-8495 23:00:00.000000000	8495 23:00:00.000000000	0 00:00:00.000000000
 1976-05-06	-8432 00:00:00.000000000	8432 00:00:00.000000000	0 00:00:00.000000000
 1978-08-05	-7611 00:00:00.000000000	7611 00:00:00.000000000	0 00:00:00.000000000
-1981-04-25	-6617 00:00:00.000000000	6617 00:00:00.000000000	0 00:00:00.000000000
-1981-11-15	-6413 00:00:00.000000000	6413 00:00:00.000000000	0 00:00:00.000000000
+1981-04-25	-6616 23:00:00.000000000	6616 23:00:00.000000000	0 00:00:00.000000000
+1981-11-15	-6412 23:00:00.000000000	6412 23:00:00.000000000	0 00:00:00.000000000
 1985-07-20	-5070 00:00:00.000000000	5070 00:00:00.000000000	0 00:00:00.000000000
-1985-11-18	-4949 00:00:00.000000000	4949 00:00:00.000000000	0 00:00:00.000000000
-1987-02-21	-4489 00:00:00.000000000	4489 00:00:00.000000000	0 00:00:00.000000000
+1985-11-18	-4948 23:00:00.000000000	4948 23:00:00.000000000	0 00:00:00.000000000
+1987-02-21	-4488 23:00:00.000000000	4488 23:00:00.000000000	0 00:00:00.000000000
 1987-05-28	-4393 00:00:00.000000000	4393 00:00:00.000000000	0 00:00:00.000000000
 1998-10-16	-234 00:00:00.000000000	234 00:00:00.000000000	0 00:00:00.000000000
 1999-10-03	118 00:00:00.000000000	-118 00:00:00.000000000	0 00:00:00.000000000
-2000-12-18	560 00:00:00.000000000	-560 00:00:00.000000000	0 00:00:00.000000000
+2000-12-18	560 01:00:00.000000000	-560 01:00:00.000000000	0 00:00:00.000000000
 2002-05-10	1068 00:00:00.000000000	-1068 00:00:00.000000000	0 00:00:00.000000000
 2003-09-23	1569 00:00:00.000000000	-1569 00:00:00.000000000	0 00:00:00.000000000
-2004-03-07	1735 00:00:00.000000000	-1735 00:00:00.000000000	0 00:00:00.000000000
-2007-02-09	2804 00:00:00.000000000	-2804 00:00:00.000000000	0 00:00:00.000000000
-2009-01-21	3516 00:00:00.000000000	-3516 00:00:00.000000000	0 00:00:00.000000000
+2004-03-07	1735 01:00:00.000000000	-1735 01:00:00.000000000	0 00:00:00.000000000
+2007-02-09	2804 01:00:00.000000000	-2804 01:00:00.000000000	0 00:00:00.000000000
+2009-01-21	3516 01:00:00.000000000	-3516 01:00:00.000000000	0 00:00:00.000000000
 2010-04-08	3958 00:00:00.000000000	-3958 00:00:00.000000000	0 00:00:00.000000000
 2013-04-07	5053 00:00:00.000000000	-5053 00:00:00.000000000	0 00:00:00.000000000
 2013-04-10	5056 00:00:00.000000000	-5056 00:00:00.000000000	0 00:00:00.000000000
 2021-09-24	8145 00:00:00.000000000	-8145 00:00:00.000000000	0 00:00:00.000000000
-2024-11-11	9289 00:00:00.000000000	-9289 00:00:00.000000000	0 00:00:00.000000000
+2024-11-11	9289 01:00:00.000000000	-9289 01:00:00.000000000	0 00:00:00.000000000
 4143-07-08	783111 00:00:00.000000000	-783111 00:00:00.000000000	0 00:00:00.000000000
-4966-12-04	1083855 00:00:00.000000000	-1083855 00:00:00.000000000	0 00:00:00.000000000
-5339-02-01	1219784 00:00:00.000000000	-1219784 00:00:00.000000000	0 00:00:00.000000000
+4966-12-04	1083855 01:00:00.000000000	-1083855 01:00:00.000000000	0 00:00:00.000000000
+5339-02-01	1219784 01:00:00.000000000	-1219784 01:00:00.000000000	0 00:00:00.000000000
 5344-10-04	1221856 00:00:00.000000000	-1221856 00:00:00.000000000	0 00:00:00.000000000
 5397-07-13	1241131 00:00:00.000000000	-1241131 00:00:00.000000000	0 00:00:00.000000000
 5966-07-09	1448949 00:00:00.000000000	-1448949 00:00:00.000000000	0 00:00:00.000000000
 6229-06-28	1544997 00:00:00.000000000	-1544997 00:00:00.000000000	0 00:00:00.000000000
 6482-04-27	1637342 00:00:00.000000000	-1637342 00:00:00.000000000	0 00:00:00.000000000
-6631-11-13	1691962 00:00:00.000000000	-1691962 00:00:00.000000000	0 00:00:00.000000000
+6631-11-13	1691962 01:00:00.000000000	-1691962 01:00:00.000000000	0 00:00:00.000000000
 6705-09-28	1718944 00:00:00.000000000	-1718944 00:00:00.000000000	0 00:00:00.000000000
-6731-02-12	1728212 00:00:00.000000000	-1728212 00:00:00.000000000	0 00:00:00.000000000
-7160-12-02	1885195 00:00:00.000000000	-1885195 00:00:00.000000000	0 00:00:00.000000000
+6731-02-12	1728212 01:00:00.000000000	-1728212 01:00:00.000000000	0 00:00:00.000000000
+7160-12-02	1885195 01:00:00.000000000	-1885195 01:00:00.000000000	0 00:00:00.000000000
 7409-09-07	1976054 00:00:00.000000000	-1976054 00:00:00.000000000	0 00:00:00.000000000
 7503-06-23	2010310 00:00:00.000000000	-2010310 00:00:00.000000000	0 00:00:00.000000000
 8422-07-22	2345998 00:00:00.000000000	-2345998 00:00:00.000000000	0 00:00:00.000000000
-8521-01-16	2381970 00:00:00.000000000	-2381970 00:00:00.000000000	0 00:00:00.000000000
+8521-01-16	2381970 01:00:00.000000000	-2381970 01:00:00.000000000	0 00:00:00.000000000
 9075-06-13	2584462 00:00:00.000000000	-2584462 00:00:00.000000000	0 00:00:00.000000000
-9209-11-11	2633556 00:00:00.000000000	-2633556 00:00:00.000000000	0 00:00:00.000000000
-9403-01-09	2704106 00:00:00.000000000	-2704106 00:00:00.000000000	0 00:00:00.000000000
+9209-11-11	2633556 01:00:00.000000000	-2633556 01:00:00.000000000	0 00:00:00.000000000
+9403-01-09	2704106 01:00:00.000000000	-2704106 01:00:00.000000000	0 00:00:00.000000000
 PREHOOK: query: explain vectorization expression
 select
   tsval,
@@ -488,53 +488,53 @@ POSTHOOK: Input: default@interval_arithmetic_1
 tsval	_c1	_c2	_c3	_c4	_c5	_c6
 0004-09-22 18:26:29.519542222	0002-07-22 18:26:29.519542222	0006-11-22 18:26:29.519542222	0006-11-22 18:26:29.519542222	0002-07-22 18:26:29.519542222	0002-07-22 18:26:29.519542222	0006-11-22 18:26:29.519542222
 0528-10-27 08:15:18.941718273	0526-08-27 08:15:18.941718273	0530-12-27 08:15:18.941718273	0530-12-27 08:15:18.941718273	0526-08-27 08:15:18.941718273	0526-08-27 08:15:18.941718273	0530-12-27 08:15:18.941718273
-1319-02-02 16:31:57.778	1316-12-03 16:31:57.778	1321-04-02 16:31:57.778	1321-04-02 16:31:57.778	1316-12-03 16:31:57.778	1316-12-03 16:31:57.778	1321-04-02 16:31:57.778
+1319-02-02 16:31:57.778	1316-12-02 16:31:57.778	1321-04-02 16:31:57.778	1321-04-02 16:31:57.778	1316-12-02 16:31:57.778	1316-12-02 16:31:57.778	1321-04-02 16:31:57.778
 1404-07-23 15:32:16.059185026	1402-05-23 15:32:16.059185026	1406-09-23 15:32:16.059185026	1406-09-23 15:32:16.059185026	1402-05-23 15:32:16.059185026	1402-05-23 15:32:16.059185026	1406-09-23 15:32:16.059185026
 1815-05-06 00:12:37.543584705	1813-03-06 00:12:37.543584705	1817-07-06 00:12:37.543584705	1817-07-06 00:12:37.543584705	1813-03-06 00:12:37.543584705	1813-03-06 00:12:37.543584705	1817-07-06 00:12:37.543584705
 1883-04-17 04:14:34.647766229	1881-02-17 04:14:34.647766229	1885-06-17 04:14:34.647766229	1885-06-17 04:14:34.647766229	1881-02-17 04:14:34.647766229	1881-02-17 04:14:34.647766229	1885-06-17 04:14:34.647766229
 1966-08-16 13:36:50.183618031	1964-06-16 13:36:50.183618031	1968-10-16 13:36:50.183618031	1968-10-16 13:36:50.183618031	1964-06-16 13:36:50.183618031	1964-06-16 13:36:50.183618031	1968-10-16 13:36:50.183618031
-1973-04-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1975-06-17 06:30:38.596784156	1975-06-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1975-06-17 06:30:38.596784156
-1974-10-04 17:21:03.989	1972-08-04 17:21:03.989	1976-12-04 17:21:03.989	1976-12-04 17:21:03.989	1972-08-04 17:21:03.989	1972-08-04 17:21:03.989	1976-12-04 17:21:03.989
-1976-03-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1978-05-03 04:54:33.000895162	1978-05-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1978-05-03 04:54:33.000895162
+1973-04-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1975-06-17 07:30:38.596784156	1975-06-17 07:30:38.596784156	1971-02-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1975-06-17 07:30:38.596784156
+1974-10-04 17:21:03.989	1972-08-04 17:21:03.989	1976-12-04 16:21:03.989	1976-12-04 16:21:03.989	1972-08-04 17:21:03.989	1972-08-04 17:21:03.989	1976-12-04 16:21:03.989
+1976-03-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1978-05-03 05:54:33.000895162	1978-05-03 05:54:33.000895162	1974-01-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1978-05-03 05:54:33.000895162
 1976-05-06 00:42:30.910786948	1974-03-06 00:42:30.910786948	1978-07-06 00:42:30.910786948	1978-07-06 00:42:30.910786948	1974-03-06 00:42:30.910786948	1974-03-06 00:42:30.910786948	1978-07-06 00:42:30.910786948
 1978-08-05 14:41:05.501	1976-06-05 14:41:05.501	1980-10-05 14:41:05.501	1980-10-05 14:41:05.501	1976-06-05 14:41:05.501	1976-06-05 14:41:05.501	1980-10-05 14:41:05.501
-1981-04-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1983-06-25 09:01:12.077192689	1983-06-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1983-06-25 09:01:12.077192689
-1981-11-15 23:03:10.999338387	1979-09-15 23:03:10.999338387	1984-01-15 23:03:10.999338387	1984-01-15 23:03:10.999338387	1979-09-15 23:03:10.999338387	1979-09-15 23:03:10.999338387	1984-01-15 23:03:10.999338387
+1981-04-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1983-06-25 10:01:12.077192689	1983-06-25 10:01:12.077192689	1979-02-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1983-06-25 10:01:12.077192689
+1981-11-15 23:03:10.999338387	1979-09-16 00:03:10.999338387	1984-01-15 23:03:10.999338387	1984-01-15 23:03:10.999338387	1979-09-16 00:03:10.999338387	1979-09-16 00:03:10.999338387	1984-01-15 23:03:10.999338387
 1985-07-20 09:30:11	1983-05-20 09:30:11	1987-09-20 09:30:11	1987-09-20 09:30:11	1983-05-20 09:30:11	1983-05-20 09:30:11	1987-09-20 09:30:11
-1985-11-18 16:37:54	1983-09-18 16:37:54	1988-01-18 16:37:54	1988-01-18 16:37:54	1983-09-18 16:37:54	1983-09-18 16:37:54	1988-01-18 16:37:54
-1987-02-21 19:48:29	1984-12-21 19:48:29	1989-04-21 19:48:29	1989-04-21 19:48:29	1984-12-21 19:48:29	1984-12-21 19:48:29	1989-04-21 19:48:29
-1987-05-28 13:52:07.900916635	1985-03-28 13:52:07.900916635	1989-07-28 13:52:07.900916635	1989-07-28 13:52:07.900916635	1985-03-28 13:52:07.900916635	1985-03-28 13:52:07.900916635	1989-07-28 13:52:07.900916635
-1998-10-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	2000-12-16 20:05:29.397591987	2000-12-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	2000-12-16 20:05:29.397591987
-1999-10-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	2001-12-03 16:59:10.396903939	2001-12-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	2001-12-03 16:59:10.396903939
-2000-12-18 08:42:30.000595596	1998-10-18 08:42:30.000595596	2003-02-18 08:42:30.000595596	2003-02-18 08:42:30.000595596	1998-10-18 08:42:30.000595596	1998-10-18 08:42:30.000595596	2003-02-18 08:42:30.000595596
-2002-05-10 05:29:48.990818073	2000-03-10 05:29:48.990818073	2004-07-10 05:29:48.990818073	2004-07-10 05:29:48.990818073	2000-03-10 05:29:48.990818073	2000-03-10 05:29:48.990818073	2004-07-10 05:29:48.990818073
-2003-09-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2005-11-23 22:33:17.00003252	2005-11-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2005-11-23 22:33:17.00003252
-2004-03-07 20:14:13	2002-01-07 20:14:13	2006-05-07 20:14:13	2006-05-07 20:14:13	2002-01-07 20:14:13	2002-01-07 20:14:13	2006-05-07 20:14:13
-2007-02-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2009-04-09 05:17:29.368756876	2009-04-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2009-04-09 05:17:29.368756876
-2009-01-21 10:49:07.108	2006-11-21 10:49:07.108	2011-03-21 10:49:07.108	2011-03-21 10:49:07.108	2006-11-21 10:49:07.108	2006-11-21 10:49:07.108	2011-03-21 10:49:07.108
-2010-04-08 02:43:35.861742727	2008-02-08 02:43:35.861742727	2012-06-08 02:43:35.861742727	2012-06-08 02:43:35.861742727	2008-02-08 02:43:35.861742727	2008-02-08 02:43:35.861742727	2012-06-08 02:43:35.861742727
-2013-04-07 02:44:43.00086821	2011-02-07 02:44:43.00086821	2015-06-07 02:44:43.00086821	2015-06-07 02:44:43.00086821	2011-02-07 02:44:43.00086821	2011-02-07 02:44:43.00086821	2015-06-07 02:44:43.00086821
-2013-04-10 00:43:46.854731546	2011-02-10 00:43:46.854731546	2015-06-10 00:43:46.854731546	2015-06-10 00:43:46.854731546	2011-02-10 00:43:46.854731546	2011-02-10 00:43:46.854731546	2015-06-10 00:43:46.854731546
-2021-09-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2023-11-24 03:18:32.413655165	2023-11-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2023-11-24 03:18:32.413655165
-2024-11-11 16:42:41.101	2022-09-11 16:42:41.101	2027-01-11 16:42:41.101	2027-01-11 16:42:41.101	2022-09-11 16:42:41.101	2022-09-11 16:42:41.101	2027-01-11 16:42:41.101
+1985-11-18 16:37:54	1983-09-18 17:37:54	1988-01-18 16:37:54	1988-01-18 16:37:54	1983-09-18 17:37:54	1983-09-18 17:37:54	1988-01-18 16:37:54
+1987-02-21 19:48:29	1984-12-21 19:48:29	1989-04-21 20:48:29	1989-04-21 20:48:29	1984-12-21 19:48:29	1984-12-21 19:48:29	1989-04-21 20:48:29
+1987-05-28 13:52:07.900916635	1985-03-28 12:52:07.900916635	1989-07-28 13:52:07.900916635	1989-07-28 13:52:07.900916635	1985-03-28 12:52:07.900916635	1985-03-28 12:52:07.900916635	1989-07-28 13:52:07.900916635
+1998-10-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	2000-12-16 19:05:29.397591987	2000-12-16 19:05:29.397591987	1996-08-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	2000-12-16 19:05:29.397591987
+1999-10-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	2001-12-03 15:59:10.396903939	2001-12-03 15:59:10.396903939	1997-08-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	2001-12-03 15:59:10.396903939
+2000-12-18 08:42:30.000595596	1998-10-18 09:42:30.000595596	2003-02-18 08:42:30.000595596	2003-02-18 08:42:30.000595596	1998-10-18 09:42:30.000595596	1998-10-18 09:42:30.000595596	2003-02-18 08:42:30.000595596
+2002-05-10 05:29:48.990818073	2000-03-10 04:29:48.990818073	2004-07-10 05:29:48.990818073	2004-07-10 05:29:48.990818073	2000-03-10 04:29:48.990818073	2000-03-10 04:29:48.990818073	2004-07-10 05:29:48.990818073
+2003-09-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2005-11-23 21:33:17.00003252	2005-11-23 21:33:17.00003252	2001-07-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2005-11-23 21:33:17.00003252
+2004-03-07 20:14:13	2002-01-07 20:14:13	2006-05-07 21:14:13	2006-05-07 21:14:13	2002-01-07 20:14:13	2002-01-07 20:14:13	2006-05-07 21:14:13
+2007-02-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2009-04-09 06:17:29.368756876	2009-04-09 06:17:29.368756876	2004-12-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2009-04-09 06:17:29.368756876
+2009-01-21 10:49:07.108	2006-11-21 10:49:07.108	2011-03-21 11:49:07.108	2011-03-21 11:49:07.108	2006-11-21 10:49:07.108	2006-11-21 10:49:07.108	2011-03-21 11:49:07.108
+2010-04-08 02:43:35.861742727	2008-02-08 01:43:35.861742727	2012-06-08 02:43:35.861742727	2012-06-08 02:43:35.861742727	2008-02-08 01:43:35.861742727	2008-02-08 01:43:35.861742727	2012-06-08 02:43:35.861742727
+2013-04-07 02:44:43.00086821	2011-02-07 01:44:43.00086821	2015-06-07 02:44:43.00086821	2015-06-07 02:44:43.00086821	2011-02-07 01:44:43.00086821	2011-02-07 01:44:43.00086821	2015-06-07 02:44:43.00086821
+2013-04-10 00:43:46.854731546	2011-02-09 23:43:46.854731546	2015-06-10 00:43:46.854731546	2015-06-10 00:43:46.854731546	2011-02-09 23:43:46.854731546	2011-02-09 23:43:46.854731546	2015-06-10 00:43:46.854731546
+2021-09-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2023-11-24 02:18:32.413655165	2023-11-24 02:18:32.413655165	2019-07-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2023-11-24 02:18:32.413655165
+2024-11-11 16:42:41.101	2022-09-11 17:42:41.101	2027-01-11 16:42:41.101	2027-01-11 16:42:41.101	2022-09-11 17:42:41.101	2022-09-11 17:42:41.101	2027-01-11 16:42:41.101
 4143-07-08 10:53:27.252802259	4141-05-08 10:53:27.252802259	4145-09-08 10:53:27.252802259	4145-09-08 10:53:27.252802259	4141-05-08 10:53:27.252802259	4141-05-08 10:53:27.252802259	4145-09-08 10:53:27.252802259
-4966-12-04 09:30:55.202	4964-10-04 09:30:55.202	4969-02-04 09:30:55.202	4969-02-04 09:30:55.202	4964-10-04 09:30:55.202	4964-10-04 09:30:55.202	4969-02-04 09:30:55.202
-5339-02-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5341-04-01 14:10:01.085678691	5341-04-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5341-04-01 14:10:01.085678691
-5344-10-04 18:40:08.165	5342-08-04 18:40:08.165	5346-12-04 18:40:08.165	5346-12-04 18:40:08.165	5342-08-04 18:40:08.165	5342-08-04 18:40:08.165	5346-12-04 18:40:08.165
+4966-12-04 09:30:55.202	4964-10-04 10:30:55.202	4969-02-04 09:30:55.202	4969-02-04 09:30:55.202	4964-10-04 10:30:55.202	4964-10-04 10:30:55.202	4969-02-04 09:30:55.202
+5339-02-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5341-04-01 15:10:01.085678691	5341-04-01 15:10:01.085678691	5336-12-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5341-04-01 15:10:01.085678691
+5344-10-04 18:40:08.165	5342-08-04 18:40:08.165	5346-12-04 17:40:08.165	5346-12-04 17:40:08.165	5342-08-04 18:40:08.165	5342-08-04 18:40:08.165	5346-12-04 17:40:08.165
 5397-07-13 07:12:32.000896438	5395-05-13 07:12:32.000896438	5399-09-13 07:12:32.000896438	5399-09-13 07:12:32.000896438	5395-05-13 07:12:32.000896438	5395-05-13 07:12:32.000896438	5399-09-13 07:12:32.000896438
 5966-07-09 03:30:50.597	5964-05-09 03:30:50.597	5968-09-09 03:30:50.597	5968-09-09 03:30:50.597	5964-05-09 03:30:50.597	5964-05-09 03:30:50.597	5968-09-09 03:30:50.597
 6229-06-28 02:54:28.970117179	6227-04-28 02:54:28.970117179	6231-08-28 02:54:28.970117179	6231-08-28 02:54:28.970117179	6227-04-28 02:54:28.970117179	6227-04-28 02:54:28.970117179	6231-08-28 02:54:28.970117179
-6482-04-27 12:07:38.073915413	6480-02-27 12:07:38.073915413	6484-06-27 12:07:38.073915413	6484-06-27 12:07:38.073915413	6480-02-27 12:07:38.073915413	6480-02-27 12:07:38.073915413	6484-06-27 12:07:38.073915413
-6631-11-13 16:31:29.702202248	6629-09-13 16:31:29.702202248	6634-01-13 16:31:29.702202248	6634-01-13 16:31:29.702202248	6629-09-13 16:31:29.702202248	6629-09-13 16:31:29.702202248	6634-01-13 16:31:29.702202248
-6705-09-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6707-11-28 18:27:28.000845672	6707-11-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6707-11-28 18:27:28.000845672
-6731-02-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6733-04-12 08:12:48.287783702	6733-04-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6733-04-12 08:12:48.287783702
-7160-12-02 06:00:24.81200852	7158-10-02 06:00:24.81200852	7163-02-02 06:00:24.81200852	7163-02-02 06:00:24.81200852	7158-10-02 06:00:24.81200852	7158-10-02 06:00:24.81200852	7163-02-02 06:00:24.81200852
-7409-09-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7411-11-07 23:33:32.459349602	7411-11-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7411-11-07 23:33:32.459349602
+6482-04-27 12:07:38.073915413	6480-02-27 11:07:38.073915413	6484-06-27 12:07:38.073915413	6484-06-27 12:07:38.073915413	6480-02-27 11:07:38.073915413	6480-02-27 11:07:38.073915413	6484-06-27 12:07:38.073915413
+6631-11-13 16:31:29.702202248	6629-09-13 17:31:29.702202248	6634-01-13 16:31:29.702202248	6634-01-13 16:31:29.702202248	6629-09-13 17:31:29.702202248	6629-09-13 17:31:29.702202248	6634-01-13 16:31:29.702202248
+6705-09-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6707-11-28 17:27:28.000845672	6707-11-28 17:27:28.000845672	6703-07-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6707-11-28 17:27:28.000845672
+6731-02-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6733-04-12 09:12:48.287783702	6733-04-12 09:12:48.287783702	6728-12-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6733-04-12 09:12:48.287783702
+7160-12-02 06:00:24.81200852	7158-10-02 07:00:24.81200852	7163-02-02 06:00:24.81200852	7163-02-02 06:00:24.81200852	7158-10-02 07:00:24.81200852	7158-10-02 07:00:24.81200852	7163-02-02 06:00:24.81200852
+7409-09-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7411-11-07 22:33:32.459349602	7411-11-07 22:33:32.459349602	7407-07-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7411-11-07 22:33:32.459349602
 7503-06-23 23:14:17.486	7501-04-23 23:14:17.486	7505-08-23 23:14:17.486	7505-08-23 23:14:17.486	7501-04-23 23:14:17.486	7501-04-23 23:14:17.486	7505-08-23 23:14:17.486
 8422-07-22 03:21:45.745036084	8420-05-22 03:21:45.745036084	8424-09-22 03:21:45.745036084	8424-09-22 03:21:45.745036084	8420-05-22 03:21:45.745036084	8420-05-22 03:21:45.745036084	8424-09-22 03:21:45.745036084
-8521-01-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8523-03-16 20:42:05.668832388	8523-03-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8523-03-16 20:42:05.668832388
+8521-01-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8523-03-16 21:42:05.668832388	8523-03-16 21:42:05.668832388	8518-11-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8523-03-16 21:42:05.668832388
 9075-06-13 16:20:09.218517797	9073-04-13 16:20:09.218517797	9077-08-13 16:20:09.218517797	9077-08-13 16:20:09.218517797	9073-04-13 16:20:09.218517797	9073-04-13 16:20:09.218517797	9077-08-13 16:20:09.218517797
-9209-11-11 04:08:58.223768453	9207-09-11 04:08:58.223768453	9212-01-11 04:08:58.223768453	9212-01-11 04:08:58.223768453	9207-09-11 04:08:58.223768453	9207-09-11 04:08:58.223768453	9212-01-11 04:08:58.223768453
+9209-11-11 04:08:58.223768453	9207-09-11 05:08:58.223768453	9212-01-11 04:08:58.223768453	9212-01-11 04:08:58.223768453	9207-09-11 05:08:58.223768453	9207-09-11 05:08:58.223768453	9212-01-11 04:08:58.223768453
 9403-01-09 18:12:33.547	9400-11-09 18:12:33.547	9405-03-09 18:12:33.547	9405-03-09 18:12:33.547	9400-11-09 18:12:33.547	9400-11-09 18:12:33.547	9405-03-09 18:12:33.547
 PREHOOK: query: explain vectorization expression
 select
@@ -760,50 +760,50 @@ dateval	_c1	_c2	_c3	_c4	_c5	_c6
 1404-07-23	1404-04-14 12:37:26.876543211	1404-10-30 11:22:33.123456789	1404-10-30 11:22:33.123456789	1404-04-14 12:37:26.876543211	1404-04-14 12:37:26.876543211	1404-10-30 11:22:33.123456789
 1815-05-06	1815-01-26 12:37:26.876543211	1815-08-13 11:22:33.123456789	1815-08-13 11:22:33.123456789	1815-01-26 12:37:26.876543211	1815-01-26 12:37:26.876543211	1815-08-13 11:22:33.123456789
 1883-04-17	1883-01-07 12:37:26.876543211	1883-07-25 11:22:33.123456789	1883-07-25 11:22:33.123456789	1883-01-07 12:37:26.876543211	1883-01-07 12:37:26.876543211	1883-07-25 11:22:33.123456789
-1966-08-16	1966-05-08 12:37:26.876543211	1966-11-23 11:22:33.123456789	1966-11-23 11:22:33.123456789	1966-05-08 12:37:26.876543211	1966-05-08 12:37:26.876543211	1966-11-23 11:22:33.123456789
-1973-04-17	1973-01-07 12:37:26.876543211	1973-07-25 11:22:33.123456789	1973-07-25 11:22:33.123456789	1973-01-07 12:37:26.876543211	1973-01-07 12:37:26.876543211	1973-07-25 11:22:33.123456789
-1974-10-04	1974-06-26 12:37:26.876543211	1975-01-11 11:22:33.123456789	1975-01-11 11:22:33.123456789	1974-06-26 12:37:26.876543211	1974-06-26 12:37:26.876543211	1975-01-11 11:22:33.123456789
-1976-03-03	1975-11-24 12:37:26.876543211	1976-06-10 11:22:33.123456789	1976-06-10 11:22:33.123456789	1975-11-24 12:37:26.876543211	1975-11-24 12:37:26.876543211	1976-06-10 11:22:33.123456789
-1976-05-06	1976-01-27 12:37:26.876543211	1976-08-13 11:22:33.123456789	1976-08-13 11:22:33.123456789	1976-01-27 12:37:26.876543211	1976-01-27 12:37:26.876543211	1976-08-13 11:22:33.123456789
-1978-08-05	1978-04-27 12:37:26.876543211	1978-11-12 11:22:33.123456789	1978-11-12 11:22:33.123456789	1978-04-27 12:37:26.876543211	1978-04-27 12:37:26.876543211	1978-11-12 11:22:33.123456789
-1981-04-25	1981-01-15 12:37:26.876543211	1981-08-02 11:22:33.123456789	1981-08-02 11:22:33.123456789	1981-01-15 12:37:26.876543211	1981-01-15 12:37:26.876543211	1981-08-02 11:22:33.123456789
-1981-11-15	1981-08-07 12:37:26.876543211	1982-02-22 11:22:33.123456789	1982-02-22 11:22:33.123456789	1981-08-07 12:37:26.876543211	1981-08-07 12:37:26.876543211	1982-02-22 11:22:33.123456789
-1985-07-20	1985-04-11 12:37:26.876543211	1985-10-27 11:22:33.123456789	1985-10-27 11:22:33.123456789	1985-04-11 12:37:26.876543211	1985-04-11 12:37:26.876543211	1985-10-27 11:22:33.123456789
-1985-11-18	1985-08-10 12:37:26.876543211	1986-02-25 11:22:33.123456789	1986-02-25 11:22:33.123456789	1985-08-10 12:37:26.876543211	1985-08-10 12:37:26.876543211	1986-02-25 11:22:33.123456789
-1987-02-21	1986-11-13 12:37:26.876543211	1987-05-31 11:22:33.123456789	1987-05-31 11:22:33.123456789	1986-11-13 12:37:26.876543211	1986-11-13 12:37:26.876543211	1987-05-31 11:22:33.123456789
-1987-05-28	1987-02-17 12:37:26.876543211	1987-09-04 11:22:33.123456789	1987-09-04 11:22:33.123456789	1987-02-17 12:37:26.876543211	1987-02-17 12:37:26.876543211	1987-09-04 11:22:33.123456789
-1998-10-16	1998-07-08 12:37:26.876543211	1999-01-23 11:22:33.123456789	1999-01-23 11:22:33.123456789	1998-07-08 12:37:26.876543211	1998-07-08 12:37:26.876543211	1999-01-23 11:22:33.123456789
-1999-10-03	1999-06-25 12:37:26.876543211	2000-01-10 11:22:33.123456789	2000-01-10 11:22:33.123456789	1999-06-25 12:37:26.876543211	1999-06-25 12:37:26.876543211	2000-01-10 11:22:33.123456789
-2000-12-18	2000-09-09 12:37:26.876543211	2001-03-27 11:22:33.123456789	2001-03-27 11:22:33.123456789	2000-09-09 12:37:26.876543211	2000-09-09 12:37:26.876543211	2001-03-27 11:22:33.123456789
-2002-05-10	2002-01-30 12:37:26.876543211	2002-08-17 11:22:33.123456789	2002-08-17 11:22:33.123456789	2002-01-30 12:37:26.876543211	2002-01-30 12:37:26.876543211	2002-08-17 11:22:33.123456789
-2003-09-23	2003-06-15 12:37:26.876543211	2003-12-31 11:22:33.123456789	2003-12-31 11:22:33.123456789	2003-06-15 12:37:26.876543211	2003-06-15 12:37:26.876543211	2003-12-31 11:22:33.123456789
-2004-03-07	2003-11-28 12:37:26.876543211	2004-06-14 11:22:33.123456789	2004-06-14 11:22:33.123456789	2003-11-28 12:37:26.876543211	2003-11-28 12:37:26.876543211	2004-06-14 11:22:33.123456789
-2007-02-09	2006-11-01 12:37:26.876543211	2007-05-19 11:22:33.123456789	2007-05-19 11:22:33.123456789	2006-11-01 12:37:26.876543211	2006-11-01 12:37:26.876543211	2007-05-19 11:22:33.123456789
-2009-01-21	2008-10-13 12:37:26.876543211	2009-04-30 11:22:33.123456789	2009-04-30 11:22:33.123456789	2008-10-13 12:37:26.876543211	2008-10-13 12:37:26.876543211	2009-04-30 11:22:33.123456789
-2010-04-08	2009-12-29 12:37:26.876543211	2010-07-16 11:22:33.123456789	2010-07-16 11:22:33.123456789	2009-12-29 12:37:26.876543211	2009-12-29 12:37:26.876543211	2010-07-16 11:22:33.123456789
-2013-04-07	2012-12-28 12:37:26.876543211	2013-07-15 11:22:33.123456789	2013-07-15 11:22:33.123456789	2012-12-28 12:37:26.876543211	2012-12-28 12:37:26.876543211	2013-07-15 11:22:33.123456789
-2013-04-10	2012-12-31 12:37:26.876543211	2013-07-18 11:22:33.123456789	2013-07-18 11:22:33.123456789	2012-12-31 12:37:26.876543211	2012-12-31 12:37:26.876543211	2013-07-18 11:22:33.123456789
-2021-09-24	2021-06-16 12:37:26.876543211	2022-01-01 11:22:33.123456789	2022-01-01 11:22:33.123456789	2021-06-16 12:37:26.876543211	2021-06-16 12:37:26.876543211	2022-01-01 11:22:33.123456789
-2024-11-11	2024-08-03 12:37:26.876543211	2025-02-18 11:22:33.123456789	2025-02-18 11:22:33.123456789	2024-08-03 12:37:26.876543211	2024-08-03 12:37:26.876543211	2025-02-18 11:22:33.123456789
+1966-08-16	1966-05-08 12:37:26.876543211	1966-11-23 10:22:33.123456789	1966-11-23 10:22:33.123456789	1966-05-08 12:37:26.876543211	1966-05-08 12:37:26.876543211	1966-11-23 10:22:33.123456789
+1973-04-17	1973-01-07 12:37:26.876543211	1973-07-25 12:22:33.123456789	1973-07-25 12:22:33.123456789	1973-01-07 12:37:26.876543211	1973-01-07 12:37:26.876543211	1973-07-25 12:22:33.123456789
+1974-10-04	1974-06-26 12:37:26.876543211	1975-01-11 10:22:33.123456789	1975-01-11 10:22:33.123456789	1974-06-26 12:37:26.876543211	1974-06-26 12:37:26.876543211	1975-01-11 10:22:33.123456789
+1976-03-03	1975-11-24 12:37:26.876543211	1976-06-10 12:22:33.123456789	1976-06-10 12:22:33.123456789	1975-11-24 12:37:26.876543211	1975-11-24 12:37:26.876543211	1976-06-10 12:22:33.123456789
+1976-05-06	1976-01-27 11:37:26.876543211	1976-08-13 11:22:33.123456789	1976-08-13 11:22:33.123456789	1976-01-27 11:37:26.876543211	1976-01-27 11:37:26.876543211	1976-08-13 11:22:33.123456789
+1978-08-05	1978-04-27 11:37:26.876543211	1978-11-12 10:22:33.123456789	1978-11-12 10:22:33.123456789	1978-04-27 11:37:26.876543211	1978-04-27 11:37:26.876543211	1978-11-12 10:22:33.123456789
+1981-04-25	1981-01-15 12:37:26.876543211	1981-08-02 12:22:33.123456789	1981-08-02 12:22:33.123456789	1981-01-15 12:37:26.876543211	1981-01-15 12:37:26.876543211	1981-08-02 12:22:33.123456789
+1981-11-15	1981-08-07 13:37:26.876543211	1982-02-22 11:22:33.123456789	1982-02-22 11:22:33.123456789	1981-08-07 13:37:26.876543211	1981-08-07 13:37:26.876543211	1982-02-22 11:22:33.123456789
+1985-07-20	1985-04-11 11:37:26.876543211	1985-10-27 10:22:33.123456789	1985-10-27 10:22:33.123456789	1985-04-11 11:37:26.876543211	1985-04-11 11:37:26.876543211	1985-10-27 10:22:33.123456789
+1985-11-18	1985-08-10 13:37:26.876543211	1986-02-25 11:22:33.123456789	1986-02-25 11:22:33.123456789	1985-08-10 13:37:26.876543211	1985-08-10 13:37:26.876543211	1986-02-25 11:22:33.123456789
+1987-02-21	1986-11-13 12:37:26.876543211	1987-05-31 12:22:33.123456789	1987-05-31 12:22:33.123456789	1986-11-13 12:37:26.876543211	1986-11-13 12:37:26.876543211	1987-05-31 12:22:33.123456789
+1987-05-28	1987-02-17 11:37:26.876543211	1987-09-04 11:22:33.123456789	1987-09-04 11:22:33.123456789	1987-02-17 11:37:26.876543211	1987-02-17 11:37:26.876543211	1987-09-04 11:22:33.123456789
+1998-10-16	1998-07-08 12:37:26.876543211	1999-01-23 10:22:33.123456789	1999-01-23 10:22:33.123456789	1998-07-08 12:37:26.876543211	1998-07-08 12:37:26.876543211	1999-01-23 10:22:33.123456789
+1999-10-03	1999-06-25 12:37:26.876543211	2000-01-10 10:22:33.123456789	2000-01-10 10:22:33.123456789	1999-06-25 12:37:26.876543211	1999-06-25 12:37:26.876543211	2000-01-10 10:22:33.123456789
+2000-12-18	2000-09-09 13:37:26.876543211	2001-03-27 11:22:33.123456789	2001-03-27 11:22:33.123456789	2000-09-09 13:37:26.876543211	2000-09-09 13:37:26.876543211	2001-03-27 11:22:33.123456789
+2002-05-10	2002-01-30 11:37:26.876543211	2002-08-17 11:22:33.123456789	2002-08-17 11:22:33.123456789	2002-01-30 11:37:26.876543211	2002-01-30 11:37:26.876543211	2002-08-17 11:22:33.123456789
+2003-09-23	2003-06-15 12:37:26.876543211	2003-12-31 10:22:33.123456789	2003-12-31 10:22:33.123456789	2003-06-15 12:37:26.876543211	2003-06-15 12:37:26.876543211	2003-12-31 10:22:33.123456789
+2004-03-07	2003-11-28 12:37:26.876543211	2004-06-14 12:22:33.123456789	2004-06-14 12:22:33.123456789	2003-11-28 12:37:26.876543211	2003-11-28 12:37:26.876543211	2004-06-14 12:22:33.123456789
+2007-02-09	2006-11-01 12:37:26.876543211	2007-05-19 12:22:33.123456789	2007-05-19 12:22:33.123456789	2006-11-01 12:37:26.876543211	2006-11-01 12:37:26.876543211	2007-05-19 12:22:33.123456789
+2009-01-21	2008-10-13 13:37:26.876543211	2009-04-30 12:22:33.123456789	2009-04-30 12:22:33.123456789	2008-10-13 13:37:26.876543211	2008-10-13 13:37:26.876543211	2009-04-30 12:22:33.123456789
+2010-04-08	2009-12-29 11:37:26.876543211	2010-07-16 11:22:33.123456789	2010-07-16 11:22:33.123456789	2009-12-29 11:37:26.876543211	2009-12-29 11:37:26.876543211	2010-07-16 11:22:33.123456789
+2013-04-07	2012-12-28 11:37:26.876543211	2013-07-15 11:22:33.123456789	2013-07-15 11:22:33.123456789	2012-12-28 11:37:26.876543211	2012-12-28 11:37:26.876543211	2013-07-15 11:22:33.123456789
+2013-04-10	2012-12-31 11:37:26.876543211	2013-07-18 11:22:33.123456789	2013-07-18 11:22:33.123456789	2012-12-31 11:37:26.876543211	2012-12-31 11:37:26.876543211	2013-07-18 11:22:33.123456789
+2021-09-24	2021-06-16 12:37:26.876543211	2022-01-01 10:22:33.123456789	2022-01-01 10:22:33.123456789	2021-06-16 12:37:26.876543211	2021-06-16 12:37:26.876543211	2022-01-01 10:22:33.123456789
+2024-11-11	2024-08-03 13:37:26.876543211	2025-02-18 11:22:33.123456789	2025-02-18 11:22:33.123456789	2024-08-03 13:37:26.876543211	2024-08-03 13:37:26.876543211	2025-02-18 11:22:33.123456789
 4143-07-08	4143-03-30 12:37:26.876543211	4143-10-15 11:22:33.123456789	4143-10-15 11:22:33.123456789	4143-03-30 12:37:26.876543211	4143-03-30 12:37:26.876543211	4143-10-15 11:22:33.123456789
-4966-12-04	4966-08-26 12:37:26.876543211	4967-03-13 11:22:33.123456789	4967-03-13 11:22:33.123456789	4966-08-26 12:37:26.876543211	4966-08-26 12:37:26.876543211	4967-03-13 11:22:33.123456789
-5339-02-01	5338-10-24 12:37:26.876543211	5339-05-11 11:22:33.123456789	5339-05-11 11:22:33.123456789	5338-10-24 12:37:26.876543211	5338-10-24 12:37:26.876543211	5339-05-11 11:22:33.123456789
-5344-10-04	5344-06-26 12:37:26.876543211	5345-01-11 11:22:33.123456789	5345-01-11 11:22:33.123456789	5344-06-26 12:37:26.876543211	5344-06-26 12:37:26.876543211	5345-01-11 11:22:33.123456789
+4966-12-04	4966-08-26 13:37:26.876543211	4967-03-13 12:22:33.123456789	4967-03-13 12:22:33.123456789	4966-08-26 13:37:26.876543211	4966-08-26 13:37:26.876543211	4967-03-13 12:22:33.123456789
+5339-02-01	5338-10-24 13:37:26.876543211	5339-05-11 12:22:33.123456789	5339-05-11 12:22:33.123456789	5338-10-24 13:37:26.876543211	5338-10-24 13:37:26.876543211	5339-05-11 12:22:33.123456789
+5344-10-04	5344-06-26 12:37:26.876543211	5345-01-11 10:22:33.123456789	5345-01-11 10:22:33.123456789	5344-06-26 12:37:26.876543211	5344-06-26 12:37:26.876543211	5345-01-11 10:22:33.123456789
 5397-07-13	5397-04-04 12:37:26.876543211	5397-10-20 11:22:33.123456789	5397-10-20 11:22:33.123456789	5397-04-04 12:37:26.876543211	5397-04-04 12:37:26.876543211	5397-10-20 11:22:33.123456789
 5966-07-09	5966-03-31 12:37:26.876543211	5966-10-16 11:22:33.123456789	5966-10-16 11:22:33.123456789	5966-03-31 12:37:26.876543211	5966-03-31 12:37:26.876543211	5966-10-16 11:22:33.123456789
 6229-06-28	6229-03-20 12:37:26.876543211	6229-10-05 11:22:33.123456789	6229-10-05 11:22:33.123456789	6229-03-20 12:37:26.876543211	6229-03-20 12:37:26.876543211	6229-10-05 11:22:33.123456789
-6482-04-27	6482-01-17 12:37:26.876543211	6482-08-04 11:22:33.123456789	6482-08-04 11:22:33.123456789	6482-01-17 12:37:26.876543211	6482-01-17 12:37:26.876543211	6482-08-04 11:22:33.123456789
-6631-11-13	6631-08-05 12:37:26.876543211	6632-02-20 11:22:33.123456789	6632-02-20 11:22:33.123456789	6631-08-05 12:37:26.876543211	6631-08-05 12:37:26.876543211	6632-02-20 11:22:33.123456789
-6705-09-28	6705-06-20 12:37:26.876543211	6706-01-05 11:22:33.123456789	6706-01-05 11:22:33.123456789	6705-06-20 12:37:26.876543211	6705-06-20 12:37:26.876543211	6706-01-05 11:22:33.123456789
-6731-02-12	6730-11-04 12:37:26.876543211	6731-05-22 11:22:33.123456789	6731-05-22 11:22:33.123456789	6730-11-04 12:37:26.876543211	6730-11-04 12:37:26.876543211	6731-05-22 11:22:33.123456789
-7160-12-02	7160-08-24 12:37:26.876543211	7161-03-11 11:22:33.123456789	7161-03-11 11:22:33.123456789	7160-08-24 12:37:26.876543211	7160-08-24 12:37:26.876543211	7161-03-11 11:22:33.123456789
-7409-09-07	7409-05-30 12:37:26.876543211	7409-12-15 11:22:33.123456789	7409-12-15 11:22:33.123456789	7409-05-30 12:37:26.876543211	7409-05-30 12:37:26.876543211	7409-12-15 11:22:33.123456789
+6482-04-27	6482-01-17 11:37:26.876543211	6482-08-04 11:22:33.123456789	6482-08-04 11:22:33.123456789	6482-01-17 11:37:26.876543211	6482-01-17 11:37:26.876543211	6482-08-04 11:22:33.123456789
+6631-11-13	6631-08-05 13:37:26.876543211	6632-02-20 11:22:33.123456789	6632-02-20 11:22:33.123456789	6631-08-05 13:37:26.876543211	6631-08-05 13:37:26.876543211	6632-02-20 11:22:33.123456789
+6705-09-28	6705-06-20 12:37:26.876543211	6706-01-05 10:22:33.123456789	6706-01-05 10:22:33.123456789	6705-06-20 12:37:26.876543211	6705-06-20 12:37:26.876543211	6706-01-05 10:22:33.123456789
+6731-02-12	6730-11-04 12:37:26.876543211	6731-05-22 12:22:33.123456789	6731-05-22 12:22:33.123456789	6730-11-04 12:37:26.876543211	6730-11-04 12:37:26.876543211	6731-05-22 12:22:33.123456789
+7160-12-02	7160-08-24 13:37:26.876543211	7161-03-11 11:22:33.123456789	7161-03-11 11:22:33.123456789	7160-08-24 13:37:26.876543211	7160-08-24 13:37:26.876543211	7161-03-11 11:22:33.123456789
+7409-09-07	7409-05-30 12:37:26.876543211	7409-12-15 10:22:33.123456789	7409-12-15 10:22:33.123456789	7409-05-30 12:37:26.876543211	7409-05-30 12:37:26.876543211	7409-12-15 10:22:33.123456789
 7503-06-23	7503-03-15 12:37:26.876543211	7503-09-30 11:22:33.123456789	7503-09-30 11:22:33.123456789	7503-03-15 12:37:26.876543211	7503-03-15 12:37:26.876543211	7503-09-30 11:22:33.123456789
 8422-07-22	8422-04-13 12:37:26.876543211	8422-10-29 11:22:33.123456789	8422-10-29 11:22:33.123456789	8422-04-13 12:37:26.876543211	8422-04-13 12:37:26.876543211	8422-10-29 11:22:33.123456789
-8521-01-16	8520-10-08 12:37:26.876543211	8521-04-25 11:22:33.123456789	8521-04-25 11:22:33.123456789	8520-10-08 12:37:26.876543211	8520-10-08 12:37:26.876543211	8521-04-25 11:22:33.123456789
-9075-06-13	9075-03-05 12:37:26.876543211	9075-09-20 11:22:33.123456789	9075-09-20 11:22:33.123456789	9075-03-05 12:37:26.876543211	9075-03-05 12:37:26.876543211	9075-09-20 11:22:33.123456789
-9209-11-11	9209-08-03 12:37:26.876543211	9210-02-18 11:22:33.123456789	9210-02-18 11:22:33.123456789	9209-08-03 12:37:26.876543211	9209-08-03 12:37:26.876543211	9210-02-18 11:22:33.123456789
-9403-01-09	9402-10-01 12:37:26.876543211	9403-04-18 11:22:33.123456789	9403-04-18 11:22:33.123456789	9402-10-01 12:37:26.876543211	9402-10-01 12:37:26.876543211	9403-04-18 11:22:33.123456789
+8521-01-16	8520-10-08 13:37:26.876543211	8521-04-25 12:22:33.123456789	8521-04-25 12:22:33.123456789	8520-10-08 13:37:26.876543211	8520-10-08 13:37:26.876543211	8521-04-25 12:22:33.123456789
+9075-06-13	9075-03-05 11:37:26.876543211	9075-09-20 11:22:33.123456789	9075-09-20 11:22:33.123456789	9075-03-05 11:37:26.876543211	9075-03-05 11:37:26.876543211	9075-09-20 11:22:33.123456789
+9209-11-11	9209-08-03 13:37:26.876543211	9210-02-18 11:22:33.123456789	9210-02-18 11:22:33.123456789	9209-08-03 13:37:26.876543211	9209-08-03 13:37:26.876543211	9210-02-18 11:22:33.123456789
+9403-01-09	9402-10-01 13:37:26.876543211	9403-04-18 12:22:33.123456789	9403-04-18 12:22:33.123456789	9402-10-01 13:37:26.876543211	9402-10-01 13:37:26.876543211	9403-04-18 12:22:33.123456789
 PREHOOK: query: explain vectorization expression
 select
   dateval,
@@ -1094,50 +1094,50 @@ tsval	_c1	_c2	_c3	_c4	_c5	_c6
 1404-07-23 15:32:16.059185026	1404-04-15 04:09:42.935728237	1404-10-31 02:54:49.182641815	1404-10-31 02:54:49.182641815	1404-04-15 04:09:42.935728237	1404-04-15 04:09:42.935728237	1404-10-31 02:54:49.182641815
 1815-05-06 00:12:37.543584705	1815-01-26 12:50:04.420127916	1815-08-13 11:35:10.667041494	1815-08-13 11:35:10.667041494	1815-01-26 12:50:04.420127916	1815-01-26 12:50:04.420127916	1815-08-13 11:35:10.667041494
 1883-04-17 04:14:34.647766229	1883-01-07 16:52:01.52430944	1883-07-25 15:37:07.771223018	1883-07-25 15:37:07.771223018	1883-01-07 16:52:01.52430944	1883-01-07 16:52:01.52430944	1883-07-25 15:37:07.771223018
-1966-08-16 13:36:50.183618031	1966-05-09 02:14:17.060161242	1966-11-24 00:59:23.30707482	1966-11-24 00:59:23.30707482	1966-05-09 02:14:17.060161242	1966-05-09 02:14:17.060161242	1966-11-24 00:59:23.30707482
-1973-04-17 06:30:38.596784156	1973-01-07 19:08:05.473327367	1973-07-25 17:53:11.720240945	1973-07-25 17:53:11.720240945	1973-01-07 19:08:05.473327367	1973-01-07 19:08:05.473327367	1973-07-25 17:53:11.720240945
-1974-10-04 17:21:03.989	1974-06-27 05:58:30.865543211	1975-01-12 04:43:37.112456789	1975-01-12 04:43:37.112456789	1974-06-27 05:58:30.865543211	1974-06-27 05:58:30.865543211	1975-01-12 04:43:37.112456789
-1976-03-03 04:54:33.000895162	1975-11-24 17:31:59.877438373	1976-06-10 16:17:06.124351951	1976-06-10 16:17:06.124351951	1975-11-24 17:31:59.877438373	1975-11-24 17:31:59.877438373	1976-06-10 16:17:06.124351951
-1976-05-06 00:42:30.910786948	1976-01-27 13:19:57.787330159	1976-08-13 12:05:04.034243737	1976-08-13 12:05:04.034243737	1976-01-27 13:19:57.787330159	1976-01-27 13:19:57.787330159	1976-08-13 12:05:04.034243737
-1978-08-05 14:41:05.501	1978-04-28 03:18:32.377543211	1978-11-13 02:03:38.624456789	1978-11-13 02:03:38.624456789	1978-04-28 03:18:32.377543211	1978-04-28 03:18:32.377543211	1978-11-13 02:03:38.624456789
-1981-04-25 09:01:12.077192689	1981-01-15 21:38:38.9537359	1981-08-02 20:23:45.200649478	1981-08-02 20:23:45.200649478	1981-01-15 21:38:38.9537359	1981-01-15 21:38:38.9537359	1981-08-02 20:23:45.200649478
-1981-11-15 23:03:10.999338387	1981-08-08 11:40:37.875881598	1982-02-23 10:25:44.122795176	1982-02-23 10:25:44.122795176	1981-08-08 11:40:37.875881598	1981-08-08 11:40:37.875881598	1982-02-23 10:25:44.122795176
-1985-07-20 09:30:11	1985-04-11 22:07:37.876543211	1985-10-27 20:52:44.123456789	1985-10-27 20:52:44.123456789	1985-04-11 22:07:37.876543211	1985-04-11 22:07:37.876543211	1985-10-27 20:52:44.123456789
-1985-11-18 16:37:54	1985-08-11 05:15:20.876543211	1986-02-26 04:00:27.123456789	1986-02-26 04:00:27.123456789	1985-08-11 05:15:20.876543211	1985-08-11 05:15:20.876543211	1986-02-26 04:00:27.123456789
-1987-02-21 19:48:29	1986-11-14 08:25:55.876543211	1987-06-01 07:11:02.123456789	1987-06-01 07:11:02.123456789	1986-11-14 08:25:55.876543211	1986-11-14 08:25:55.876543211	1987-06-01 07:11:02.123456789
-1987-05-28 13:52:07.900916635	1987-02-18 02:29:34.777459846	1987-09-05 01:14:41.024373424	1987-09-05 01:14:41.024373424	1987-02-18 02:29:34.777459846	1987-02-18 02:29:34.777459846	1987-09-05 01:14:41.024373424
-1998-10-16 20:05:29.397591987	1998-07-09 08:42:56.274135198	1999-01-24 07:28:02.521048776	1999-01-24 07:28:02.521048776	1998-07-09 08:42:56.274135198	1998-07-09 08:42:56.274135198	1999-01-24 07:28:02.521048776
-1999-10-03 16:59:10.396903939	1999-06-26 05:36:37.27344715	2000-01-11 04:21:43.520360728	2000-01-11 04:21:43.520360728	1999-06-26 05:36:37.27344715	1999-06-26 05:36:37.27344715	2000-01-11 04:21:43.520360728
-2000-12-18 08:42:30.000595596	2000-09-09 21:19:56.877138807	2001-03-27 20:05:03.124052385	2001-03-27 20:05:03.124052385	2000-09-09 21:19:56.877138807	2000-09-09 21:19:56.877138807	2001-03-27 20:05:03.124052385
-2002-05-10 05:29:48.990818073	2002-01-30 18:07:15.867361284	2002-08-17 16:52:22.114274862	2002-08-17 16:52:22.114274862	2002-01-30 18:07:15.867361284	2002-01-30 18:07:15.867361284	2002-08-17 16:52:22.114274862
-2003-09-23 22:33:17.00003252	2003-06-16 11:10:43.876575731	2004-01-01 09:55:50.123489309	2004-01-01 09:55:50.123489309	2003-06-16 11:10:43.876575731	2003-06-16 11:10:43.876575731	2004-01-01 09:55:50.123489309
-2004-03-07 20:14:13	2003-11-29 08:51:39.876543211	2004-06-15 07:36:46.123456789	2004-06-15 07:36:46.123456789	2003-11-29 08:51:39.876543211	2003-11-29 08:51:39.876543211	2004-06-15 07:36:46.123456789
-2007-02-09 05:17:29.368756876	2006-11-01 17:54:56.245300087	2007-05-19 16:40:02.492213665	2007-05-19 16:40:02.492213665	2006-11-01 17:54:56.245300087	2006-11-01 17:54:56.245300087	2007-05-19 16:40:02.492213665
-2009-01-21 10:49:07.108	2008-10-13 23:26:33.984543211	2009-04-30 22:11:40.231456789	2009-04-30 22:11:40.231456789	2008-10-13 23:26:33.984543211	2008-10-13 23:26:33.984543211	2009-04-30 22:11:40.231456789
-2010-04-08 02:43:35.861742727	2009-12-29 15:21:02.738285938	2010-07-16 14:06:08.985199516	2010-07-16 14:06:08.985199516	2009-12-29 15:21:02.738285938	2009-12-29 15:21:02.738285938	2010-07-16 14:06:08.985199516
-2013-04-07 02:44:43.00086821	2012-12-28 15:22:09.877411421	2013-07-15 14:07:16.124324999	2013-07-15 14:07:16.124324999	2012-12-28 15:22:09.877411421	2012-12-28 15:22:09.877411421	2013-07-15 14:07:16.124324999
-2013-04-10 00:43:46.854731546	2012-12-31 13:21:13.731274757	2013-07-18 12:06:19.978188335	2013-07-18 12:06:19.978188335	2012-12-31 13:21:13.731274757	2012-12-31 13:21:13.731274757	2013-07-18 12:06:19.978188335
-2021-09-24 03:18:32.413655165	2021-06-16 15:55:59.290198376	2022-01-01 14:41:05.537111954	2022-01-01 14:41:05.537111954	2021-06-16 15:55:59.290198376	2021-06-16 15:55:59.290198376	2022-01-01 14:41:05.537111954
-2024-11-11 16:42:41.101	2024-08-04 05:20:07.977543211	2025-02-19 04:05:14.224456789	2025-02-19 04:05:14.224456789	2024-08-04 05:20:07.977543211	2024-08-04 05:20:07.977543211	2025-02-19 04:05:14.224456789
+1966-08-16 13:36:50.183618031	1966-05-09 02:14:17.060161242	1966-11-23 23:59:23.30707482	1966-11-23 23:59:23.30707482	1966-05-09 02:14:17.060161242	1966-05-09 02:14:17.060161242	1966-11-23 23:59:23.30707482
+1973-04-17 06:30:38.596784156	1973-01-07 19:08:05.473327367	1973-07-25 18:53:11.720240945	1973-07-25 18:53:11.720240945	1973-01-07 19:08:05.473327367	1973-01-07 19:08:05.473327367	1973-07-25 18:53:11.720240945
+1974-10-04 17:21:03.989	1974-06-27 05:58:30.865543211	1975-01-12 03:43:37.112456789	1975-01-12 03:43:37.112456789	1974-06-27 05:58:30.865543211	1974-06-27 05:58:30.865543211	1975-01-12 03:43:37.112456789
+1976-03-03 04:54:33.000895162	1975-11-24 17:31:59.877438373	1976-06-10 17:17:06.124351951	1976-06-10 17:17:06.124351951	1975-11-24 17:31:59.877438373	1975-11-24 17:31:59.877438373	1976-06-10 17:17:06.124351951
+1976-05-06 00:42:30.910786948	1976-01-27 12:19:57.787330159	1976-08-13 12:05:04.034243737	1976-08-13 12:05:04.034243737	1976-01-27 12:19:57.787330159	1976-01-27 12:19:57.787330159	1976-08-13 12:05:04.034243737
+1978-08-05 14:41:05.501	1978-04-28 02:18:32.377543211	1978-11-13 01:03:38.624456789	1978-11-13 01:03:38.624456789	1978-04-28 02:18:32.377543211	1978-04-28 02:18:32.377543211	1978-11-13 01:03:38.624456789
+1981-04-25 09:01:12.077192689	1981-01-15 21:38:38.9537359	1981-08-02 21:23:45.200649478	1981-08-02 21:23:45.200649478	1981-01-15 21:38:38.9537359	1981-01-15 21:38:38.9537359	1981-08-02 21:23:45.200649478
+1981-11-15 23:03:10.999338387	1981-08-08 12:40:37.875881598	1982-02-23 10:25:44.122795176	1982-02-23 10:25:44.122795176	1981-08-08 12:40:37.875881598	1981-08-08 12:40:37.875881598	1982-02-23 10:25:44.122795176
+1985-07-20 09:30:11	1985-04-11 21:07:37.876543211	1985-10-27 19:52:44.123456789	1985-10-27 19:52:44.123456789	1985-04-11 21:07:37.876543211	1985-04-11 21:07:37.876543211	1985-10-27 19:52:44.123456789
+1985-11-18 16:37:54	1985-08-11 06:15:20.876543211	1986-02-26 04:00:27.123456789	1986-02-26 04:00:27.123456789	1985-08-11 06:15:20.876543211	1985-08-11 06:15:20.876543211	1986-02-26 04:00:27.123456789
+1987-02-21 19:48:29	1986-11-14 08:25:55.876543211	1987-06-01 08:11:02.123456789	1987-06-01 08:11:02.123456789	1986-11-14 08:25:55.876543211	1986-11-14 08:25:55.876543211	1987-06-01 08:11:02.123456789
+1987-05-28 13:52:07.900916635	1987-02-18 01:29:34.777459846	1987-09-05 01:14:41.024373424	1987-09-05 01:14:41.024373424	1987-02-18 01:29:34.777459846	1987-02-18 01:29:34.777459846	1987-09-05 01:14:41.024373424
+1998-10-16 20:05:29.397591987	1998-07-09 08:42:56.274135198	1999-01-24 06:28:02.521048776	1999-01-24 06:28:02.521048776	1998-07-09 08:42:56.274135198	1998-07-09 08:42:56.274135198	1999-01-24 06:28:02.521048776
+1999-10-03 16:59:10.396903939	1999-06-26 05:36:37.27344715	2000-01-11 03:21:43.520360728	2000-01-11 03:21:43.520360728	1999-06-26 05:36:37.27344715	1999-06-26 05:36:37.27344715	2000-01-11 03:21:43.520360728
+2000-12-18 08:42:30.000595596	2000-09-09 22:19:56.877138807	2001-03-27 20:05:03.124052385	2001-03-27 20:05:03.124052385	2000-09-09 22:19:56.877138807	2000-09-09 22:19:56.877138807	2001-03-27 20:05:03.124052385
+2002-05-10 05:29:48.990818073	2002-01-30 17:07:15.867361284	2002-08-17 16:52:22.114274862	2002-08-17 16:52:22.114274862	2002-01-30 17:07:15.867361284	2002-01-30 17:07:15.867361284	2002-08-17 16:52:22.114274862
+2003-09-23 22:33:17.00003252	2003-06-16 11:10:43.876575731	2004-01-01 08:55:50.123489309	2004-01-01 08:55:50.123489309	2003-06-16 11:10:43.876575731	2003-06-16 11:10:43.876575731	2004-01-01 08:55:50.123489309
+2004-03-07 20:14:13	2003-11-29 08:51:39.876543211	2004-06-15 08:36:46.123456789	2004-06-15 08:36:46.123456789	2003-11-29 08:51:39.876543211	2003-11-29 08:51:39.876543211	2004-06-15 08:36:46.123456789
+2007-02-09 05:17:29.368756876	2006-11-01 17:54:56.245300087	2007-05-19 17:40:02.492213665	2007-05-19 17:40:02.492213665	2006-11-01 17:54:56.245300087	2006-11-01 17:54:56.245300087	2007-05-19 17:40:02.492213665
+2009-01-21 10:49:07.108	2008-10-14 00:26:33.984543211	2009-04-30 23:11:40.231456789	2009-04-30 23:11:40.231456789	2008-10-14 00:26:33.984543211	2008-10-14 00:26:33.984543211	2009-04-30 23:11:40.231456789
+2010-04-08 02:43:35.861742727	2009-12-29 14:21:02.738285938	2010-07-16 14:06:08.985199516	2010-07-16 14:06:08.985199516	2009-12-29 14:21:02.738285938	2009-12-29 14:21:02.738285938	2010-07-16 14:06:08.985199516
+2013-04-07 02:44:43.00086821	2012-12-28 14:22:09.877411421	2013-07-15 14:07:16.124324999	2013-07-15 14:07:16.124324999	2012-12-28 14:22:09.877411421	2012-12-28 14:22:09.877411421	2013-07-15 14:07:16.124324999
+2013-04-10 00:43:46.854731546	2012-12-31 12:21:13.731274757	2013-07-18 12:06:19.978188335	2013-07-18 12:06:19.978188335	2012-12-31 12:21:13.731274757	2012-12-31 12:21:13.731274757	2013-07-18 12:06:19.978188335
+2021-09-24 03:18:32.413655165	2021-06-16 15:55:59.290198376	2022-01-01 13:41:05.537111954	2022-01-01 13:41:05.537111954	2021-06-16 15:55:59.290198376	2021-06-16 15:55:59.290198376	2022-01-01 13:41:05.537111954
+2024-11-11 16:42:41.101	2024-08-04 06:20:07.977543211	2025-02-19 04:05:14.224456789	2025-02-19 04:05:14.224456789	2024-08-04 06:20:07.977543211	2024-08-04 06:20:07.977543211	2025-02-19 04:05:14.224456789
 4143-07-08 10:53:27.252802259	4143-03-30 23:30:54.12934547	4143-10-15 22:16:00.376259048	4143-10-15 22:16:00.376259048	4143-03-30 23:30:54.12934547	4143-03-30 23:30:54.12934547	4143-10-15 22:16:00.376259048
-4966-12-04 09:30:55.202	4966-08-26 22:08:22.078543211	4967-03-13 20:53:28.325456789	4967-03-13 20:53:28.325456789	4966-08-26 22:08:22.078543211	4966-08-26 22:08:22.078543211	4967-03-13 20:53:28.325456789
-5339-02-01 14:10:01.085678691	5338-10-25 02:47:27.962221902	5339-05-12 01:32:34.20913548	5339-05-12 01:32:34.20913548	5338-10-25 02:47:27.962221902	5338-10-25 02:47:27.962221902	5339-05-12 01:32:34.20913548
-5344-10-04 18:40:08.165	5344-06-27 07:17:35.041543211	5345-01-12 06:02:41.288456789	5345-01-12 06:02:41.288456789	5344-06-27 07:17:35.041543211	5344-06-27 07:17:35.041543211	5345-01-12 06:02:41.288456789
+4966-12-04 09:30:55.202	4966-08-26 23:08:22.078543211	4967-03-13 21:53:28.325456789	4967-03-13 21:53:28.325456789	4966-08-26 23:08:22.078543211	4966-08-26 23:08:22.078543211	4967-03-13 21:53:28.325456789
+5339-02-01 14:10:01.085678691	5338-10-25 03:47:27.962221902	5339-05-12 02:32:34.20913548	5339-05-12 02:32:34.20913548	5338-10-25 03:47:27.962221902	5338-10-25 03:47:27.962221902	5339-05-12 02:32:34.20913548
+5344-10-04 18:40:08.165	5344-06-27 07:17:35.041543211	5345-01-12 05:02:41.288456789	5345-01-12 05:02:41.288456789	5344-06-27 07:17:35.041543211	5344-06-27 07:17:35.041543211	5345-01-12 05:02:41.288456789
 5397-07-13 07:12:32.000896438	5397-04-04 19:49:58.877439649	5397-10-20 18:35:05.124353227	5397-10-20 18:35:05.124353227	5397-04-04 19:49:58.877439649	5397-04-04 19:49:58.877439649	5397-10-20 18:35:05.124353227
 5966-07-09 03:30:50.597	5966-03-31 16:08:17.473543211	5966-10-16 14:53:23.720456789	5966-10-16 14:53:23.720456789	5966-03-31 16:08:17.473543211	5966-03-31 16:08:17.473543211	5966-10-16 14:53:23.720456789
 6229-06-28 02:54:28.970117179	6229-03-20 15:31:55.84666039	6229-10-05 14:17:02.093573968	6229-10-05 14:17:02.093573968	6229-03-20 15:31:55.84666039	6229-03-20 15:31:55.84666039	6229-10-05 14:17:02.093573968
-6482-04-27 12:07:38.073915413	6482-01-18 00:45:04.950458624	6482-08-04 23:30:11.197372202	6482-08-04 23:30:11.197372202	6482-01-18 00:45:04.950458624	6482-01-18 00:45:04.950458624	6482-08-04 23:30:11.197372202
-6631-11-13 16:31:29.702202248	6631-08-06 05:08:56.578745459	6632-02-21 03:54:02.825659037	6632-02-21 03:54:02.825659037	6631-08-06 05:08:56.578745459	6631-08-06 05:08:56.578745459	6632-02-21 03:54:02.825659037
-6705-09-28 18:27:28.000845672	6705-06-21 07:04:54.877388883	6706-01-06 05:50:01.124302461	6706-01-06 05:50:01.124302461	6705-06-21 07:04:54.877388883	6705-06-21 07:04:54.877388883	6706-01-06 05:50:01.124302461
-6731-02-12 08:12:48.287783702	6730-11-04 20:50:15.164326913	6731-05-22 19:35:21.411240491	6731-05-22 19:35:21.411240491	6730-11-04 20:50:15.164326913	6730-11-04 20:50:15.164326913	6731-05-22 19:35:21.411240491
-7160-12-02 06:00:24.81200852	7160-08-24 18:37:51.688551731	7161-03-11 17:22:57.935465309	7161-03-11 17:22:57.935465309	7160-08-24 18:37:51.688551731	7160-08-24 18:37:51.688551731	7161-03-11 17:22:57.935465309
-7409-09-07 23:33:32.459349602	7409-05-31 12:10:59.335892813	7409-12-16 10:56:05.582806391	7409-12-16 10:56:05.582806391	7409-05-31 12:10:59.335892813	7409-05-31 12:10:59.335892813	7409-12-16 10:56:05.582806391
+6482-04-27 12:07:38.073915413	6482-01-17 23:45:04.950458624	6482-08-04 23:30:11.197372202	6482-08-04 23:30:11.197372202	6482-01-17 23:45:04.950458624	6482-01-17 23:45:04.950458624	6482-08-04 23:30:11.197372202
+6631-11-13 16:31:29.702202248	6631-08-06 06:08:56.578745459	6632-02-21 03:54:02.825659037	6632-02-21 03:54:02.825659037	6631-08-06 06:08:56.578745459	6631-08-06 06:08:56.578745459	6632-02-21 03:54:02.825659037
+6705-09-28 18:27:28.000845672	6705-06-21 07:04:54.877388883	6706-01-06 04:50:01.124302461	6706-01-06 04:50:01.124302461	6705-06-21 07:04:54.877388883	6705-06-21 07:04:54.877388883	6706-01-06 04:50:01.124302461
+6731-02-12 08:12:48.287783702	6730-11-04 20:50:15.164326913	6731-05-22 20:35:21.411240491	6731-05-22 20:35:21.411240491	6730-11-04 20:50:15.164326913	6730-11-04 20:50:15.164326913	6731-05-22 20:35:21.411240491
+7160-12-02 06:00:24.81200852	7160-08-24 19:37:51.688551731	7161-03-11 17:22:57.935465309	7161-03-11 17:22:57.935465309	7160-08-24 19:37:51.688551731	7160-08-24 19:37:51.688551731	7161-03-11 17:22:57.935465309
+7409-09-07 23:33:32.459349602	7409-05-31 12:10:59.335892813	7409-12-16 09:56:05.582806391	7409-12-16 09:56:05.582806391	7409-05-31 12:10:59.335892813	7409-05-31 12:10:59.335892813	7409-12-16 09:56:05.582806391
 7503-06-23 23:14:17.486	7503-03-16 11:51:44.362543211	7503-10-01 10:36:50.609456789	7503-10-01 10:36:50.609456789	7503-03-16 11:51:44.362543211	7503-03-16 11:51:44.362543211	7503-10-01 10:36:50.609456789
 8422-07-22 03:21:45.745036084	8422-04-13 15:59:12.621579295	8422-10-29 14:44:18.868492873	8422-10-29 14:44:18.868492873	8422-04-13 15:59:12.621579295	8422-04-13 15:59:12.621579295	8422-10-29 14:44:18.868492873
-8521-01-16 20:42:05.668832388	8520-10-09 09:19:32.545375599	8521-04-26 08:04:38.792289177	8521-04-26 08:04:38.792289177	8520-10-09 09:19:32.545375599	8520-10-09 09:19:32.545375599	8521-04-26 08:04:38.792289177
-9075-06-13 16:20:09.218517797	9075-03-06 04:57:36.095061008	9075-09-21 03:42:42.341974586	9075-09-21 03:42:42.341974586	9075-03-06 04:57:36.095061008	9075-03-06 04:57:36.095061008	9075-09-21 03:42:42.341974586
-9209-11-11 04:08:58.223768453	9209-08-03 16:46:25.100311664	9210-02-18 15:31:31.347225242	9210-02-18 15:31:31.347225242	9209-08-03 16:46:25.100311664	9209-08-03 16:46:25.100311664	9210-02-18 15:31:31.347225242
-9403-01-09 18:12:33.547	9402-10-02 06:50:00.423543211	9403-04-19 05:35:06.670456789	9403-04-19 05:35:06.670456789	9402-10-02 06:50:00.423543211	9402-10-02 06:50:00.423543211	9403-04-19 05:35:06.670456789
+8521-01-16 20:42:05.668832388	8520-10-09 10:19:32.545375599	8521-04-26 09:04:38.792289177	8521-04-26 09:04:38.792289177	8520-10-09 10:19:32.545375599	8520-10-09 10:19:32.545375599	8521-04-26 09:04:38.792289177
+9075-06-13 16:20:09.218517797	9075-03-06 03:57:36.095061008	9075-09-21 03:42:42.341974586	9075-09-21 03:42:42.341974586	9075-03-06 03:57:36.095061008	9075-03-06 03:57:36.095061008	9075-09-21 03:42:42.341974586
+9209-11-11 04:08:58.223768453	9209-08-03 17:46:25.100311664	9210-02-18 15:31:31.347225242	9210-02-18 15:31:31.347225242	9209-08-03 17:46:25.100311664	9209-08-03 17:46:25.100311664	9210-02-18 15:31:31.347225242
+9403-01-09 18:12:33.547	9402-10-02 07:50:00.423543211	9403-04-19 06:35:06.670456789	9403-04-19 06:35:06.670456789	9402-10-02 07:50:00.423543211	9402-10-02 07:50:00.423543211	9403-04-19 06:35:06.670456789
 PREHOOK: query: explain vectorization expression
 select
   interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vectorization_13.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorization_13.q.out b/ql/src/test/results/clientpositive/vectorization_13.q.out
index 990d754..cae441e 100644
--- a/ql/src/test/results/clientpositive/vectorization_13.q.out
+++ b/ql/src/test/results/clientpositive/vectorization_13.q.out
@@ -24,8 +24,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -57,8 +57,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -87,8 +87,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28789.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28788.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-              predicate: (((UDFToDouble(ctimestamp1) > -28789.0D) and (UDFToDouble(ctimestamp2) <> -28788.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                  predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 11.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val 12.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+              predicate: (((UDFToDouble(ctimestamp1) > 11.0D) and (UDFToDouble(ctimestamp2) <> 12.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
               Statistics: Num rows: 2730 Data size: 646063 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -246,8 +246,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -280,8 +280,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -355,8 +355,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -388,8 +388,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -417,8 +417,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28801.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28801.336)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-              predicate: (((UDFToDouble(ctimestamp1) > -28801.388D) and (UDFToDouble(ctimestamp2) <> -28801.336D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                  predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -1.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -1.3359999999999999)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+              predicate: (((UDFToDouble(ctimestamp1) > -1.388D) and (UDFToDouble(ctimestamp2) <> -1.3359999999999999D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
               Statistics: Num rows: 2730 Data size: 646063 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -563,8 +563,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -597,8 +597,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
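
A note on the shifted filter literals above: each pair differs by exactly 28800 seconds, the eight-hour UTC offset of US Pacific time at the epoch (-28789 = 11 - 28800, -28788 = 12 - 28800, -28801.388 = -1.388 - 28800). The revert changes whether the timestamp-to-double cast is anchored to UTC or to the session's local epoch, so the q.out literals move with it. A minimal standalone check of that arithmetic, assuming the tests run with the usual America/Los_Angeles default zone (plain Java, not Hive code):

  import java.util.TimeZone;
  import java.util.concurrent.TimeUnit;

  public class EpochOffsetDemo {
    public static void main(String[] args) {
      // Assumed test-default zone, not something this diff states explicitly.
      TimeZone tz = TimeZone.getTimeZone("America/Los_Angeles");
      // Zone offset at the epoch instant, in seconds: -28800 (UTC-8, PST).
      long offsetSeconds = TimeUnit.MILLISECONDS.toSeconds(tz.getOffset(0L));
      System.out.println(offsetSeconds);      // -28800
      System.out.println(11 + offsetSeconds); // -28789, the pre-revert literal
    }
  }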

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vectorization_7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorization_7.q.out b/ql/src/test/results/clientpositive/vectorization_7.q.out
index 6b5d40b..a1eb6f1 100644
--- a/ql/src/test/results/clientpositive/vectorization_7.q.out
+++ b/ql/src/test/results/clientpositive/vectorization_7.q.out
@@ -16,11 +16,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -43,11 +43,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -74,8 +74,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28815.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
-              predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28815.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+                  predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -15.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+              predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -15.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
               Statistics: Num rows: 5461 Data size: 1292362 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -155,11 +155,11 @@ PREHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -183,11 +183,11 @@ POSTHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -237,11 +237,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -264,11 +264,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -294,8 +294,8 @@ STAGE PLANS:
               Filter Vectorization:
                   className: VectorFilterOperator
                   native: true
-                  predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28792.315)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
-              predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28792.315D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+                  predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 7.6850000000000005)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+              predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > 7.6850000000000005D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
               Statistics: Num rows: 5461 Data size: 1292362 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -369,11 +369,11 @@ PREHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -397,11 +397,11 @@ POSTHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
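
The two spellings of the same bound in the hunks above (val -28792.315 in the vectorized predicateExpression versus -28792.3149999999999995 in the SQL text) are not a discrepancy: both decimal strings parse to the same double, and Java prints the shortest representation that round-trips. A quick standalone check:

  public class DoubleLiteralDemo {
    public static void main(String[] args) {
      // The long literal is below half an ulp away from -28792.315,
      // so both strings round to the identical double value.
      double d = -28792.3149999999999995;
      System.out.println(d);               // -28792.315
      System.out.println(d == -28792.315); // true
    }
  }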

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out
index 59b20ac..b1f4bdc 100644
--- a/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out
+++ b/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out
@@ -12,9 +12,9 @@ POSTHOOK: Lineage: date_decimal_test.cdate EXPRESSION [(alltypesorc)alltypesorc.
 POSTHOOK: Lineage: date_decimal_test.cdecimal EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
 POSTHOOK: Lineage: date_decimal_test.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
 POSTHOOK: Lineage: date_decimal_test.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
-PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
   enabled: true
@@ -41,12 +41,12 @@ STAGE PLANS:
               predicate: (cdouble is not null and cint is not null) (type: boolean)
               Statistics: Num rows: 12288 Data size: 1651260 Basic stats: COMPLETE Column stats: NONE
               Select Operator
-                expressions: cdate (type: date), cint (type: int), cdecimal (type: decimal(20,10))
-                outputColumnNames: _col0, _col1, _col2
+                expressions: cdate (type: date), cdecimal (type: decimal(20,10))
+                outputColumnNames: _col0, _col1
                 Select Vectorization:
                     className: VectorSelectOperator
                     native: true
-                    projectedOutputColumnNums: [2, 0, 3]
+                    projectedOutputColumnNums: [2, 3]
                 Statistics: Num rows: 12288 Data size: 1651260 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 10
@@ -81,21 +81,21 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+PREHOOK: query: SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@date_decimal_test
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+POSTHOOK: query: SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_decimal_test
 #### A masked pattern was here ####
-1970-01-07	528534767	-7959.5837837838
-1970-01-07	528534767	-2516.4135135135
-1970-01-07	528534767	-9445.0621621622
-1970-01-07	528534767	-5713.7459459459
-1970-01-07	528534767	8963.6405405405
-1970-01-07	528534767	4193.6243243243
-1970-01-07	528534767	2964.3864864865
-1970-01-07	528534767	-4673.2540540541
-1970-01-07	528534767	-9216.8945945946
-1970-01-07	528534767	-9287.3756756757
+1970-01-06	-7959.5837837838
+1970-01-06	-2516.4135135135
+1970-01-06	-9445.0621621622
+1970-01-06	-5713.7459459459
+1970-01-06	8963.6405405405
+1970-01-06	4193.6243243243
+1970-01-06	2964.3864864865
+1970-01-06	-4673.2540540541
+1970-01-06	-9216.8945945946
+1970-01-06	-9287.3756756757
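
The one-day flip in the rows above (1970-01-07 before the revert, 1970-01-06 after) is the classic epoch-day boundary effect: an instant at UTC midnight falls on the previous calendar day in any zone west of UTC, so pinning the day conversion to UTC or to the session zone changes the printed date. A standalone illustration, assuming the tests' America/Los_Angeles default zone (not Hive code):

  import java.time.Instant;
  import java.time.ZoneId;
  import java.time.ZoneOffset;

  public class DayShiftDemo {
    public static void main(String[] args) {
      Instant day6 = Instant.ofEpochSecond(6L * 86400L); // 1970-01-07T00:00:00Z
      // Same instant, two calendar days depending on the zone used.
      System.out.println(day6.atZone(ZoneOffset.UTC).toLocalDate());                   // 1970-01-07
      System.out.println(day6.atZone(ZoneId.of("America/Los_Angeles")).toLocalDate()); // 1970-01-06
    }
  }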


[31/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt
index a67aaa5..6532fcf 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt
@@ -173,7 +173,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum) + ", val " + TimestampUtils.timestampScalarTypeToString(value);
+    return getColumnParamString(0, colNum) + ", val " + value.toString();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt
index 2abec36..dce87f4 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt
@@ -158,7 +158,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum) + ", val " + TimestampUtils.timestampScalarTypeToString(value);
+    return getColumnParamString(0, colNum) + ", val " + value.toString();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
index 8b4480f..9a21cda 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
@@ -36,8 +36,8 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template TimestampScalarArithmeticDateColumnBase.txt.
@@ -104,7 +104,7 @@ public class <ClassName> extends VectorExpression {
     if (inputColVector2.isRepeating) {
       if (inputColVector2.noNulls || !inputIsNull[0]) {
         outputIsNull[0] = false;
-        scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
+        scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
         dtm.<OperatorMethod>(
             value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
         outputColVector.setFromScratch<CamelReturnType>(0);
@@ -126,7 +126,7 @@ public class <ClassName> extends VectorExpression {
            for(int j = 0; j != n; j++) {
              final int i = sel[j];
              outputIsNull[i] = false;
-             scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+             scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
              dtm.<OperatorMethod>(
                  value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
              outputColVector.setFromScratch<CamelReturnType>(i);;
@@ -134,7 +134,7 @@ public class <ClassName> extends VectorExpression {
          } else {
            for(int j = 0; j != n; j++) {
              final int i = sel[j];
-             scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+             scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
              dtm.<OperatorMethod>(
                  value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
              outputColVector.setFromScratch<CamelReturnType>(i);
@@ -149,7 +149,7 @@ public class <ClassName> extends VectorExpression {
           outputColVector.noNulls = true;
         }
         for(int i = 0; i != n; i++) {
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
@@ -166,7 +166,7 @@ public class <ClassName> extends VectorExpression {
           int i = sel[j];
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+            scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
             dtm.<OperatorMethod>(
                 value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
             outputColVector.setFromScratch<CamelReturnType>(i);
@@ -179,7 +179,7 @@ public class <ClassName> extends VectorExpression {
         for(int i = 0; i != n; i++) {
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+            scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
             dtm.<OperatorMethod>(
                 value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
             outputColVector.setFromScratch<CamelReturnType>(i);
@@ -196,7 +196,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return "val " + TimestampUtils.timestampScalarTypeToString(value) + ", " + getColumnParamString(1, colNum);
+    return "val " + value.toString() + ", " + getColumnParamString(1, colNum);
   }
 
   @Override
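
For readers skimming these templates: the repeated isRepeating / noNulls / selectedInUse branches above are the standard Hive vectorized-expression skeleton, specialized so the hot loop carries no per-row checks. A distilled sketch of just that control structure, with illustrative names rather than Hive's actual classes:

  public class VectorLoopDemo {
    public static void main(String[] args) {
      long[] vector = {3, 3, 3, 3};   // stand-in column data
      boolean isRepeating = true;     // whole batch holds one value
      boolean selectedInUse = false;  // no selection vector active
      int[] sel = {};
      int n = vector.length;
      long[] out = new long[n];

      if (isRepeating) {
        out[0] = vector[0] * 2;            // compute once for the whole batch
      } else if (selectedInUse) {
        for (int j = 0; j != n; j++) {     // touch only the selected rows
          int i = sel[j];
          out[i] = vector[i] * 2;
        }
      } else {
        for (int i = 0; i != n; i++) {     // straight dense loop
          out[i] = vector[i] * 2;
        }
      }
      System.out.println(out[0]);          // 6
    }
  }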

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
index bb664cf..dc4f5c8 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
@@ -195,7 +195,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return "val " + TimestampUtils.timestampScalarTypeToString(value) + ", " + getColumnParamString(1, colNum);
+    return "val " + value.toString() + ", " + getColumnParamString(1, colNum);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt
index d3bb4a0..1b1117e 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt
@@ -185,7 +185,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return "val " + TimestampUtils.timestampScalarTypeToString(value) + ", " + getColumnParamString(1, colNum);
+    return "val " + value.toString() + ", " + getColumnParamString(1, colNum);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt
index b99bcf9..c409a6b 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt
@@ -162,7 +162,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return "val " + TimestampUtils.timestampScalarTypeToString(value) + ", "  + getColumnParamString(1, colNum);
+    return "val " + value.toString() + ", "  + getColumnParamString(1, colNum);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt
index 810f31f..abb7b22 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt
@@ -37,7 +37,7 @@ import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.ql.util.TimestampUtils;
 
 import com.google.common.base.Preconditions;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
index 5114cda..579437e 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 /**
 * <ClassName>. Vectorized implementation for MIN/MAX aggregates.

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
index 61fb3d3..a53ff5a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
@@ -51,7 +51,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -319,7 +319,7 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
   private Date readDateValue(String dateStr) {
     // try either yyyy-mm-dd, or integer representing days since epoch
     try {
-      DateWritableV2 writableVal = new DateWritableV2(org.apache.hadoop.hive.common.type.Date.valueOf(dateStr));
+      DateWritable writableVal = new DateWritable(java.sql.Date.valueOf(dateStr));
       return new Date(writableVal.getDays());
     } catch (IllegalArgumentException err) {
       // Fallback to integer parsing
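
For context, the readDateValue method above tries the yyyy-mm-dd form first and only then falls back to an integer day count (the fallback branch is elided in this hunk). A self-contained sketch of that two-format parse; the fallback conversion here uses plain UTC day arithmetic as an approximation, whereas DateWritable applies local-time rules:

  import java.sql.Date;
  import java.util.concurrent.TimeUnit;

  public class DateArgParser {
    static Date readDateValue(String dateStr) {
      try {
        return Date.valueOf(dateStr);                  // yyyy-[m]m-[d]d form
      } catch (IllegalArgumentException err) {
        long days = Long.parseLong(dateStr);           // days since the epoch
        return new Date(TimeUnit.DAYS.toMillis(days)); // UTC approximation
      }
    }

    public static void main(String[] args) {
      System.out.println(readDateValue("2018-06-25"));
      System.out.println(readDateValue("17707"));      // same day, as an epoch-day count
    }
  }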

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 9ddfc06..e77fe18 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -289,15 +289,15 @@ public final class FunctionRegistry {
     system.registerGenericUDF(UNARY_PLUS_FUNC_NAME, GenericUDFOPPositive.class);
     system.registerGenericUDF(UNARY_MINUS_FUNC_NAME, GenericUDFOPNegative.class);
 
-    system.registerGenericUDF("day", UDFDayOfMonth.class);
-    system.registerGenericUDF("dayofmonth", UDFDayOfMonth.class);
+    system.registerUDF("day", UDFDayOfMonth.class, false);
+    system.registerUDF("dayofmonth", UDFDayOfMonth.class, false);
     system.registerUDF("dayofweek", UDFDayOfWeek.class, false);
-    system.registerGenericUDF("month", UDFMonth.class);
+    system.registerUDF("month", UDFMonth.class, false);
     system.registerGenericUDF("quarter", GenericUDFQuarter.class);
-    system.registerGenericUDF("year", UDFYear.class);
-    system.registerGenericUDF("hour", UDFHour.class);
-    system.registerGenericUDF("minute", UDFMinute.class);
-    system.registerGenericUDF("second", UDFSecond.class);
+    system.registerUDF("year", UDFYear.class, false);
+    system.registerUDF("hour", UDFHour.class, false);
+    system.registerUDF("minute", UDFMinute.class, false);
+    system.registerUDF("second", UDFSecond.class, false);
     system.registerUDF("from_unixtime", UDFFromUnixTime.class, false);
     system.registerGenericUDF("to_date", GenericUDFDate.class);
     system.registerUDF("weekofyear", UDFWeekOfYear.class, false);
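
The hunk above swaps several date/time functions from registerGenericUDF back to the bridged registerUDF overload. The difference, roughly: a bridged UDF exposes evaluate() methods that Hive resolves by reflection, while a GenericUDF works against ObjectInspectors in initialize(). A minimal sketch of the bridged style; the class and its parsing logic are illustrative, not Hive's UDFDayOfMonth:

  import org.apache.hadoop.hive.ql.exec.UDF;
  import org.apache.hadoop.io.IntWritable;
  import org.apache.hadoop.io.Text;

  public class ExampleDayUDF extends UDF {
    // Hive finds this evaluate() signature by reflection at call time.
    public IntWritable evaluate(Text dateStr) {
      if (dateStr == null) {
        return null;
      }
      // Naive 'yyyy-MM-dd' day-field extraction, for illustration only.
      return new IntWritable(Integer.parseInt(dateStr.toString().substring(8, 10)));
    }
  }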

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
index 7798652..58252e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
@@ -20,10 +20,9 @@ package org.apache.hadoop.hive.ql.exec.vector;
 
 import java.util.concurrent.TimeUnit;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public final class TimestampUtils {
 
@@ -31,19 +30,13 @@ public final class TimestampUtils {
   static final long NANOSECONDS_PER_MILLISECOND = TimeUnit.MILLISECONDS.toNanos(1);
 
   public static long daysToNanoseconds(long daysSinceEpoch) {
-    return DateWritableV2.daysToMillis((int) daysSinceEpoch) * NANOSECONDS_PER_MILLISECOND;
+    return DateWritable.daysToMillis((int) daysSinceEpoch) * NANOSECONDS_PER_MILLISECOND;
   }
 
-  public static TimestampWritableV2 timestampColumnVectorWritable(
+  public static TimestampWritable timestampColumnVectorWritable(
       TimestampColumnVector timestampColVector, int elementNum,
-      TimestampWritableV2 timestampWritable) {
-    java.sql.Timestamp ts = timestampColVector.asScratchTimestamp(elementNum);
-    if (ts == null) {
-      timestampWritable.set((Timestamp) null);
-      return timestampWritable;
-    }
-    Timestamp newTS = Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
-    timestampWritable.set(newTS);
+      TimestampWritable timestampWritable) {
+    timestampWritable.set(timestampColVector.asScratchTimestamp(elementNum));
     return timestampWritable;
   }
 
@@ -53,14 +46,4 @@ public final class TimestampUtils {
     intervalDayTimeWritable.set(intervalDayTimeColVector.asScratchIntervalDayTime(elementNum));
     return intervalDayTimeWritable;
   }
-
-  public static String timestampScalarTypeToString(Object o) {
-    if (o instanceof java.sql.Timestamp) {
-      // Special handling for timestamp
-      java.sql.Timestamp ts = (java.sql.Timestamp) o;
-      return org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
-          ts.getTime(), ts.getNanos()).toString();
-    }
-    return o.toString();
-  }
 }
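
The daysToNanoseconds helper above is a straight unit conversion (days to millis to nanos), with daysToMillis supplying the local-time day boundary that this revert reinstates. A standalone sketch of the same arithmetic, substituting plain UTC day math for DateWritable.daysToMillis:

  import java.util.concurrent.TimeUnit;

  public class DaysToNanosDemo {
    static final long NANOS_PER_MILLI = TimeUnit.MILLISECONDS.toNanos(1);

    static long daysToNanoseconds(long daysSinceEpoch) {
      long millis = TimeUnit.DAYS.toMillis(daysSinceEpoch); // UTC stand-in for daysToMillis
      return millis * NANOS_PER_MILLI;
    }

    public static void main(String[] args) {
      System.out.println(daysToNanoseconds(1)); // 86400000000000
    }
  }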

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
index d1dcad9..e96619c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
@@ -18,11 +18,11 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
@@ -38,10 +38,10 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.VectorPartitionConversion;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -49,7 +49,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -174,7 +174,7 @@ public class VectorAssignRow {
           ((PrimitiveTypeInfo) targetTypeInfos[logicalColumnIndex]).getPrimitiveCategory();
       switch (targetPrimitiveCategory) {
       case DATE:
-        convertTargetWritables[logicalColumnIndex] = new DateWritableV2();
+        convertTargetWritables[logicalColumnIndex] = new DateWritable();
         break;
       case STRING:
         convertTargetWritables[logicalColumnIndex] = new Text();
@@ -414,19 +414,19 @@ public class VectorAssignRow {
         case TIMESTAMP:
           if (object instanceof Timestamp) {
             ((TimestampColumnVector) columnVector).set(
-                batchIndex, ((Timestamp) object).toSqlTimestamp());
+                batchIndex, ((Timestamp) object));
           } else {
             ((TimestampColumnVector) columnVector).set(
-                batchIndex, ((TimestampWritableV2) object).getTimestamp().toSqlTimestamp());
+                batchIndex, ((TimestampWritable) object).getTimestamp());
           }
           break;
         case DATE:
           if (object instanceof Date) {
             ((LongColumnVector) columnVector).vector[batchIndex] =
-                DateWritableV2.dateToDays((Date) object);
+                DateWritable.dateToDays((Date) object);
           } else {
             ((LongColumnVector) columnVector).vector[batchIndex] =
-               ((DateWritableV2) object).getDays();
+               ((DateWritable) object).getDays();
           }
           break;
         case FLOAT:
@@ -711,7 +711,7 @@ public class VectorAssignRow {
               return;
             }
             ((TimestampColumnVector) columnVector).set(
-                batchIndex, timestamp.toSqlTimestamp());
+                batchIndex, timestamp);
           }
           break;
         case DATE:
@@ -722,9 +722,9 @@ public class VectorAssignRow {
               VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
               return;
             }
-            DateWritableV2 dateWritable = (DateWritableV2) convertTargetWritable;
+            DateWritable dateWritable = (DateWritable) convertTargetWritable;
             if (dateWritable == null) {
-              dateWritable = new DateWritableV2();
+              dateWritable = new DateWritable();
             }
             dateWritable.set(date);
             ((LongColumnVector) columnVector).vector[batchIndex] =
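
As the DATE branches above show, a DATE value lands in a LongColumnVector as a count of days since 1970-01-01 (via DateWritable.dateToDays or getDays). An illustrative stand-in using java.time, which counts days in ISO/UTC terms, whereas DateWritable applies local-time rules:

  import java.time.LocalDate;

  public class DateToDaysDemo {
    static long dateToDays(LocalDate d) {
      return d.toEpochDay(); // days since 1970-01-01 in the ISO calendar
    }

    public static void main(String[] args) {
      System.out.println(dateToDays(LocalDate.of(1970, 1, 7))); // 6
    }
  }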

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java
index 839e1e9..47eaf36 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java
@@ -18,8 +18,9 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -76,9 +77,9 @@ public class VectorBatchDebug {
           } else if (colVector instanceof DecimalColumnVector) {
             sb.append(((DecimalColumnVector) colVector).vector[index].toString());
           } else if (colVector instanceof TimestampColumnVector) {
-            java.sql.Timestamp timestamp = new java.sql.Timestamp(0);
+            Timestamp timestamp = new Timestamp(0);
             ((TimestampColumnVector) colVector).timestampUpdate(timestamp, index);
-            sb.append(Timestamp.ofEpochMilli(timestamp.getTime(), timestamp.getNanos()).toString());
+            sb.append(timestamp.toString());
           } else if (colVector instanceof IntervalDayTimeColumnVector) {
             HiveIntervalDayTime intervalDayTime = ((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(index);
             sb.append(intervalDayTime.toString());
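
The hunk above restores the scratch-object pattern: one mutable java.sql.Timestamp is allocated once and overwritten per row by timestampUpdate, so the debug path avoids a per-row allocation. A toy version of the same reuse pattern, with stand-in column data:

  import java.sql.Timestamp;

  public class ScratchTimestampDemo {
    public static void main(String[] args) {
      Timestamp scratch = new Timestamp(0);     // allocated once
      long[] millis = {0L, 1_000L, 2_000L};     // stand-in for column values
      for (long m : millis) {
        scratch.setTime(m);                     // overwrite in place per row
        System.out.println(scratch);
      }
    }
  }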

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
index 39a124f..c555464 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
+import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -26,10 +27,9 @@ import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -50,6 +50,7 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
+import org.apache.hive.common.util.DateUtils;
 
 /**
  * This class is used as a static factory for VectorColumnAssign.
@@ -169,10 +170,10 @@ public class VectorColumnAssignFactory {
   extends VectorColumnAssignVectorBase<TimestampColumnVector> {
 
     protected void assignTimestamp(Timestamp value, int index) {
-      outCol.set(index, value.toSqlTimestamp());
+      outCol.set(index, value);
     }
-    protected void assignTimestamp(TimestampWritableV2 tw, int index) {
-      outCol.set(index, tw.getTimestamp().toSqlTimestamp());
+    protected void assignTimestamp(TimestampWritable tw, int index) {
+      outCol.set(index, tw.getTimestamp());
     }
   }
 
@@ -341,7 +342,7 @@ public class VectorColumnAssignFactory {
               assignNull(destIndex);
             }
             else {
-              assignTimestamp((TimestampWritableV2) val, destIndex);
+              assignTimestamp((TimestampWritable) val, destIndex);
             }
           }
         }.init(outputBatch, (TimestampColumnVector) destCol);
@@ -354,7 +355,7 @@ public class VectorColumnAssignFactory {
               assignNull(destIndex);
             }
             else {
-              DateWritableV2 bw = (DateWritableV2) val;
+              DateWritable bw = (DateWritable) val;
               assignLong(bw.getDays(), destIndex);
             }
           }
@@ -584,7 +585,7 @@ public class VectorColumnAssignFactory {
         vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.STRING);
       } else if (writables[i] instanceof BytesWritable) {
         vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.BINARY);
-      } else if (writables[i] instanceof TimestampWritableV2) {
+      } else if (writables[i] instanceof TimestampWritable) {
         vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.TIMESTAMP);
       } else if (writables[i] instanceof HiveIntervalYearMonthWritable) {
         vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.INTERVAL_YEAR_MONTH);

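Note on the TIMESTAMP hunks above: before this revert, the vectorized TimestampColumnVector stored java.sql.Timestamp while row-mode code carried org.apache.hadoop.hive.common.type.Timestamp (an epoch-millis value plus a nanosecond-of-second field), hence the toSqlTimestamp() bridge being removed. A minimal sketch of that bridge, assuming the millis/nanos representation; the helper below is illustrative, not Hive's implementation:

    import java.sql.Timestamp;

    public class ToSqlTimestampSketch {
      // Seconds come from the millis value; setNanos then replaces the
      // fractional second wholesale rather than adding to it.
      static Timestamp toSqlTimestamp(long epochMillis, int nanosOfSecond) {
        Timestamp ts = new Timestamp(epochMillis);
        ts.setNanos(nanosOfSecond);
        return ts;
      }

      public static void main(String[] args) {
        // 1.5 seconds past the epoch, printed in the JVM's default zone.
        System.out.println(toSqlTimestamp(1_000L, 500_000_000));
      }
    }
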
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
index 55f3556..8ea625e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
@@ -23,7 +23,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
@@ -32,7 +31,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.VectorPartitionConversion;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -40,6 +39,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -556,7 +556,7 @@ public final class VectorDeserializeRow<T extends DeserializeRead> {
       break;
     case TIMESTAMP:
       ((TimestampColumnVector) colVector).set(
-          batchIndex, deserializeRead.currentTimestampWritable.getTimestamp().toSqlTimestamp());
+          batchIndex, deserializeRead.currentTimestampWritable.getTimestamp());
       break;
     case DATE:
       ((LongColumnVector) colVector).vector[batchIndex] = deserializeRead.currentDateWritable.getDays();
@@ -1079,17 +1079,17 @@ public final class VectorDeserializeRow<T extends DeserializeRead> {
     case TIMESTAMP:
       {
         if (writable == null) {
-          writable = new TimestampWritableV2();
+          writable = new TimestampWritable();
         }
-        ((TimestampWritableV2) writable).set(deserializeRead.currentTimestampWritable);
+        ((TimestampWritable) writable).set(deserializeRead.currentTimestampWritable);
       }
       break;
     case DATE:
       {
         if (writable == null) {
-          writable = new DateWritableV2();
+          writable = new DateWritable();
         }
-        ((DateWritableV2) writable).set(deserializeRead.currentDateWritable);
+        ((DateWritable) writable).set(deserializeRead.currentDateWritable);
       }
       break;
     case FLOAT:

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
index f5f4d72..152d75b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
@@ -23,12 +23,10 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
 import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
@@ -38,6 +36,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -45,6 +44,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -237,14 +237,11 @@ public class VectorExtractRow {
               ((LongColumnVector) colVector).vector[adjustedIndex]);
           return primitiveWritable;
         case TIMESTAMP:
-          // From java.sql.Timestamp used by vectorization to serializable org.apache.hadoop.hive.common.type.Timestamp
-          java.sql.Timestamp ts =
-              ((TimestampColumnVector) colVector).asScratchTimestamp(adjustedIndex);
-          Timestamp serializableTS = Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
-          ((TimestampWritableV2) primitiveWritable).set(serializableTS);
+          ((TimestampWritable) primitiveWritable).set(
+              ((TimestampColumnVector) colVector).asScratchTimestamp(adjustedIndex));
           return primitiveWritable;
         case DATE:
-          ((DateWritableV2) primitiveWritable).set(
+          ((DateWritable) primitiveWritable).set(
               (int) ((LongColumnVector) colVector).vector[adjustedIndex]);
           return primitiveWritable;
         case FLOAT:

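The comment deleted above documented the reverse bridge: decomposing a java.sql.Timestamp into the millis/nanos pair that Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()) consumed. The subtlety is that getTime() already folds the nanos down to millisecond precision, so reassembly must overwrite the fraction, not add to it. A small self-contained check:

    import java.sql.Timestamp;

    public class TimestampRoundTripSketch {
      public static void main(String[] args) {
        Timestamp ts = Timestamp.valueOf("2018-06-25 10:38:09.123456789");
        long millis = ts.getTime(); // fraction truncated to .123
        int nanos = ts.getNanos();  // full 123456789
        Timestamp rebuilt = new Timestamp(millis);
        rebuilt.setNanos(nanos);    // restores nanosecond precision
        System.out.println(ts.equals(rebuilt)); // true
      }
    }
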
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
index 38c31a5..1f46f2c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hive.common.util.Murmur3;
 
 import java.sql.Date;
@@ -31,9 +30,11 @@ import org.apache.hadoop.hive.ql.exec.KeyWrapper;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
 import com.google.common.base.Preconditions;
 
@@ -412,7 +413,7 @@ public class VectorHashKeyWrapper extends KeyWrapper {
           case DATE:
             {
               Date dt = new Date(0);
-              dt.setTime(DateWritableV2.daysToMillis((int) longValues[i]));
+              dt.setTime(DateWritable.daysToMillis((int) longValues[i]));
               sb.append(" date ");
               sb.append(dt.toString());
             }

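For reference, the DateWritable.daysToMillis call in the debug path above widens an epoch-day key back into a printable java.sql.Date. A rough UTC-only approximation (an assumption: the real helper historically also compensates for the JVM's local time zone):

    import java.sql.Date;
    import java.util.concurrent.TimeUnit;

    public class DateKeySketch {
      public static void main(String[] args) {
        long epochDays = 17707; // hypothetical key value
        Date dt = new Date(0);
        dt.setTime(TimeUnit.DAYS.toMillis(epochDays)); // ~ daysToMillis((int) key)
        System.out.println(" date " + dt); // 2018-06-25 UTC; Date.toString is zone dependent
      }
    }
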
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
index 66585af..cb2efb7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -433,10 +432,7 @@ public final class VectorSerializeRow<T extends SerializeWrite> {
       serializeWrite.writeDate((int) ((LongColumnVector) colVector).vector[adjustedBatchIndex]);
       break;
     case TIMESTAMP:
-      // From java.sql.Timestamp used by vectorization to serializable org.apache.hadoop.hive.common.type.Timestamp
-      java.sql.Timestamp ts = ((TimestampColumnVector) colVector).asScratchTimestamp(adjustedBatchIndex);
-      Timestamp serializableTS = Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
-      serializeWrite.writeTimestamp(serializableTS);
+      serializeWrite.writeTimestamp(((TimestampColumnVector) colVector).asScratchTimestamp(adjustedBatchIndex));
       break;
     case FLOAT:
       serializeWrite.writeFloat((float) ((DoubleColumnVector) colVector).vector[adjustedBatchIndex]);

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 5631347..84a0a3a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.exec.vector;
 
 import java.lang.reflect.Constructor;
 import java.nio.charset.StandardCharsets;
+import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -34,7 +35,6 @@ import java.util.TreeSet;
 import java.util.regex.Pattern;
 
 import org.apache.commons.lang.ArrayUtils;
-import org.apache.hadoop.hive.common.type.Date;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
@@ -69,7 +69,7 @@ import org.apache.hadoop.hive.ql.udf.*;
 import org.apache.hadoop.hive.ql.udf.generic.*;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
@@ -1392,10 +1392,9 @@ public class VectorizationContext {
     case INT_FAMILY:
       return new ConstantVectorExpression(outCol, ((Number) constantValue).longValue(), typeInfo);
     case DATE:
-      return new ConstantVectorExpression(outCol, DateWritableV2.dateToDays((Date) constantValue), typeInfo);
+      return new ConstantVectorExpression(outCol, DateWritable.dateToDays((Date) constantValue), typeInfo);
     case TIMESTAMP:
-      return new ConstantVectorExpression(outCol,
-          ((org.apache.hadoop.hive.common.type.Timestamp) constantValue).toSqlTimestamp(), typeInfo);
+      return new ConstantVectorExpression(outCol, (Timestamp) constantValue, typeInfo);
     case INTERVAL_YEAR_MONTH:
       return new ConstantVectorExpression(outCol,
           ((HiveIntervalYearMonth) constantValue).getTotalMonths(), typeInfo);
@@ -2193,10 +2192,10 @@ public class VectorizationContext {
       return InConstantType.INT_FAMILY;
 
     case DATE:
-      return InConstantType.DATE;
+      return InConstantType.DATE;
 
     case TIMESTAMP:
-      return InConstantType.TIMESTAMP;
+      return InConstantType.TIMESTAMP;
 
     case FLOAT:
     case DOUBLE:
@@ -2802,8 +2801,6 @@ public class VectorizationContext {
       return createVectorExpression(CastDecimalToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
     } else if (isDateFamily(inputType)) {
       return createVectorExpression(CastDateToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
-    } else if (isTimestampFamily(inputType)) {
-      return createVectorExpression(CastTimestampToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
     } else if (isStringFamily(inputType)) {
       return createVectorExpression(CastStringGroupToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
     }
@@ -2832,8 +2829,6 @@ public class VectorizationContext {
       return createVectorExpression(CastDecimalToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
     } else if (isDateFamily(inputType)) {
       return createVectorExpression(CastDateToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
-    } else if (isTimestampFamily(inputType)) {
-      return createVectorExpression(CastTimestampToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
     } else if (isStringFamily(inputType)) {
       return createVectorExpression(CastStringGroupToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
     }
@@ -2862,8 +2857,6 @@ public class VectorizationContext {
       return createVectorExpression(CastDecimalToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
     } else if (isDateFamily(inputType)) {
       return createVectorExpression(CastDateToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
-    } else if (isTimestampFamily(inputType)) {
-      return createVectorExpression(CastTimestampToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
     } else if (isStringFamily(inputType)) {
       return createVectorExpression(CastStringGroupToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
     }
@@ -3520,9 +3513,7 @@ public class VectorizationContext {
     Object scalarValue = getScalarValue(constDesc);
     switch (primitiveCategory) {
       case DATE:
-        return new Long(DateWritableV2.dateToDays((Date) scalarValue));
-      case TIMESTAMP:
-        return ((org.apache.hadoop.hive.common.type.Timestamp) scalarValue).toSqlTimestamp();
+        return new Long(DateWritable.dateToDays((Date) scalarValue));
       case INTERVAL_YEAR_MONTH:
         return ((HiveIntervalYearMonth) scalarValue).getTotalMonths();
       default:
@@ -3567,10 +3558,10 @@ public class VectorizationContext {
     Object constant = evaluator.evaluate(null);
     Object java = ObjectInspectorUtils.copyToStandardJavaObject(constant, output);
 
-    if (!(java instanceof org.apache.hadoop.hive.common.type.Timestamp)) {
+    if (!(java instanceof Timestamp)) {
       throw new HiveException("Udf: failed to convert to timestamp");
     }
-    Timestamp ts = ((org.apache.hadoop.hive.common.type.Timestamp) java).toSqlTimestamp();
+    Timestamp ts = (Timestamp) java;
     return ts;
   }
 

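Both constant-folding paths above lean on DateWritable.dateToDays to turn a date literal into the long value a vectorized constant carries. A hedged sketch of that computation; floor division keeps pre-1970 dates on the correct day, and the real helper additionally applies a local-time-zone shift:

    import java.sql.Date;

    public class DateToDaysSketch {
      static int dateToDaysUtc(Date d) {
        return (int) Math.floorDiv(d.getTime(), 86_400_000L);
      }

      public static void main(String[] args) {
        System.out.println(dateToDaysUtc(new Date(0)));   // 0
        System.out.println(dateToDaysUtc(new Date(-1L))); // -1: floor, not truncation
      }
    }
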
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
index 211f452..d51d44a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
@@ -19,22 +19,29 @@
 package org.apache.hadoop.hive.ql.exec.vector;
 
 import java.io.IOException;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -42,15 +49,19 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
@@ -59,6 +70,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.DataOutputBuffer;
@@ -67,8 +79,7 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hive.common.util.DateUtils;
 
 public class VectorizedBatchUtil {
   private static final Logger LOG = LoggerFactory.getLogger(VectorizedBatchUtil.class);
@@ -367,7 +378,7 @@ public class VectorizedBatchUtil {
     case DATE: {
       LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
       if (writableCol != null) {
-        lcv.vector[rowIndex] = ((DateWritableV2) writableCol).getDays();
+        lcv.vector[rowIndex] = ((DateWritable) writableCol).getDays();
         lcv.isNull[rowIndex] = false;
       } else {
         lcv.vector[rowIndex] = 1;
@@ -400,7 +411,7 @@ public class VectorizedBatchUtil {
     case TIMESTAMP: {
       TimestampColumnVector lcv = (TimestampColumnVector) batch.cols[offset + colIndex];
       if (writableCol != null) {
-        lcv.set(rowIndex, ((TimestampWritableV2) writableCol).getTimestamp().toSqlTimestamp());
+        lcv.set(rowIndex, ((TimestampWritable) writableCol).getTimestamp());
         lcv.isNull[rowIndex] = false;
       } else {
         lcv.setNullValue(rowIndex);
@@ -733,7 +744,7 @@ public class VectorizedBatchUtil {
         if (sourceColVector.noNulls) {
           for (int i = 0; i < size; i++) {
             targetTime[i] = sourceTime[i];
-            targetNanos[i] = sourceNanos[i];
+            targetNanos[i] = sourceNanos[i];
           }
         } else {
           boolean[] sourceIsNull = sourceColVector.isNull;
@@ -742,7 +753,7 @@ public class VectorizedBatchUtil {
           for (int i = 0; i < size; i++) {
             if (!sourceIsNull[i]) {
               targetTime[i] = sourceTime[i];
-              targetNanos[i] = sourceNanos[i];
+              targetNanos[i] = sourceNanos[i];
             } else {
               targetTime[i] = 0;
               targetNanos[i] = 0;
@@ -888,9 +899,9 @@ public class VectorizedBatchUtil {
     case LONG:
       return new LongWritable(0);
     case TIMESTAMP:
-      return new TimestampWritableV2(new Timestamp());
+      return new TimestampWritable(new Timestamp(0));
     case DATE:
-      return new DateWritableV2(new Date());
+      return new DateWritable(new Date(0));
     case FLOAT:
       return new FloatWritable(0);
     case DOUBLE:
@@ -965,9 +976,9 @@ public class VectorizedBatchUtil {
           } else if (colVector instanceof DecimalColumnVector) {
             sb.append(((DecimalColumnVector) colVector).vector[index].toString());
           } else if (colVector instanceof TimestampColumnVector) {
-            java.sql.Timestamp timestamp = new java.sql.Timestamp(0);
+            Timestamp timestamp = new Timestamp(0);
             ((TimestampColumnVector) colVector).timestampUpdate(timestamp, index);
-            sb.append(Timestamp.ofEpochMilli(timestamp.getTime(), timestamp.getNanos()).toString());
+            sb.append(timestamp.toString());
           } else if (colVector instanceof IntervalDayTimeColumnVector) {
             HiveIntervalDayTime intervalDayTime = ((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(index);
             sb.append(intervalDayTime.toString());

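The copy loops above follow the usual null-aware pattern for timestamp columns, whose values are split across a millisecond array and a nanosecond array that must move in lockstep. Condensed into one method (array names follow the hunk; vector bookkeeping such as noNulls/isRepeating is omitted):

    public class TimestampCopySketch {
      // sourceIsNull == null models the noNulls fast path.
      static void copyTimestamps(long[] sourceTime, int[] sourceNanos, boolean[] sourceIsNull,
          long[] targetTime, int[] targetNanos, boolean[] targetIsNull, int size) {
        for (int i = 0; i < size; i++) {
          if (sourceIsNull == null || !sourceIsNull[i]) {
            targetTime[i] = sourceTime[i];
            targetNanos[i] = sourceNanos[i];
          } else {
            targetTime[i] = 0;
            targetNanos[i] = 0;
            targetIsNull[i] = true;
          }
        }
      }
    }
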
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
index ca17d4b..ffbfb6f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
@@ -18,38 +18,45 @@
 package org.apache.hadoop.hive.ql.exec.vector;
 
 import java.io.IOException;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.LinkedHashMap;
+import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport.Support;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.IOPrepareCache;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+import org.apache.hadoop.hive.ql.plan.Explain.Vectorization;
+import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.mapred.FileSplit;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hive.common.util.DateUtils;
 
 import com.google.common.base.Preconditions;
 
@@ -480,7 +487,7 @@ public class VectorizedRowBatchCtx {
             lcv.isNull[0] = true;
             lcv.isRepeating = true;
           } else {
-            lcv.fill(DateWritableV2.dateToDays((Date) value));
+            lcv.fill(DateWritable.dateToDays((Date) value));
           }
         }
         break;
@@ -492,7 +499,7 @@ public class VectorizedRowBatchCtx {
             lcv.isNull[0] = true;
             lcv.isRepeating = true;
           } else {
-            lcv.fill(((Timestamp) value).toSqlTimestamp());
+            lcv.fill((Timestamp) value);
           }
         }
         break;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
index dfa9f8a..e559886 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
@@ -19,27 +19,20 @@
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 import java.sql.Date;
-import java.text.SimpleDateFormat;
-import java.util.TimeZone;
 
 public class CastDateToString extends LongToStringUnaryUDF {
   private static final long serialVersionUID = 1L;
   protected transient Date dt = new Date(0);
-  private transient SimpleDateFormat formatter;
 
   public CastDateToString() {
     super();
-    formatter = new SimpleDateFormat("yyyy-MM-dd");
-    formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
   }
 
   public CastDateToString(int inputColumn, int outputColumnNum) {
     super(inputColumn, outputColumnNum);
-    formatter = new SimpleDateFormat("yyyy-MM-dd");
-    formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
   }
 
   // The assign method will be overridden for CHAR and VARCHAR.
@@ -49,8 +42,8 @@ public class CastDateToString extends LongToStringUnaryUDF {
 
   @Override
   protected void func(BytesColumnVector outV, long[] vector, int i) {
-    dt.setTime(DateWritableV2.daysToMillis((int) vector[i]));
-    byte[] temp = formatter.format(dt).getBytes();
+    dt.setTime(DateWritable.daysToMillis((int) vector[i]));
+    byte[] temp = dt.toString().getBytes();
     assign(outV, i, temp, temp.length);
   }
 }

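The constructor change above is behavioral, not cosmetic: the deleted SimpleDateFormat pinned rendering to UTC, whereas Date.toString() uses the JVM's default time zone, so the two can disagree near midnight. A small demonstration:

    import java.sql.Date;
    import java.text.SimpleDateFormat;
    import java.util.TimeZone;

    public class DateRenderSketch {
      public static void main(String[] args) {
        Date d = new Date(86_399_000L); // 1970-01-01T23:59:59Z
        SimpleDateFormat utc = new SimpleDateFormat("yyyy-MM-dd");
        utc.setTimeZone(TimeZone.getTimeZone("UTC"));
        System.out.println(utc.format(d)); // always 1970-01-01
        System.out.println(d);             // 1970-01-02 in any zone east of UTC
      }
    }
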
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
index 37a48e6..dbd7c01 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 public class CastDateToTimestamp extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -45,7 +45,7 @@ public class CastDateToTimestamp extends VectorExpression {
   }
 
   private void setDays(TimestampColumnVector timestampColVector, long[] vector, int elementNum) {
-    timestampColVector.getScratchTimestamp().setTime(DateWritableV2.daysToMillis((int) vector[elementNum]));
+    timestampColVector.getScratchTimestamp().setTime(DateWritable.daysToMillis((int) vector[elementNum]));
     timestampColVector.setFromScratchTimestamp(elementNum);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
index f99bd69..b2185d9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
index ebd14e8..9e0c00e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
@@ -20,8 +20,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.util.Arrays;
 
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public class CastLongToTimestamp extends VectorExpression {
   private static final long serialVersionUID = 1L;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
index bea2240..a9ede6b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
@@ -20,8 +20,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.util.Arrays;
 
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public class CastMillisecondsLongToTimestamp extends VectorExpression {
   private static final long serialVersionUID = 1L;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java
index a6dff12..6edd7b9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java
@@ -18,13 +18,13 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hive.common.util.DateParser;
 
 import java.nio.charset.StandardCharsets;
@@ -38,6 +38,7 @@ public class CastStringToDate extends VectorExpression {
 
   private final int inputColumn;
 
+  private transient final java.sql.Date sqlDate = new java.sql.Date(0);
   private transient final DateParser dateParser = new DateParser();
 
   public CastStringToDate() {
@@ -153,9 +154,8 @@ public class CastStringToDate extends VectorExpression {
 
   private void evaluate(LongColumnVector outputColVector, BytesColumnVector inV, int i) {
     String dateString = new String(inV.vector[i], inV.start[i], inV.length[i], StandardCharsets.UTF_8);
-    Date hDate = new Date();
-    if (dateParser.parseDate(dateString, hDate)) {
-      outputColVector.vector[i] = DateWritableV2.dateToDays(hDate);
+    if (dateParser.parseDate(dateString, sqlDate)) {
+      outputColVector.vector[i] = DateWritable.dateToDays(sqlDate);
       return;
     }
 

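End to end, the cast above runs a UTF-8 slice through DateParser into the reusable scratch java.sql.Date, then collapses it to epoch days. Sketched with JDK parsing only (Date.valueOf stands in for DateParser here, an assumption; the floorDiv day math carries the same local-zone caveat noted earlier):

    import java.sql.Date;

    public class StringToDateSketch {
      public static void main(String[] args) {
        Date scratch = Date.valueOf("2018-06-25"); // parses yyyy-MM-dd at local midnight
        long days = Math.floorDiv(scratch.getTime(), 86_400_000L);
        System.out.println(days); // 17707 for a UTC JVM
      }
    }
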
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToChar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToChar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToChar.java
deleted file mode 100644
index d363ad9..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToChar.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-
-public class CastTimestampToChar extends CastTimestampToString implements TruncStringOutput {
-
-  private static final long serialVersionUID = 1L;
-  private int maxLength; // Must be manually set with setMaxLength.
-
-  public CastTimestampToChar() {
-    super();
-  }
-
-  public CastTimestampToChar(int inputColumn, int outputColumnNum) {
-    super(inputColumn, outputColumnNum);
-  }
-
-  @Override
-  protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
-    StringExpr.rightTrimAndTruncate(outV, i, bytes, 0, length, maxLength);
-  }
-
-  @Override
-  public int getMaxLength() {
-    return maxLength;
-  }
-
-  @Override
-  public void setMaxLength(int maxLength) {
-    this.maxLength = maxLength;
-  }
-
-  public String vectorExpressionParameters() {
-    return getColumnParamString(0, inputColumn) + ", maxLength " + maxLength;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
index ba7e91a..6a41bb0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 /**
  * To be used to cast timestamp to date.
@@ -40,6 +40,6 @@ public class CastTimestampToDate extends FuncTimestampToLong {
   @Override
   protected void func(LongColumnVector outV, TimestampColumnVector inV, int i) {
 
-    outV.vector[i] = DateWritableV2.millisToDays(inV.getTime(i));
+    outV.vector[i] = DateWritable.millisToDays(inV.getTime(i));
   }
 }

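DateWritable.millisToDays is the inverse of the daysToMillis calls above; the detail that matters is floor semantics for pre-epoch instants, where plain integer division would round toward zero and land on the wrong day. An illustrative UTC-only version:

    public class MillisToDaysSketch {
      static int millisToDaysUtc(long millis) {
        return (int) Math.floorDiv(millis, 86_400_000L);
      }

      public static void main(String[] args) {
        System.out.println(millisToDaysUtc(0));   // 0
        System.out.println(millisToDaysUtc(-1L)); // -1, i.e. 1969-12-31
      }
    }
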
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
index 92aade0..ebe18a9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
@@ -18,10 +18,9 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
-import org.apache.hadoop.hive.ql.util.TimestampUtils;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 /**
  * To be used to cast timestamp to decimal.
@@ -40,8 +39,6 @@ public class CastTimestampToDecimal extends FuncTimestampToDecimal {
 
   @Override
   protected void func(DecimalColumnVector outV, TimestampColumnVector inV, int i) {
-    Double timestampDouble = TimestampUtils.getDouble(inV.asScratchTimestamp(i));
-    HiveDecimal result = HiveDecimal.create(timestampDouble.toString());
-    outV.set(i, HiveDecimal.create(timestampDouble.toString()));
+    outV.set(i, TimestampWritable.getHiveDecimal(inV.asScratchTimestamp(i)));
   }
 }

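TimestampWritable.getHiveDecimal yields the exact seconds-with-nanos decimal directly, instead of detouring through a double and its string form as the deleted lines did. The equivalent computation, sketched with BigDecimal standing in for HiveDecimal (an assumption):

    import java.math.BigDecimal;
    import java.sql.Timestamp;

    public class TimestampToDecimalSketch {
      public static void main(String[] args) {
        Timestamp ts = new Timestamp(1_500L); // 1.5 seconds past the epoch
        long seconds = Math.floorDiv(ts.getTime(), 1000L);
        BigDecimal dec = BigDecimal.valueOf(seconds)
            .add(BigDecimal.valueOf(ts.getNanos(), 9)); // nanos as a 9-digit fraction
        System.out.println(dec); // 1.500000000
      }
    }
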
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
index bf5e105..eedde7a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
@@ -20,8 +20,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.util.Arrays;
 
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public class CastTimestampToDouble extends VectorExpression {
   private static final long serialVersionUID = 1L;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java
deleted file mode 100644
index 0e20cf1..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
-
-import java.sql.Timestamp;
-import java.time.Instant;
-import java.time.LocalDateTime;
-import java.time.ZoneOffset;
-import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeFormatterBuilder;
-import java.time.temporal.ChronoField;
-
-public class CastTimestampToString extends TimestampToStringUnaryUDF {
-  private static final long serialVersionUID = 1L;
-  protected transient Timestamp dt = new Timestamp(0);
-  private static final DateTimeFormatter PRINT_FORMATTER;
-
-  static {
-    DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
-    // Date and time parts
-    builder.append(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
-    // Fractional part
-    builder.optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true).optionalEnd();
-    PRINT_FORMATTER = builder.toFormatter();
-  }
-
-  public CastTimestampToString() {
-    super();
-  }
-
-  public CastTimestampToString(int inputColumn, int outputColumnNum) {
-    super(inputColumn, outputColumnNum);
-  }
-
-  // The assign method will be overridden for CHAR and VARCHAR.
-  protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
-    outV.setVal(i, bytes, 0, length);
-  }
-
-  @Override
-  protected void func(BytesColumnVector outV, TimestampColumnVector inV, int i) {
-    dt.setTime(inV.time[i]);
-    dt.setNanos(inV.nanos[i]);
-    byte[] temp = LocalDateTime.ofInstant(Instant.ofEpochMilli(inV.time[i]), ZoneOffset.UTC)
-        .withNano(inV.nanos[i])
-        .format(PRINT_FORMATTER).getBytes();
-    assign(outV, i, temp, temp.length);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToVarChar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToVarChar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToVarChar.java
deleted file mode 100644
index da740fa..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToVarChar.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-
-public class CastTimestampToVarChar extends CastTimestampToString implements TruncStringOutput {
-
-  private static final long serialVersionUID = 1L;
-  private int maxLength; // Must be manually set with setMaxLength.
-
-  public CastTimestampToVarChar() {
-    super();
-  }
-
-  public CastTimestampToVarChar(int inputColumn, int outputColumnNum) {
-    super(inputColumn, outputColumnNum);
-  }
-
-  @Override
-  protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
-    StringExpr.truncate(outV, i, bytes, 0, length, maxLength);
-  }
-
-  @Override
-  public int getMaxLength() {
-    return maxLength;
-  }
-
-  @Override
-  public void setMaxLength(int maxLength) {
-    this.maxLength = maxLength;
-  }
-
-  @Override
-  public String vectorExpressionParameters() {
-    return getColumnParamString(0, inputColumn) + ", maxLength " + maxLength;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
index 8ae8a54..4cc1be5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
@@ -307,8 +307,7 @@ public class ConstantVectorExpression extends VectorExpression {
         value = decimalValue.toString();
         break;
       case TIMESTAMP:
-        value = org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
-            timestampValue.getTime(), timestampValue.getNanos()).toString();
+        value = timestampValue.toString();
         break;
       case INTERVAL_DAY_TIME:
         value = intervalDayTimeValue.toString();

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
index 79d1ad1..d963b87 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
@@ -20,11 +20,14 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.sql.Timestamp;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 // A type date (LongColumnVector storing epoch days) minus a type date produces a
 // type interval_day_time (IntervalDayTimeColumnVector storing nanosecond interval in 2 longs).
@@ -93,38 +96,38 @@ public class DateColSubtractDateColumn extends VectorExpression {
      * conditional checks in the inner loop.
      */
     if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-      scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
-      scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
       dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
       outputColVector.setFromScratchIntervalDayTime(0);
     } else if (inputColVector1.isRepeating) {
-      scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
       }
     } else if (inputColVector2.isRepeating) {
-      scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
@@ -133,15 +136,15 @@ public class DateColSubtractDateColumn extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }

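The header comment above compresses the whole expression: each epoch-day operand is widened to a midnight timestamp, and DateTimeMath.subtract emits a day-time interval. The core arithmetic without the column-vector plumbing (the seconds-plus-nanos layout of HiveIntervalDayTime is assumed from the comment):

    import java.util.concurrent.TimeUnit;

    public class DateSubtractSketch {
      public static void main(String[] args) {
        long days1 = 17707, days2 = 17700; // hypothetical inputs
        long diffMillis = TimeUnit.DAYS.toMillis(days1)
                        - TimeUnit.DAYS.toMillis(days2);
        long totalSeconds = Math.floorDiv(diffMillis, 1000L);
        int nanos = (int) Math.floorMod(diffMillis, 1000L) * 1_000_000;
        System.out.println(totalSeconds + " s " + nanos + " ns"); // 604800 s 0 ns
      }
    }
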
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
index a1745c8..8942b78 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
@@ -21,13 +21,15 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 import java.sql.Timestamp;
 import java.util.Arrays;
 
-import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 // A type date (LongColumnVector storing epoch days) minus a type date produces a
 // type interval_day_time (TimestampColumnVector storing nanosecond interval in 2 longs).
@@ -45,7 +47,7 @@ public class DateColSubtractDateScalar extends VectorExpression {
     super(outputColumnNum);
     this.colNum = colNum;
     this.value = new Timestamp(0);
-    this.value.setTime(DateWritableV2.daysToMillis((int) value));
+    this.value.setTime(DateWritable.daysToMillis((int) value));
   }
 
   public DateColSubtractDateScalar() {
@@ -86,7 +88,7 @@ public class DateColSubtractDateScalar extends VectorExpression {
     if (inputColVector1.isRepeating) {
       if (inputColVector1.noNulls || !inputIsNull[0]) {
         outputIsNull[0] = false;
-        scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
+        scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
         dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
         outputColVector.setFromScratchIntervalDayTime(0);
 
@@ -109,14 +111,14 @@ public class DateColSubtractDateScalar extends VectorExpression {
            final int i = sel[j];
           // Set isNull before call in case it changes its mind.
            outputIsNull[i] = false;
-           scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+           scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
            dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
            outputColVector.setFromScratchIntervalDayTime(i);
          }
         } else {
           for(int j = 0; j != n; j++) {
             final int i = sel[j];
-            scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+            scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
             dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
             outputColVector.setFromScratchIntervalDayTime(i);
           }
@@ -130,7 +132,7 @@ public class DateColSubtractDateScalar extends VectorExpression {
           outputColVector.noNulls = true;
         }
         for(int i = 0; i != n; i++) {
-          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
@@ -144,14 +146,14 @@ public class DateColSubtractDateScalar extends VectorExpression {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           outputIsNull[i] = inputIsNull[i];
-          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
         for(int i = 0; i != n; i++) {
-          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
           outputColVector.setFromScratchIntervalDayTime(i);
         }
@@ -163,7 +165,7 @@ public class DateColSubtractDateScalar extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum) + ", val " + Date.ofEpochMilli(value.getTime());
+    return getColumnParamString(0, colNum) + ", val " + value;
   }
 
   @Override

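The header comment in this file notes that a date minus a date yields an interval_day_time stored as a nanosecond interval in two longs. A rough sketch of that subtraction using only JDK types, with the seconds/nanos split done by hand (DateTimeMath and the interval type are simplified away; this is not the Hive implementation):

import java.sql.Timestamp;

public class DateSubtractSketch {
  public static void main(String[] args) {
    Timestamp left = Timestamp.valueOf("2000-01-03 00:00:00");
    Timestamp right = Timestamp.valueOf("2000-01-01 12:30:00");

    long diffMillis = left.getTime() - right.getTime();
    // interval_day_time keeps whole seconds plus a nanosecond remainder
    long seconds = diffMillis / 1000;
    int nanos = (int) ((diffMillis % 1000) * 1_000_000);

    System.out.println(seconds + " s, " + nanos + " ns"); // 127800 s, 0 ns = 1 day 11:30:00
  }
}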

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/partition_timestamp2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/partition_timestamp2.q.out b/ql/src/test/results/clientpositive/partition_timestamp2.q.out
index 0928abf..3a4770e 100644
--- a/ql/src/test/results/clientpositive/partition_timestamp2.q.out
+++ b/ql/src/test/results/clientpositive/partition_timestamp2.q.out
@@ -17,10 +17,10 @@ insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1999-01-01
 insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1999-01-01 01:00:00', region=2) select *
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 POSTHOOK: query: from (select * from src tablesample (1 rows)) x
 insert overwrite table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 00:00:00', region=1) select *
 insert overwrite table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 01:00:00', region=1) select *
@@ -28,33 +28,33 @@ insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1999-01-01
 insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1999-01-01 01:00:00', region=2) select *
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 00:00:00,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 00:00:00,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 01:00:00,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 01:00:00,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 01:00:00,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 01:00:00,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
+POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 00:00:00.0,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 00:00:00.0,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 01:00:00.0,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1999-01-01 01:00:00.0,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 01:00:00.0,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: select distinct dt from partition_timestamp2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 POSTHOOK: query: select distinct dt from partition_timestamp2_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 1999-01-01 00:00:00
 1999-01-01 01:00:00
@@ -63,18 +63,18 @@ POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/regio
 PREHOOK: query: select * from partition_timestamp2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from partition_timestamp2_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 238	val_238	1999-01-01 00:00:00	2
 238	val_238	1999-01-01 01:00:00	2
@@ -84,29 +84,29 @@ PREHOOK: query: insert overwrite table partition_timestamp2_1 partition(dt=times
   select 'changed_key', 'changed_value' from src tablesample (2 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
+PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
 POSTHOOK: query: insert overwrite table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 00:00:00', region=1)
   select 'changed_key', 'changed_value' from src tablesample (2 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00,region=1).key SIMPLE []
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00,region=1).value SIMPLE []
+POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00.0,region=1).key SIMPLE []
+POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=2000-01-01 00:00:00.0,region=1).value SIMPLE []
 PREHOOK: query: select * from partition_timestamp2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from partition_timestamp2_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 238	val_238	1999-01-01 00:00:00	2
 238	val_238	1999-01-01 01:00:00	2
@@ -115,25 +115,25 @@ changed_key	changed_value	2000-01-01 00:00:00	1
 238	val_238	2000-01-01 01:00:00	1
 PREHOOK: query: truncate table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 00:00:00', region=1)
 PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
+PREHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
 POSTHOOK: query: truncate table partition_timestamp2_1 partition(dt=timestamp '2000-01-01 00:00:00', region=1)
 POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
+POSTHOOK: Output: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
 PREHOOK: query: select distinct dt from partition_timestamp2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 POSTHOOK: query: select distinct dt from partition_timestamp2_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 1999-01-01 00:00:00
 1999-01-01 01:00:00
@@ -141,18 +141,18 @@ POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/regio
 PREHOOK: query: select * from partition_timestamp2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from partition_timestamp2_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 238	val_238	1999-01-01 00:00:00	2
 238	val_238	1999-01-01 01:00:00	2
@@ -163,24 +163,24 @@ PREHOOK: Output: default@partition_timestamp2_1
 POSTHOOK: query: alter table partition_timestamp2_1 add partition (dt=timestamp '1980-01-02 00:00:00', region=3)
 POSTHOOK: type: ALTERTABLE_ADDPARTS
 POSTHOOK: Output: default@partition_timestamp2_1
-POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
+POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
 PREHOOK: query: select distinct dt from partition_timestamp2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 POSTHOOK: query: select distinct dt from partition_timestamp2_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 1999-01-01 00:00:00
 1999-01-01 01:00:00
@@ -188,20 +188,20 @@ POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/regio
 PREHOOK: query: select * from partition_timestamp2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from partition_timestamp2_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 238	val_238	1999-01-01 00:00:00	2
 238	val_238	1999-01-01 01:00:00	2
@@ -209,44 +209,44 @@ POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/regio
 PREHOOK: query: alter table partition_timestamp2_1 drop partition (dt=timestamp '1999-01-01 01:00:00', region=2)
 PREHOOK: type: ALTERTABLE_DROPPARTS
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
+PREHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
 POSTHOOK: query: alter table partition_timestamp2_1 drop partition (dt=timestamp '1999-01-01 01:00:00', region=2)
 POSTHOOK: type: ALTERTABLE_DROPPARTS
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00/region=2
+POSTHOOK: Output: default@partition_timestamp2_1@dt=1999-01-01 01%3A00%3A00.0/region=2
 PREHOOK: query: select distinct dt from partition_timestamp2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 POSTHOOK: query: select distinct dt from partition_timestamp2_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 1999-01-01 00:00:00
 2000-01-01 01:00:00
 PREHOOK: query: select * from partition_timestamp2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from partition_timestamp2_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 238	val_238	1999-01-01 00:00:00	2
 238	val_238	2000-01-01 01:00:00	1
@@ -254,24 +254,24 @@ PREHOOK: query: alter table partition_timestamp2_1 partition(dt=timestamp '1980-
   set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
 PREHOOK: type: ALTERPARTITION_SERIALIZER
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
+PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
 POSTHOOK: query: alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3)
   set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
 POSTHOOK: type: ALTERPARTITION_SERIALIZER
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
 PREHOOK: query: alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3)
   set fileformat rcfile
 PREHOOK: type: ALTERPARTITION_FILEFORMAT
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
+PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
 POSTHOOK: query: alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3)
   set fileformat rcfile
 POSTHOOK: type: ALTERPARTITION_FILEFORMAT
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
 PREHOOK: query: describe extended partition_timestamp2_1  partition(dt=timestamp '1980-01-02 00:00:00', region=3)
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@partition_timestamp2_1
@@ -293,29 +293,29 @@ PREHOOK: query: insert overwrite table partition_timestamp2_1 partition(dt=times
   select * from src tablesample (2 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
-PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
+PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
 POSTHOOK: query: insert overwrite table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3)
   select * from src tablesample (2 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1980-01-02 00:00:00,region=3).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1980-01-02 00:00:00,region=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1980-01-02 00:00:00.0,region=3).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_timestamp2_1 PARTITION(dt=1980-01-02 00:00:00.0,region=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: select * from partition_timestamp2_1 order by key,value,dt,region
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+PREHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+PREHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from partition_timestamp2_1 order by key,value,dt,region
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00/region=2
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00/region=1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1999-01-01 00%3A00%3A00.0/region=2
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 00%3A00%3A00.0/region=1
+POSTHOOK: Input: default@partition_timestamp2_1@dt=2000-01-01 01%3A00%3A00.0/region=1
 #### A masked pattern was here ####
 238	val_238	1980-01-02 00:00:00	3
 238	val_238	1999-01-01 00:00:00	2
@@ -325,14 +325,14 @@ PREHOOK: query: alter table partition_timestamp2_1 partition(dt=timestamp '1980-
 #### A masked pattern was here ####
 PREHOOK: type: ALTERPARTITION_LOCATION
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
+PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
 #### A masked pattern was here ####
 POSTHOOK: query: alter table partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3)
 #### A masked pattern was here ####
 POSTHOOK: type: ALTERPARTITION_LOCATION
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
 #### A masked pattern was here ####
 PREHOOK: query: describe extended partition_timestamp2_1 partition(dt=timestamp '1980-01-02 00:00:00', region=3)
 PREHOOK: type: DESCTABLE
@@ -354,12 +354,12 @@ region              	int
 PREHOOK: query: alter table partition_timestamp2_1 touch partition(dt=timestamp '1980-01-02 00:00:00', region=3)
 PREHOOK: type: ALTERTABLE_TOUCH
 PREHOOK: Input: default@partition_timestamp2_1
-PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
+PREHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
 POSTHOOK: query: alter table partition_timestamp2_1 touch partition(dt=timestamp '1980-01-02 00:00:00', region=3)
 POSTHOOK: type: ALTERTABLE_TOUCH
 POSTHOOK: Input: default@partition_timestamp2_1
-POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
-POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00/region=3
+POSTHOOK: Input: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
+POSTHOOK: Output: default@partition_timestamp2_1@dt=1980-01-02 00%3A00%3A00.0/region=3
 PREHOOK: query: drop table partition_timestamp2_1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@partition_timestamp2_1

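The ".0" suffix reinstated throughout these partition names and lineage entries comes straight from java.sql.Timestamp.toString(), which always prints at least one fractional-second digit, whereas the Hive-native Timestamp removed by this revert omitted a zero fraction. This is observable with the plain JDK:

import java.sql.Timestamp;

public class TimestampToStringCheck {
  public static void main(String[] args) {
    // java.sql.Timestamp always emits a fractional part, even when it is zero
    System.out.println(Timestamp.valueOf("2000-01-01 00:00:00"));
    // prints: 2000-01-01 00:00:00.0
  }
}

The URL-encoded partition directory names (00%3A00%3A00.0) change accordingly, since they are derived from the same string form.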
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query12.q.out b/ql/src/test/results/clientpositive/perf/spark/query12.q.out
index 07fa840..0a0663e 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query12.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query12.q.out
@@ -78,7 +78,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-01-12 00:00:00' AND TIMESTAMP'2001-02-11 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-01-12 00:00:00.0' AND TIMESTAMP'2001-02-11 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query16.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query16.q.out b/ql/src/test/results/clientpositive/perf/spark/query16.q.out
index ddc4609..96a28a4 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query16.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query16.q.out
@@ -99,7 +99,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-04-01 00:00:00' AND TIMESTAMP'2001-05-31 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-04-01 00:00:00.0' AND TIMESTAMP'2001-05-31 01:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)

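Unlike the purely cosmetic ".0" changes above, query16 here (and query21, query32, query40, and query92 below) also shifts a folded BETWEEN bound by a whole hour: 2001-05-31 00:00:00 becomes 2001-05-31 01:00:00.0, and below 1998-03-09 00:00:00 becomes 1998-03-08 23:00:00.0. This is the class of behavior HIVE-12192 targeted: when a date literal is folded to a timestamp through the JVM's default time zone rather than UTC, a daylight-saving or offset difference moves the instant by the zone gap. A minimal sketch of the effect (the zones below are illustrative assumptions, not what the golden files were generated with):

import java.sql.Timestamp;
import java.util.TimeZone;

public class ZoneShiftSketch {
  public static void main(String[] args) {
    // Interpret midnight in a zone one hour ahead of GMT...
    TimeZone.setDefault(TimeZone.getTimeZone("GMT+1"));
    long millis = Timestamp.valueOf("1998-03-09 00:00:00").getTime();

    // ...then render the same instant with GMT as the default zone:
    TimeZone.setDefault(TimeZone.getTimeZone("GMT"));
    System.out.println(new Timestamp(millis)); // 1998-03-08 23:00:00.0
  }
}

Whether the bound drifts forward (query16) or backward (query21) depends only on the sign of the offset difference at that date.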
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query20.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query20.q.out b/ql/src/test/results/clientpositive/perf/spark/query20.q.out
index d2589b3..0544df9 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query20.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query20.q.out
@@ -70,7 +70,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-01-12 00:00:00' AND TIMESTAMP'2001-02-11 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-01-12 00:00:00.0' AND TIMESTAMP'2001-02-11 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query21.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query21.q.out b/ql/src/test/results/clientpositive/perf/spark/query21.q.out
index d02858c..e6db04e 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query21.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query21.q.out
@@ -97,7 +97,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-09 00:00:00' AND TIMESTAMP'1998-05-08 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-08 23:00:00.0' AND TIMESTAMP'1998-05-08 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int), d_date (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query32.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query32.q.out b/ql/src/test/results/clientpositive/perf/spark/query32.q.out
index 4f06700..bb17957 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query32.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query32.q.out
@@ -69,7 +69,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-18 00:00:00' AND TIMESTAMP'1998-06-16 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-18 00:00:00.0' AND TIMESTAMP'1998-06-16 01:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)
@@ -93,7 +93,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-18 00:00:00' AND TIMESTAMP'1998-06-16 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-18 00:00:00.0' AND TIMESTAMP'1998-06-16 01:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query37.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query37.q.out b/ql/src/test/results/clientpositive/perf/spark/query37.q.out
index 96d29f5..07e8eda 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query37.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query37.q.out
@@ -46,7 +46,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-06-02 00:00:00' AND TIMESTAMP'2001-08-01 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-06-02 00:00:00.0' AND TIMESTAMP'2001-08-01 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query40.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query40.q.out b/ql/src/test/results/clientpositive/perf/spark/query40.q.out
index d991e37..cf54ed8 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query40.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query40.q.out
@@ -93,7 +93,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-09 00:00:00' AND TIMESTAMP'1998-05-08 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-08 23:00:00.0' AND TIMESTAMP'1998-05-08 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int), d_date (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query5.q.out b/ql/src/test/results/clientpositive/perf/spark/query5.q.out
index 1b1ff7d..d93e089 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query5.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query5.q.out
@@ -366,7 +366,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-08-18 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-08-18 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)
@@ -479,7 +479,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-08-18 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-08-18 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query77.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query77.q.out b/ql/src/test/results/clientpositive/perf/spark/query77.q.out
index 186de98..e4362a6 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query77.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query77.q.out
@@ -281,7 +281,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-09-03 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-09-03 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)
@@ -360,7 +360,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-09-03 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-09-03 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)
@@ -465,7 +465,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-09-03 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-09-03 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)
@@ -558,7 +558,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-09-03 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-09-03 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)
@@ -576,7 +576,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-09-03 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-09-03 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query80.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query80.q.out b/ql/src/test/results/clientpositive/perf/spark/query80.q.out
index f31bf31..d1710e2 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query80.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query80.q.out
@@ -252,7 +252,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-09-03 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-09-03 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)
@@ -300,7 +300,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-09-03 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-09-03 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)
@@ -367,7 +367,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-09-03 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-09-03 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query82.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query82.q.out b/ql/src/test/results/clientpositive/perf/spark/query82.q.out
index 86465f2..6ccd1cb 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query82.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query82.q.out
@@ -46,7 +46,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2002-05-30 00:00:00' AND TIMESTAMP'2002-07-29 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2002-05-30 00:00:00.0' AND TIMESTAMP'2002-07-29 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query92.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query92.q.out b/ql/src/test/results/clientpositive/perf/spark/query92.q.out
index bab4687..ae63392 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query92.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query92.q.out
@@ -73,7 +73,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-18 00:00:00' AND TIMESTAMP'1998-06-16 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-18 00:00:00.0' AND TIMESTAMP'1998-06-16 01:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)
@@ -97,7 +97,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-18 00:00:00' AND TIMESTAMP'1998-06-16 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-18 00:00:00.0' AND TIMESTAMP'1998-06-16 01:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)
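
The one-hour drift in the "+" lines here (1998-06-16 01:00:00.0, and 1998-03-08 23:00:00.0 in query21/query40 further down) is a daylight-saving artifact: the restored code folds "date +/- N days" into a timestamp constant by adding a fixed N * 24h of epoch milliseconds in the JVM's local zone, and the span crosses the 1998-04-05 US spring-forward gap. A sketch, assuming the plans were generated under America/Los_Angeles (the zone name is an assumption):

    import java.sql.Timestamp;
    import java.util.TimeZone;
    import java.util.concurrent.TimeUnit;

    public class DstDriftDemo {
        public static void main(String[] args) {
            // Assumption: the q.out files were generated under this zone.
            TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));

            // query92-style bound: 1998-03-18 + 90 days crosses the
            // 1998-04-05 spring-forward gap, so midnight drifts to 01:00.
            Timestamp start = Timestamp.valueOf("1998-03-18 00:00:00");
            long plus90d = start.getTime() + TimeUnit.DAYS.toMillis(90);
            System.out.println(new Timestamp(plus90d));  // 1998-06-16 01:00:00.0

            // query21/query40-style bound: 1998-04-08 - 30 days crosses
            // the same gap in reverse, landing at 23:00 the previous day.
            Timestamp apr8 = Timestamp.valueOf("1998-04-08 00:00:00");
            long minus30d = apr8.getTime() - TimeUnit.DAYS.toMillis(30);
            System.out.println(new Timestamp(minus30d)); // 1998-03-08 23:00:00.0
        }
    }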

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query94.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query94.q.out b/ql/src/test/results/clientpositive/perf/spark/query94.q.out
index 86bbc0e..dde0b17 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query94.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query94.q.out
@@ -95,7 +95,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1999-05-01 00:00:00' AND TIMESTAMP'1999-06-30 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1999-05-01 00:00:00.0' AND TIMESTAMP'1999-06-30 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query95.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query95.q.out b/ql/src/test/results/clientpositive/perf/spark/query95.q.out
index 5f31595..b0df142 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query95.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query95.q.out
@@ -101,7 +101,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1999-05-01 00:00:00' AND TIMESTAMP'1999-06-30 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1999-05-01 00:00:00.0' AND TIMESTAMP'1999-06-30 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/spark/query98.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query98.q.out b/ql/src/test/results/clientpositive/perf/spark/query98.q.out
index 56beb49..c69032b 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query98.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query98.q.out
@@ -76,7 +76,7 @@ STAGE PLANS:
                   alias: date_dim
                   Statistics: Num rows: 73049 Data size: 81741831 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-01-12 00:00:00' AND TIMESTAMP'2001-02-11 00:00:00' and d_date_sk is not null) (type: boolean)
+                    predicate: (CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-01-12 00:00:00.0' AND TIMESTAMP'2001-02-11 00:00:00.0' and d_date_sk is not null) (type: boolean)
                     Statistics: Num rows: 8116 Data size: 9081804 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: d_date_sk (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query12.q.out b/ql/src/test/results/clientpositive/perf/tez/query12.q.out
index 4967de6..6e304aa 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query12.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query12.q.out
@@ -130,7 +130,7 @@ Stage-0
                                     Select Operator [SEL_43] (rows=8116 width=1119)
                                       Output:["_col0"]
                                       Filter Operator [FIL_42] (rows=8116 width=1119)
-                                        predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-01-12 00:00:00' AND TIMESTAMP'2001-02-11 00:00:00' and d_date_sk is not null)
+                                        predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-01-12 00:00:00.0' AND TIMESTAMP'2001-02-11 00:00:00.0' and d_date_sk is not null)
                                         TableScan [TS_3] (rows=73049 width=1119)
                                           default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query16.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query16.q.out b/ql/src/test/results/clientpositive/perf/tez/query16.q.out
index d636874..7df8385 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query16.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query16.q.out
@@ -186,7 +186,7 @@ Stage-0
                                                       Select Operator [SEL_88] (rows=8116 width=1119)
                                                         Output:["_col0"]
                                                         Filter Operator [FIL_87] (rows=8116 width=1119)
-                                                          predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-04-01 00:00:00' AND TIMESTAMP'2001-05-31 00:00:00' and d_date_sk is not null)
+                                                          predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-04-01 00:00:00.0' AND TIMESTAMP'2001-05-31 01:00:00.0' and d_date_sk is not null)
                                                           TableScan [TS_3] (rows=73049 width=1119)
                                                             default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query20.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query20.q.out b/ql/src/test/results/clientpositive/perf/tez/query20.q.out
index 0c3bc5e..632133e 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query20.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query20.q.out
@@ -122,7 +122,7 @@ Stage-0
                                     Select Operator [SEL_43] (rows=8116 width=1119)
                                       Output:["_col0"]
                                       Filter Operator [FIL_42] (rows=8116 width=1119)
-                                        predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-01-12 00:00:00' AND TIMESTAMP'2001-02-11 00:00:00' and d_date_sk is not null)
+                                        predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-01-12 00:00:00.0' AND TIMESTAMP'2001-02-11 00:00:00.0' and d_date_sk is not null)
                                         TableScan [TS_3] (rows=73049 width=1119)
                                           default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query21.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query21.q.out b/ql/src/test/results/clientpositive/perf/tez/query21.q.out
index 923e3d0..f741075 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query21.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query21.q.out
@@ -133,7 +133,7 @@ Stage-0
                                     Select Operator [SEL_52] (rows=8116 width=1119)
                                       Output:["_col0","_col1"]
                                       Filter Operator [FIL_51] (rows=8116 width=1119)
-                                        predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-09 00:00:00' AND TIMESTAMP'1998-05-08 00:00:00' and d_date_sk is not null)
+                                        predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-08 23:00:00.0' AND TIMESTAMP'1998-05-08 00:00:00.0' and d_date_sk is not null)
                                         TableScan [TS_3] (rows=73049 width=1119)
                                           default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query32.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query32.q.out b/ql/src/test/results/clientpositive/perf/tez/query32.q.out
index 74740cb..258175f 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query32.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query32.q.out
@@ -101,7 +101,7 @@ Stage-0
                             Select Operator [SEL_66] (rows=8116 width=1119)
                               Output:["_col0"]
                               Filter Operator [FIL_65] (rows=8116 width=1119)
-                                predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-18 00:00:00' AND TIMESTAMP'1998-06-16 00:00:00' and d_date_sk is not null)
+                                predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-18 00:00:00.0' AND TIMESTAMP'1998-06-16 01:00:00.0' and d_date_sk is not null)
                                 TableScan [TS_3] (rows=73049 width=1119)
                                   default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
                     <-Reducer 6 [ONE_TO_ONE_EDGE]

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query37.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query37.q.out b/ql/src/test/results/clientpositive/perf/tez/query37.q.out
index 6112a5a..bbd085e 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query37.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query37.q.out
@@ -97,7 +97,7 @@ Stage-0
                             Select Operator [SEL_55] (rows=8116 width=1119)
                               Output:["_col0"]
                               Filter Operator [FIL_54] (rows=8116 width=1119)
-                                predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-06-02 00:00:00' AND TIMESTAMP'2001-08-01 00:00:00' and d_date_sk is not null)
+                                predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-06-02 00:00:00.0' AND TIMESTAMP'2001-08-01 00:00:00.0' and d_date_sk is not null)
                                 TableScan [TS_9] (rows=73049 width=1119)
                                   default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query40.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query40.q.out b/ql/src/test/results/clientpositive/perf/tez/query40.q.out
index 4c48bd8..fba9e76 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query40.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query40.q.out
@@ -119,7 +119,7 @@ Stage-0
                                   Select Operator [SEL_66] (rows=8116 width=1119)
                                     Output:["_col0","_col1"]
                                     Filter Operator [FIL_65] (rows=8116 width=1119)
-                                      predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-09 00:00:00' AND TIMESTAMP'1998-05-08 00:00:00' and d_date_sk is not null)
+                                      predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-08 23:00:00.0' AND TIMESTAMP'1998-05-08 00:00:00.0' and d_date_sk is not null)
                                       TableScan [TS_6] (rows=73049 width=1119)
                                         default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
                               <-Reducer 2 [SIMPLE_EDGE]

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query5.q.out b/ql/src/test/results/clientpositive/perf/tez/query5.q.out
index 4b63f60..f074ed1 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query5.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query5.q.out
@@ -326,7 +326,7 @@ Stage-0
                                         Select Operator [SEL_186] (rows=8116 width=1119)
                                           Output:["_col0"]
                                           Filter Operator [FIL_185] (rows=8116 width=1119)
-                                            predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-08-18 00:00:00' and d_date_sk is not null)
+                                            predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-08-18 00:00:00.0' and d_date_sk is not null)
                                             TableScan [TS_8] (rows=73049 width=1119)
                                               default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
                                     <-Union 19 [SIMPLE_EDGE]

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query77.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query77.q.out b/ql/src/test/results/clientpositive/perf/tez/query77.q.out
index d8e8d17..66f3d08 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query77.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query77.q.out
@@ -279,7 +279,7 @@ Stage-0
                                         Select Operator [SEL_209] (rows=8116 width=1119)
                                           Output:["_col0"]
                                           Filter Operator [FIL_208] (rows=8116 width=1119)
-                                            predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-09-03 00:00:00' and d_date_sk is not null)
+                                            predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-09-03 00:00:00.0' and d_date_sk is not null)
                                             TableScan [TS_3] (rows=73049 width=1119)
                                               default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
                                     <-Map 27 [SIMPLE_EDGE] vectorized

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query80.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query80.q.out b/ql/src/test/results/clientpositive/perf/tez/query80.q.out
index fb4cd56..6309900 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query80.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query80.q.out
@@ -295,7 +295,7 @@ Stage-0
                                                   Select Operator [SEL_240] (rows=8116 width=1119)
                                                     Output:["_col0"]
                                                     Filter Operator [FIL_239] (rows=8116 width=1119)
-                                                      predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00' AND TIMESTAMP'1998-09-03 00:00:00' and d_date_sk is not null)
+                                                      predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-08-04 00:00:00.0' AND TIMESTAMP'1998-09-03 00:00:00.0' and d_date_sk is not null)
                                                       TableScan [TS_6] (rows=73049 width=1119)
                                                         default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
                                               <-Reducer 27 [SIMPLE_EDGE]

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query82.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query82.q.out b/ql/src/test/results/clientpositive/perf/tez/query82.q.out
index 9a9b707..a0361b1 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query82.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query82.q.out
@@ -97,7 +97,7 @@ Stage-0
                             Select Operator [SEL_55] (rows=8116 width=1119)
                               Output:["_col0"]
                               Filter Operator [FIL_54] (rows=8116 width=1119)
-                                predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2002-05-30 00:00:00' AND TIMESTAMP'2002-07-29 00:00:00' and d_date_sk is not null)
+                                predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2002-05-30 00:00:00.0' AND TIMESTAMP'2002-07-29 00:00:00.0' and d_date_sk is not null)
                                 TableScan [TS_9] (rows=73049 width=1119)
                                   default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query92.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query92.q.out b/ql/src/test/results/clientpositive/perf/tez/query92.q.out
index e94d2f4..be03c4d 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query92.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query92.q.out
@@ -112,7 +112,7 @@ Stage-0
                                   Select Operator [SEL_69] (rows=8116 width=1119)
                                     Output:["_col0"]
                                     Filter Operator [FIL_68] (rows=8116 width=1119)
-                                      predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-18 00:00:00' AND TIMESTAMP'1998-06-16 00:00:00' and d_date_sk is not null)
+                                      predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1998-03-18 00:00:00.0' AND TIMESTAMP'1998-06-16 01:00:00.0' and d_date_sk is not null)
                                       TableScan [TS_3] (rows=73049 width=1119)
                                         default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
                           <-Reducer 7 [ONE_TO_ONE_EDGE]

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query94.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query94.q.out b/ql/src/test/results/clientpositive/perf/tez/query94.q.out
index 565d5b0..66733a8 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query94.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query94.q.out
@@ -182,7 +182,7 @@ Stage-0
                                                       Select Operator [SEL_88] (rows=8116 width=1119)
                                                         Output:["_col0"]
                                                         Filter Operator [FIL_87] (rows=8116 width=1119)
-                                                          predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1999-05-01 00:00:00' AND TIMESTAMP'1999-06-30 00:00:00' and d_date_sk is not null)
+                                                          predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1999-05-01 00:00:00.0' AND TIMESTAMP'1999-06-30 00:00:00.0' and d_date_sk is not null)
                                                           TableScan [TS_3] (rows=73049 width=1119)
                                                             default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query95.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query95.q.out b/ql/src/test/results/clientpositive/perf/tez/query95.q.out
index fdd617d..61b2c81 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query95.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query95.q.out
@@ -225,7 +225,7 @@ Stage-0
                                             Select Operator [SEL_132] (rows=8116 width=1119)
                                               Output:["_col0"]
                                               Filter Operator [FIL_131] (rows=8116 width=1119)
-                                                predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1999-05-01 00:00:00' AND TIMESTAMP'1999-06-30 00:00:00' and d_date_sk is not null)
+                                                predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'1999-05-01 00:00:00.0' AND TIMESTAMP'1999-06-30 00:00:00.0' and d_date_sk is not null)
                                                 TableScan [TS_3] (rows=73049 width=1119)
                                                   default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/perf/tez/query98.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/tez/query98.q.out b/ql/src/test/results/clientpositive/perf/tez/query98.q.out
index ea814da..1c4e58b 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query98.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query98.q.out
@@ -126,7 +126,7 @@ Stage-0
                                   Select Operator [SEL_42] (rows=8116 width=1119)
                                     Output:["_col0"]
                                     Filter Operator [FIL_41] (rows=8116 width=1119)
-                                      predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-01-12 00:00:00' AND TIMESTAMP'2001-02-11 00:00:00' and d_date_sk is not null)
+                                      predicate:(CAST( d_date AS TIMESTAMP) BETWEEN TIMESTAMP'2001-01-12 00:00:00.0' AND TIMESTAMP'2001-02-11 00:00:00.0' and d_date_sk is not null)
                                       TableScan [TS_3] (rows=73049 width=1119)
                                         default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/singletsinsertorc.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/singletsinsertorc.q.out b/ql/src/test/results/clientpositive/singletsinsertorc.q.out
deleted file mode 100644
index 0322eca..0000000
--- a/ql/src/test/results/clientpositive/singletsinsertorc.q.out
+++ /dev/null
@@ -1,28 +0,0 @@
-PREHOOK: query: CREATE TABLE myorctable(ts timestamp)
-STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@myorctable
-POSTHOOK: query: CREATE TABLE myorctable(ts timestamp)
-STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@myorctable
-PREHOOK: query: INSERT INTO myorctable VALUES ('1970-01-01 00:00:00')
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@myorctable
-POSTHOOK: query: INSERT INTO myorctable VALUES ('1970-01-01 00:00:00')
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@myorctable
-POSTHOOK: Lineage: myorctable.ts SCRIPT []
-PREHOOK: query: SELECT * FROM myorctable
-PREHOOK: type: QUERY
-PREHOOK: Input: default@myorctable
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM myorctable
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@myorctable
-#### A masked pattern was here ####
-1970-01-01 00:00:00
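
The deleted singletsinsertorc test wrote the epoch instant into an ORC table and expected it back verbatim; that round trip is only zone-independent when timestamp computations are pinned to UTC, which is exactly what this revert undoes, so the test goes with it. A standalone sketch of the underlying hazard (plain java.sql, not Hive's ORC path):

    import java.sql.Timestamp;
    import java.util.TimeZone;

    public class EpochRoundTripDemo {
        public static void main(String[] args) {
            // Parse the wall-clock string under UTC: epoch millis == 0.
            TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
            long millis = Timestamp.valueOf("1970-01-01 00:00:00").getTime();

            // Re-render the same instant under a different default zone:
            // the wall-clock value shifts by the zone offset.
            TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
            System.out.println(new Timestamp(millis)); // 1969-12-31 16:00:00.0
        }
    }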

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/date_udf.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/date_udf.q.out b/ql/src/test/results/clientpositive/spark/date_udf.q.out
index 7681a50..37ad29e 100644
--- a/ql/src/test/results/clientpositive/spark/date_udf.q.out
+++ b/ql/src/test/results/clientpositive/spark/date_udf.q.out
@@ -74,19 +74,19 @@ POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OV
 POSTHOOK: type: LOAD
 #### A masked pattern was here ####
 POSTHOOK: Output: default@date_udf_flight
-PREHOOK: query: select unix_timestamp(cast(d as timestamp with local time zone)), unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
+PREHOOK: query: select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
     weekofyear(d), to_date(d)
   from date_udf
 PREHOOK: type: QUERY
 PREHOOK: Input: default@date_udf
 #### A masked pattern was here ####
-POSTHOOK: query: select unix_timestamp(cast(d as timestamp with local time zone)), unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
+POSTHOOK: query: select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
     weekofyear(d), to_date(d)
   from date_udf
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_udf
 #### A masked pattern was here ####
-1304665200	1304640000	2011	5	6	6	18	2011-05-06
+1304665200	2011	5	6	6	18	2011-05-06
 PREHOOK: query: select date_add(d, 5), date_sub(d, 10)
   from date_udf
 PREHOOK: type: QUERY
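
Two things change in this hunk: the reverted patch's TIMESTAMP WITH LOCAL TIME ZONE column disappears from the query, and unix_timestamp(d) changes meaning. 1304640000 is midnight 2011-05-06 in UTC (exactly 15100 * 86400 seconds), while the restored value 1304665200 is midnight in the generating zone, 25200 seconds (7 hours, the UTC-7 daylight offset) later. A sketch of the two interpretations, again assuming America/Los_Angeles:

    import java.time.LocalDate;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class UnixTimestampDemo {
        public static void main(String[] args) {
            LocalDate d = LocalDate.of(2011, 5, 6);

            // UTC interpretation (the behavior being reverted).
            long utc = d.atStartOfDay(ZoneOffset.UTC).toEpochSecond();
            System.out.println(utc);    // 1304640000

            // Local-zone interpretation (the behavior being restored).
            long local = d.atStartOfDay(ZoneId.of("America/Los_Angeles"))
                          .toEpochSecond();
            System.out.println(local);  // 1304665200, i.e. utc + 25200
        }
    }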

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/spark/orc_merge5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge5.q.out b/ql/src/test/results/clientpositive/spark/orc_merge5.q.out
index 7cceb88..5033c13 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge5.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge5.q.out
@@ -41,17 +41,17 @@ STAGE PLANS:
                 TableScan
                   alias: orc_merge5_n5
                   filterExpr: (userid <= 13L) (type: boolean)
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (userid <= 13L) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                      Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                             output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -93,8 +93,10 @@ POSTHOOK: query: analyze table orc_merge5b_n0 compute statistics noscan
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5b_n0
 POSTHOOK: Output: default@orc_merge5b_n0
-Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+Found 3 items
+-rw-r--r--   3 ### USER ### ### GROUP ###        667 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###          0 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        623 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select * from orc_merge5b_n0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_merge5b_n0
@@ -130,17 +132,17 @@ STAGE PLANS:
                 TableScan
                   alias: orc_merge5_n5
                   filterExpr: (userid <= 13L) (type: boolean)
-                  Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (userid <= 13L) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(38,0)), ts (type: timestamp)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                      Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
-                        Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                             output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -220,7 +222,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5b_n0
 POSTHOOK: Output: default@orc_merge5b_n0
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###       1054 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select * from orc_merge5b_n0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_merge5b_n0
@@ -253,8 +255,10 @@ POSTHOOK: query: analyze table orc_merge5b_n0 compute statistics noscan
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5b_n0
 POSTHOOK: Output: default@orc_merge5b_n0
-Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+Found 3 items
+-rw-r--r--   3 ### USER ### ### GROUP ###        667 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###          0 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###        623 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select * from orc_merge5b_n0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_merge5b_n0
@@ -309,7 +313,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5b_n0
 POSTHOOK: Output: default@orc_merge5b_n0
 Found 1 items
--rw-r--r--   3 ### USER ### ### GROUP ###        668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r--   3 ### USER ### ### GROUP ###       1054 ### HDFS DATE ### hdfs://### HDFS PATH ###
 PREHOOK: query: select * from orc_merge5b_n0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_merge5b_n0


[21/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out
index eceff3b..8765301 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_part_all_primitive_llap_io.q.out
@@ -375,15 +375,15 @@ POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n3@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	c34	c35	c36	c37	c38	c39	c40	c41	c42	c43	c44	c45	c46	c47	c48	c49	c50	c51	c52	c53	b
 101	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	NULL	-128	-128	-128	-128	-128	-128	-128	-128	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
-101	1	true	NULL	true	NULL	true	true	true	true	true	1	NULL	0	NULL	-1	-1	NULL	-128	-128	-128	-28	1	-128	0	NULL	-1	-1	NULL	NULL	NULL	NULL	31716	1	-128	NULL	NULL	2147483647	2147483647	NULL	-2147483648	-2147483648	-2147483648	1272478692	1	-128	NULL	-2147483648	9223372036854775807	9223372036854775807	NULL	NULL	NULL	NULL	134416464868	original
+101	1	true	NULL	true	NULL	true	true	true	true	true	1	NULL	0	NULL	-1	-1	NULL	-128	-128	-128	84	1	-128	0	NULL	-1	-1	NULL	NULL	NULL	NULL	-8620	1	-128	NULL	NULL	2147483647	2147483647	NULL	-2147483648	-2147483648	-2147483648	1272503892	1	-128	NULL	-2147483648	9223372036854775807	9223372036854775807	NULL	NULL	NULL	NULL	134416490068	original
 102	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	127	127	127	127	127	127	127	127	127	NULL	NULL	NULL	32767	32767	32767	32767	32767	32767	32767	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	NULL	NULL	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	NULL	new
-102	1	true	true	true	true	true	true	true	false	true	0	-1	-1	-1	0	0	NULL	127	127	127	106	0	127	-1	-1	0	0	NULL	32767	32767	32767	-17302	0	127	32767	-1	-2147483648	-2147483648	NULL	2147483647	2147483647	2147483647	1563868266	0	127	32767	2147483647	-9223372036854775808	-9223372036854775808	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117919850	original
+102	1	true	true	true	true	true	true	true	false	true	0	-1	-1	-1	0	0	NULL	127	127	127	-38	0	127	-1	-1	0	0	NULL	32767	32767	32767	7898	0	127	32767	-1	-2147483648	-2147483648	NULL	2147483647	2147483647	2147483647	1563893466	0	127	32767	2147483647	-9223372036854775808	-9223372036854775808	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117945050	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
 104	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	NULL	23	23	23	23	23	23	23	23	23	NULL	NULL	NULL	834	834	834	834	834	834	834	834	NULL	NULL	NULL	NULL	203332	203332	203332	203332	203332	203332	203332	NULL	NULL	NULL	NULL	NULL	888888847499264	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	NULL	new
-104	1	true	true	true	true	true	true	true	true	true	1	66	68	-106	-100	30	NULL	23	23	23	-26	1	23	6724	3734	-100	30	NULL	834	834	834	27366	1	23	834	-1868624234	-100	30	66475	203332	203332	203332	270887654	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270887654	original
+104	1	true	true	true	true	true	true	true	true	true	1	66	68	-106	-100	30	NULL	23	23	23	86	1	23	6724	3734	-100	30	NULL	834	834	834	-12970	1	23	834	-1868624234	-100	30	66475	203332	203332	203332	270912854	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270912854	original
 105	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	-99	-99	-99	-99	-99	-99	-99	-99	-99	NULL	NULL	NULL	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	NULL	NULL	NULL	NULL	-999992	-999992	-999992	-999992	-999992	-999992	-999992	NULL	NULL	NULL	NULL	NULL	-222282153984	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	NULL	new
-105	1	true	true	true	true	NULL	true	true	false	true	0	116	-56	-5	NULL	34	NULL	-99	-99	-99	87	0	-99	-16952	-32517	NULL	-19422	NULL	-28300	-28300	-28300	20055	0	-99	-28300	1056145659	NULL	46114	9250340	-999992	-999992	-999992	663178839	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663178839	original
+105	1	true	true	true	true	NULL	true	true	false	true	0	116	-56	-5	NULL	34	NULL	-99	-99	-99	-41	0	-99	-16952	-32517	NULL	-19422	NULL	-28300	-28300	-28300	-16681	0	-99	-28300	1056145659	NULL	46114	9250340	-999992	-999992	-999992	663207639	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663207639	original
 PREHOOK: query: drop table part_change_various_various_boolean_to_bigint_n3
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_boolean_to_bigint_n3
@@ -676,11 +676,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n3
 POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n3@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	b
-101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416464868.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.34416466E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441646486897012E11	original
-102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117919850.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117921E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117919850597E11	original
+101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416490068.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.3441649E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441649006897012E11	original
+102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117945050.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117945E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117945050597E11	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270887654.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70887648E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35977935791016	30.774	30.774	30.774	2.70887654E8	original
-105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663178839.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6317882E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.631788397203685E8	original
+104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270912854.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70912864E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35977935791016	30.774	30.774	30.774	2.70912854E8	original
+105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663207639.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6320762E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.632076397203685E8	original
 111	1	NULL	NULL	NULL	-46114.000000000000000000	-46114.000000000000000000	-46114.285000000000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	NULL	NULL	NULL	NULL	NULL	NULL	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	NULL	NULL	NULL	NULL	NULL	NULL	-9.0E-8	-9.000000034120603E-8	-9.0E-8	-9.0E-8	-9.0E-8	NULL	new
 PREHOOK: query: drop table part_change_various_various_decimal_to_double_n3
 PREHOOK: type: DROPTABLE
@@ -900,11 +900,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n3
 POSTHOOK: Input: default@part_change_various_various_timestamp_n3@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	b
-101	1	1970-01-01 00:00:00.001	1969-12-31 23:59:59.872	NULL	1969-12-07 03:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
-102	1	1970-01-01 00:00:00	1970-01-01 00:00:00.127	1970-01-01 00:00:32.767	1970-01-25 20:31:23.647	NULL	NULL	1970-01-01 00:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
+101	1	1969-12-31 16:00:00.001	1969-12-31 15:59:59.872	NULL	1969-12-06 19:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
+102	1	1969-12-31 16:00:00	1969-12-31 16:00:00.127	1969-12-31 16:00:32.767	1970-01-25 12:31:23.647	NULL	NULL	1969-12-31 16:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1970-01-01 00:00:00.001	1970-01-01 00:00:00.023	1970-01-01 00:00:00.834	1970-01-01 00:03:23.332	NULL	1969-12-31 23:58:19.640220643	1970-01-01 00:00:30.774	1970-01-01 18:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
-105	1	1970-01-01 00:00:00	1969-12-31 23:59:59.901	1969-12-31 23:59:31.7	1969-12-31 23:43:20.008	1962-12-16 06:57:26.267	NULL	1970-01-01 12:48:34.28	1970-04-18 01:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
+104	1	1969-12-31 16:00:00.001	1969-12-31 16:00:00.023	1969-12-31 16:00:00.834	1969-12-31 16:03:23.332	NULL	1969-12-31 15:58:19.640220643	1969-12-31 16:00:30.774	1970-01-01 10:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
+105	1	1969-12-31 16:00:00	1969-12-31 15:59:59.901	1969-12-31 15:59:31.7	1969-12-31 15:43:20.008	1962-12-15 22:57:26.267	NULL	1970-01-01 04:48:34.28	1970-04-17 17:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_various_various_timestamp_n3
 PREHOOK: type: DROPTABLE
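
Two consistent shifts run through these rows. Wall-clock renderings of small epoch values move back by the local offset in the restored "+" lines (1970-01-01 00:00:00.001 becomes 1969-12-31 16:00:00.001, i.e. PST, UTC-8), while timestamp-to-integer conversions move the other way (c53 goes from 134416464868 to 134416490068, a difference of exactly 25200 seconds, the UTC-7 summer offset at that date). A sketch of the epoch-seconds difference, with America/Los_Angeles again assumed as the generating zone:

    import java.sql.Timestamp;
    import java.time.LocalDateTime;
    import java.time.ZoneOffset;
    import java.util.TimeZone;

    public class EpochShiftDemo {
        public static void main(String[] args) {
            TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));

            // Local-zone semantics (restored): parse the wall-clock value
            // in the default zone, then take epoch seconds.
            long localSecs =
                Timestamp.valueOf("6229-06-28 02:54:28").getTime() / 1000L;

            // UTC semantics (reverted): same wall-clock value pinned to UTC.
            long utcSecs = LocalDateTime.of(6229, 6, 28, 2, 54, 28)
                                        .toEpochSecond(ZoneOffset.UTC);

            System.out.println(localSecs - utcSecs); // 25200 (7 hours)
        }
    }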

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_primitive.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_primitive.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_primitive.q.out
index 9d32b03..9a930be 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_primitive.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_part_all_primitive.q.out
@@ -324,15 +324,15 @@ POSTHOOK: Input: default@part_change_various_various_boolean_to_bigint_n7@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	c34	c35	c36	c37	c38	c39	c40	c41	c42	c43	c44	c45	c46	c47	c48	c49	c50	c51	c52	c53	b
 101	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	NULL	-128	-128	-128	-128	-128	-128	-128	-128	-128	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	-2147483648	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
-101	1	true	NULL	true	NULL	true	true	true	true	true	1	NULL	0	NULL	-1	-1	NULL	-128	-128	-128	-28	1	-128	0	NULL	-1	-1	NULL	NULL	NULL	NULL	31716	1	-128	NULL	NULL	2147483647	2147483647	NULL	-2147483648	-2147483648	-2147483648	1272478692	1	-128	NULL	-2147483648	9223372036854775807	9223372036854775807	NULL	NULL	NULL	NULL	134416464868	original
+101	1	true	NULL	true	NULL	true	true	true	true	true	1	NULL	0	NULL	-1	-1	NULL	-128	-128	-128	84	1	-128	0	NULL	-1	-1	NULL	NULL	NULL	NULL	-8620	1	-128	NULL	NULL	2147483647	2147483647	NULL	-2147483648	-2147483648	-2147483648	1272503892	1	-128	NULL	-2147483648	9223372036854775807	9223372036854775807	NULL	NULL	NULL	NULL	134416490068	original
 102	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	127	127	127	127	127	127	127	127	127	NULL	NULL	NULL	32767	32767	32767	32767	32767	32767	32767	32767	NULL	NULL	NULL	NULL	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	2147483647	NULL	NULL	NULL	NULL	NULL	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	9223372036854775807	NULL	new
-102	1	true	true	true	true	true	true	true	false	true	0	-1	-1	-1	0	0	NULL	127	127	127	106	0	127	-1	-1	0	0	NULL	32767	32767	32767	-17302	0	127	32767	-1	-2147483648	-2147483648	NULL	2147483647	2147483647	2147483647	1563868266	0	127	32767	2147483647	-9223372036854775808	-9223372036854775808	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117919850	original
+102	1	true	true	true	true	true	true	true	false	true	0	-1	-1	-1	0	0	NULL	127	127	127	-38	0	127	-1	-1	0	0	NULL	32767	32767	32767	7898	0	127	32767	-1	-2147483648	-2147483648	NULL	2147483647	2147483647	2147483647	1563893466	0	127	32767	2147483647	-9223372036854775808	-9223372036854775808	NULL	9223372036854775807	9223372036854775807	9223372036854775807	126117945050	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
 104	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	true	NULL	NULL	23	23	23	23	23	23	23	23	23	NULL	NULL	NULL	834	834	834	834	834	834	834	834	NULL	NULL	NULL	NULL	203332	203332	203332	203332	203332	203332	203332	NULL	NULL	NULL	NULL	NULL	888888847499264	888888857923222	888888857923222	888888857923222	888888857923222	888888857923222	NULL	new
-104	1	true	true	true	true	true	true	true	true	true	1	66	68	-106	-100	30	NULL	23	23	23	-26	1	23	6724	3734	-100	30	NULL	834	834	834	27366	1	23	834	-1868624234	-100	30	66475	203332	203332	203332	270887654	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270887654	original
+104	1	true	true	true	true	true	true	true	true	true	1	66	68	-106	-100	30	NULL	23	23	23	86	1	23	6724	3734	-100	30	NULL	834	834	834	-12970	1	23	834	-1868624234	-100	30	66475	203332	203332	203332	270912854	1	23	834	203332	-100	30	66475	888888857923222	888888857923222	888888857923222	270912854	original
 105	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	NULL	-99	-99	-99	-99	-99	-99	-99	-99	-99	NULL	NULL	NULL	-28300	-28300	-28300	-28300	-28300	-28300	-28300	-28300	NULL	NULL	NULL	NULL	-999992	-999992	-999992	-999992	-999992	-999992	-999992	NULL	NULL	NULL	NULL	NULL	-222282153984	-222282153733	-222282153733	-222282153733	-222282153733	-222282153733	NULL	new
-105	1	true	true	true	true	NULL	true	true	false	true	0	116	-56	-5	NULL	34	NULL	-99	-99	-99	87	0	-99	-16952	-32517	NULL	-19422	NULL	-28300	-28300	-28300	20055	0	-99	-28300	1056145659	NULL	46114	9250340	-999992	-999992	-999992	663178839	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663178839	original
+105	1	true	true	true	true	NULL	true	true	false	true	0	116	-56	-5	NULL	34	NULL	-99	-99	-99	-41	0	-99	-16952	-32517	NULL	-19422	NULL	-28300	-28300	-28300	-16681	0	-99	-28300	1056145659	NULL	46114	9250340	-999992	-999992	-999992	663207639	0	-99	-28300	-999992	NULL	46114	9250340	-222282153733	-222282153733	-222282153733	663207639	original
 PREHOOK: query: drop table part_change_various_various_boolean_to_bigint_n7
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@part_change_various_various_boolean_to_bigint_n7
@@ -574,11 +574,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n7
 POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n7@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	c13	c14	c15	c16	c17	c18	c19	c20	c21	c22	c23	c24	c25	c26	c27	c28	c29	c30	c31	c32	c33	b
-101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416464868.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.34416466E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441646486897012E11	original
-102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117919850.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117921E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117919850597E11	original
+101	1	1.000000000000000000	-128.000000000000000000	NULL	-2147483648.000000000000000000	NULL	NULL	NULL	99999999999999999999.999999999999999999	99999999999999999999.999900000000000000	99999999999999999999.999900000000000000	134416490068.970120000000000000	1.0	-128.0	NULL	-2.14748365E9	NULL	1.0E20	Infinity	Infinity	3.4028236E24	3.4028236E24	1.3441649E11	1.0	-128.0	NULL	-2.147483648E9	NULL	1.0E20	Infinity	1.7976931348623157E308	1.7976931348623157E308	1.7976931348623157E308	1.3441649006897012E11	original
+102	1	0.000000000000000000	127.000000000000000000	32767.000000000000000000	2147483647.000000000000000000	9223372036854775807.000000000000000000	NULL	NULL	-99999999999999999999.999999999999999999	-99999999999999999999.999000000000000000	-99999999999999999999.999000000000000000	126117945050.597000000000000000	0.0	127.0	32767.0	2.14748365E9	9.223372E18	-1.0E20	-Infinity	-Infinity	-3.4028233E23	-3.4028233E23	1.26117945E11	0.0	127.0	32767.0	2.147483647E9	9.223372036854776E18	-1.0E20	-Infinity	-1.7976931348623157E308	-1.7976931348623157E308	-1.7976931348623157E308	1.26117945050597E11	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270887654.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70887648E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35977935791016	30.774	30.774	30.774	2.70887654E8	original
-105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663178839.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6317882E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.631788397203685E8	original
+104	1	1.000000000000000000	23.000000000000000000	834.000000000000000000	203332.000000000000000000	888888857923222.000000000000000000	-100.359780000000000000	30.774000000000000000	66475.561431000000000000	66475.561431000000000000	66475.561431000000000000	270912854.000000000000000000	1.0	23.0	834.0	203332.0	8.8888885E14	66475.56	30.774	-100.35978	-100.35978	-100.35978	2.70912864E8	1.0	23.0	834.0	203332.0	8.88888857923222E14	66475.561431	-100.35977935791016	30.774	30.774	30.774	2.70912854E8	original
+105	1	0.000000000000000000	-99.000000000000000000	-28300.000000000000000000	-999992.000000000000000000	-222282153733.000000000000000000	NULL	46114.280000000000000000	9250340.750000000000000000	9250340.750000000000000000	9250340.750000000000000000	663207639.720368500000000000	0.0	-99.0	-28300.0	-999992.0	-2.22282154E11	9250341.0	46114.28	NULL	NULL	NULL	6.6320762E8	0.0	-99.0	-28300.0	-999992.0	-2.22282153733E11	9250340.75	NULL	46114.28	46114.28	46114.28	6.632076397203685E8	original
 111	1	NULL	NULL	NULL	-46114.000000000000000000	-46114.000000000000000000	-46114.285000000000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	-46114.284799488000000000	NULL	NULL	NULL	NULL	NULL	NULL	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	-9.0E-8	NULL	NULL	NULL	NULL	NULL	NULL	-9.0E-8	-9.000000034120603E-8	-9.0E-8	-9.0E-8	-9.0E-8	NULL	new
 PREHOOK: query: drop table part_change_various_various_decimal_to_double_n7
 PREHOOK: type: DROPTABLE
@@ -747,11 +747,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n7
 POSTHOOK: Input: default@part_change_various_various_timestamp_n7@part=1
 #### A masked pattern was here ####
 insert_num	part	c1	c2	c3	c4	c5	c6	c7	c8	c9	c10	c11	c12	b
-101	1	1970-01-01 00:00:00.001	1969-12-31 23:59:59.872	NULL	1969-12-07 03:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
-102	1	1970-01-01 00:00:00	1970-01-01 00:00:00.127	1970-01-01 00:00:32.767	1970-01-25 20:31:23.647	NULL	NULL	1970-01-01 00:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
+101	1	1969-12-31 16:00:00.001	1969-12-31 15:59:59.872	NULL	1969-12-06 19:28:36.352	NULL	NULL	NULL	NULL	6229-06-28 02:54:28.970117179	6229-06-28 02:54:28.97011	6229-06-28 02:54:28.97011	1950-12-18 00:00:00	original
+102	1	1969-12-31 16:00:00	1969-12-31 16:00:00.127	1969-12-31 16:00:32.767	1970-01-25 12:31:23.647	NULL	NULL	1969-12-31 16:00:00	NULL	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	5966-07-09 03:30:50.597	2049-12-18 00:00:00	original
 103	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	original
-104	1	1970-01-01 00:00:00.001	1970-01-01 00:00:00.023	1970-01-01 00:00:00.834	1970-01-01 00:03:23.332	NULL	1969-12-31 23:58:19.640220643	1970-01-01 00:00:30.774	1970-01-01 18:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
-105	1	1970-01-01 00:00:00	1969-12-31 23:59:59.901	1969-12-31 23:59:31.7	1969-12-31 23:43:20.008	1962-12-16 06:57:26.267	NULL	1970-01-01 12:48:34.28	1970-04-18 01:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
+104	1	1969-12-31 16:00:00.001	1969-12-31 16:00:00.023	1969-12-31 16:00:00.834	1969-12-31 16:03:23.332	NULL	1969-12-31 15:58:19.640220643	1969-12-31 16:00:30.774	1970-01-01 10:27:55.561431	1978-08-02 06:34:14	1978-08-02 06:34:14	1978-08-02 06:34:14	2021-09-24 00:00:00	original
+105	1	1969-12-31 16:00:00	1969-12-31 15:59:59.901	1969-12-31 15:59:31.7	1969-12-31 15:43:20.008	1962-12-15 22:57:26.267	NULL	1970-01-01 04:48:34.28	1970-04-17 17:32:20.75	1991-01-06 16:20:39.72036854	1991-01-06 16:20:39.72036	1991-01-06 16:20:39.72036	2024-11-11 00:00:00	original
 111	1	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	new
 PREHOOK: query: drop table part_change_various_various_timestamp_n7
 PREHOOK: type: DROPTABLE

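For context on the golden-file churn above: the pre-revert code rendered timestamps in UTC, while the reverted code renders them in the JVM default zone, and the recurring 8-hour (7-hour under DST) shift suggests the test JVM runs in America/Los_Angeles. A minimal sketch of that rendering difference; the zone is an assumption here, not something the diff states:

    import java.time.Instant;
    import java.time.ZoneId;
    import java.time.format.DateTimeFormatter;

    public class TimestampRendering {
      public static void main(String[] args) {
        // 1 ms after the epoch, i.e. the c1 value of row 101 above.
        Instant epoch = Instant.ofEpochMilli(1L);
        DateTimeFormatter f = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS");
        // UTC rendering (pre-revert): 1970-01-01 00:00:00.001
        System.out.println(f.format(epoch.atZone(ZoneId.of("UTC"))));
        // Local rendering (post-revert, assuming US/Pacific): 1969-12-31 16:00:00.001
        System.out.println(f.format(epoch.atZone(ZoneId.of("America/Los_Angeles"))));
      }
    }
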
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/singletsinsertorc.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/singletsinsertorc.q.out b/ql/src/test/results/clientpositive/llap/singletsinsertorc.q.out
deleted file mode 100644
index 0322eca..0000000
--- a/ql/src/test/results/clientpositive/llap/singletsinsertorc.q.out
+++ /dev/null
@@ -1,28 +0,0 @@
-PREHOOK: query: CREATE TABLE myorctable(ts timestamp)
-STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@myorctable
-POSTHOOK: query: CREATE TABLE myorctable(ts timestamp)
-STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@myorctable
-PREHOOK: query: INSERT INTO myorctable VALUES ('1970-01-01 00:00:00')
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@myorctable
-POSTHOOK: query: INSERT INTO myorctable VALUES ('1970-01-01 00:00:00')
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@myorctable
-POSTHOOK: Lineage: myorctable.ts SCRIPT []
-PREHOOK: query: SELECT * FROM myorctable
-PREHOOK: type: QUERY
-PREHOOK: Input: default@myorctable
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM myorctable
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@myorctable
-#### A masked pattern was here ####
-1970-01-01 00:00:00

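The deleted test above asserted that the literal '1970-01-01 00:00:00' reads back from ORC verbatim, which holds only when the parse and render steps agree on a zone. A sketch of the equivalent java.sql round trip under the reverted (local-zone) semantics:

    public class StringRoundTrip {
      public static void main(String[] args) {
        // Parsed in the JVM default zone, rendered in the same zone, so the
        // string survives even though the underlying millis are zone-dependent.
        java.sql.Timestamp ts = java.sql.Timestamp.valueOf("1970-01-01 00:00:00");
        System.out.println(ts); // 1970-01-01 00:00:00.0
      }
    }
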
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/update_all_types.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/update_all_types.q.out b/ql/src/test/results/clientpositive/llap/update_all_types.q.out
index 9fb8d3f..db9f166 100644
--- a/ql/src/test/results/clientpositive/llap/update_all_types.q.out
+++ b/ql/src/test/results/clientpositive/llap/update_all_types.q.out
@@ -147,7 +147,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@acid_uat
 #### A masked pattern was here ####
 -51	NULL	-1071480828	-1071480828	-1401575336	-51.0	NULL	-51.00	1969-12-31 16:00:08.451	NULL	aw724t8c5558x2xneC624	aw724t8c5558x2xneC624	4uE7l74tESBiKfu7c8wM7GA             	true
-1	2	-1070883071	3	4	3.14	6.28	5.99	2014-09-01 00:00:00	2014-09-01	its a beautiful day in the neighbhorhood	a beautiful day for a neighbor	wont you be mine                    	true
+1	2	-1070883071	3	4	3.14	6.28	5.99	NULL	2014-09-01	its a beautiful day in the neighbhorhood	a beautiful day for a neighbor	wont you be mine                    	true
 11	NULL	-1069736047	-1069736047	-453772520	11.0	NULL	11.00	1969-12-31 16:00:02.351	NULL	k17Am8uPHWk02cEf1jet	k17Am8uPHWk02cEf1jet	qrXLLNX1                            	true
 11	NULL	-1072910839	-1072910839	2048385991	11.0	NULL	11.00	1969-12-31 16:00:02.351	NULL	0iqrc5	0iqrc5	KbaDXiN85adbHRx58v                  	false
 11	NULL	-1073279343	-1073279343	-1595604468	11.0	NULL	11.00	1969-12-31 16:00:02.351	NULL	oj1YrV5Wa	oj1YrV5Wa	P76636jJ6qM17d7DIy                  	true
@@ -181,7 +181,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@acid_uat
 #### A masked pattern was here ####
 -102	-51	-1071480828	-1071480828	-1401575336	-51.0	-51.0	-51.00	1969-12-31 16:00:08.451	NULL	aw724t8c5558x2xneC624	aw724t8c5558x2xneC624	4uE7l74tESBiKfu7c8wM7GA             	true
-1	2	-1070883071	3	4	3.14	6.28	5.99	2014-09-01 00:00:00	2014-09-01	its a beautiful day in the neighbhorhood	a beautiful day for a neighbor	wont you be mine                    	true
+1	2	-1070883071	3	4	3.14	6.28	5.99	NULL	2014-09-01	its a beautiful day in the neighbhorhood	a beautiful day for a neighbor	wont you be mine                    	true
 11	NULL	-1069736047	-1069736047	-453772520	11.0	NULL	11.00	1969-12-31 16:00:02.351	NULL	k17Am8uPHWk02cEf1jet	k17Am8uPHWk02cEf1jet	qrXLLNX1                            	true
 11	NULL	-1072910839	-1072910839	2048385991	11.0	NULL	11.00	1969-12-31 16:00:02.351	NULL	0iqrc5	0iqrc5	KbaDXiN85adbHRx58v                  	false
 11	NULL	-1073279343	-1073279343	-1595604468	11.0	NULL	11.00	1969-12-31 16:00:02.351	NULL	oj1YrV5Wa	oj1YrV5Wa	P76636jJ6qM17d7DIy                  	true

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out b/ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out
index b1fa6a7..ffe3bfb 100644
--- a/ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out
@@ -520,4 +520,4 @@ POSTHOOK: query: select min(ts), max(ts), sum(ts), avg(ts) from vectortab2korc_n
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@vectortab2korc_n4
 #### A masked pattern was here ####
-2013-02-18 21:06:48	2081-02-22 01:21:53	4.591334884281E12	2.4254278311045957E9
+2013-02-18 21:06:48	2081-02-22 01:21:53	4.591384881081E12	2.4254542425150557E9

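Note that min(ts) and max(ts) above are unchanged (parse-then-print round-trips in one zone), while sum(ts) and avg(ts) move because they aggregate the underlying numeric seconds, which shift by each row's UTC offset when parsing switches from UTC to local. A back-of-envelope consistency check, assuming US/Pacific; the row counts are derived here, not read from the test:

    public class AggregateDelta {
      public static void main(String[] args) {
        long sumDelta = 4_591_384_881_081L - 4_591_334_884_281L;        // 49_996_800 s
        double avgDelta = 2.4254542425150557E9 - 2.4254278311045957E9; // ~26_411.41 s
        long rows = Math.round(sumDelta / avgDelta);                   // ~1893 non-NULL rows
        // One decomposition consistent with mixed DST offsets:
        // 637 rows * 28_800 s (PST) + 1_256 rows * 25_200 s (PDT) = 49_996_800 s
        System.out.println(637L * 28_800 + 1_256L * 25_200 == sumDelta); // true
        System.out.println(rows);                                        // 1893
      }
    }
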
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_between_in.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_between_in.q.out b/ql/src/test/results/clientpositive/llap/vector_between_in.q.out
index 26dae0b..7355ed8 100644
--- a/ql/src/test/results/clientpositive/llap/vector_between_in.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_between_in.q.out
@@ -859,7 +859,7 @@ POSTHOOK: query: SELECT COUNT(*) FROM decimal_date_test WHERE cdate NOT IN (CAST
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_date_test
 #### A masked pattern was here ####
-6022
+6026
 PREHOOK: query: SELECT cdecimal1 FROM decimal_date_test WHERE cdecimal1 IN (2365.8945945946, 881.0135135135, -3367.6517567568) ORDER BY cdecimal1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_date_test
@@ -903,7 +903,36 @@ POSTHOOK: Input: default@decimal_date_test
 1969-12-30
 1969-12-30
 1969-12-30
-1969-12-30
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
+1969-12-31
 1969-12-31
 1969-12-31
 1969-12-31
@@ -932,37 +961,8 @@ POSTHOOK: Input: default@decimal_date_test
 1970-01-01
 1970-01-01
 1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
-1970-01-01
+1970-01-02
+1970-01-02
 1970-01-02
 1970-01-02
 1970-01-02
@@ -986,40 +986,40 @@ POSTHOOK: query: SELECT cdate FROM decimal_date_test WHERE cdate NOT BETWEEN CAS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_date_test
 #### A masked pattern was here ####
-1968-04-07
+1968-04-06
 1968-04-08
 1968-04-09
 1968-04-13
-1968-04-16
-1968-04-16
+1968-04-15
+1968-04-15
 1968-04-18
 1968-04-22
+1968-04-24
 1968-04-25
-1968-04-25
-1968-04-27
-1968-04-27
-1968-04-27
+1968-04-26
+1968-04-26
+1968-04-26
+1968-04-28
 1968-04-28
 1968-04-28
 1968-04-28
-1968-04-29
 1968-04-29
 1968-04-30
 1971-09-02
-1971-09-05
+1971-09-04
 1971-09-06
 1971-09-06
 1971-09-06
 1971-09-09
 1971-09-09
 1971-09-15
+1971-09-17
 1971-09-18
-1971-09-19
+1971-09-21
 1971-09-21
 1971-09-21
 1971-09-22
 1971-09-22
-1971-09-23
 1971-09-25
 PREHOOK: query: SELECT cdecimal1 FROM decimal_date_test WHERE cdecimal1 BETWEEN -20 AND 45.9918918919 ORDER BY cdecimal1
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out b/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out
index 4227ba9..e47c118 100644
--- a/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out
@@ -318,7 +318,7 @@ order by k
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hundredorc
 #### A masked pattern was here ####
--8303557760
+-27832781952
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION
 SELECT count(*), bin
 FROM hundredorc


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
index 6e7d7e6..fabbfba 100644
--- a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
@@ -97,26 +97,26 @@ POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-0	val_0	2012-02-23 01:15:54
-119	val_119	2012-02-23 01:15:54
-136	val_136	2012-02-23 01:15:54
-153	val_153	2012-02-23 01:15:54
-17	val_17	2012-02-23 01:15:54
-170	val_170	2012-02-23 01:15:54
-187	val_187	2012-02-23 01:15:54
-221	val_221	2012-02-23 01:15:54
-238	val_238	2012-02-23 01:15:54
-255	val_255	2012-02-23 01:15:54
-272	val_272	2012-02-23 01:15:54
-289	val_289	2012-02-23 01:15:54
-306	val_306	2012-02-23 01:15:54
-323	val_323	2012-02-23 01:15:54
-34	val_34	2012-02-23 01:15:54
-374	val_374	2012-02-23 01:15:54
-459	val_459	2012-02-23 01:15:54
-493	val_493	2012-02-23 01:15:54
-51	val_51	2012-02-23 01:15:54
-85	val_85	2012-02-23 01:15:54
+0	val_0	2012-02-22 17:15:54
+119	val_119	2012-02-22 17:15:54
+136	val_136	2012-02-22 17:15:54
+153	val_153	2012-02-22 17:15:54
+17	val_17	2012-02-22 17:15:54
+170	val_170	2012-02-22 17:15:54
+187	val_187	2012-02-22 17:15:54
+221	val_221	2012-02-22 17:15:54
+238	val_238	2012-02-22 17:15:54
+255	val_255	2012-02-22 17:15:54
+272	val_272	2012-02-22 17:15:54
+289	val_289	2012-02-22 17:15:54
+306	val_306	2012-02-22 17:15:54
+323	val_323	2012-02-22 17:15:54
+34	val_34	2012-02-22 17:15:54
+374	val_374	2012-02-22 17:15:54
+459	val_459	2012-02-22 17:15:54
+493	val_493	2012-02-22 17:15:54
+51	val_51	2012-02-22 17:15:54
+85	val_85	2012-02-22 17:15:54
 PREHOOK: query: DROP TABLE hbase_table
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@hbase_table
@@ -202,8 +202,8 @@ POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table W
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-165	val_165	1973-03-03 09:46:40
-396	val_396	1973-03-03 09:46:40
+165	val_165	1973-03-03 01:46:40
+396	val_396	1973-03-03 01:46:40
 PREHOOK: query: explain
 SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000
 PREHOOK: type: QUERY
@@ -251,10 +251,10 @@ POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table W
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-111	val_111	1976-05-03 19:33:20
-222	val_222	1976-05-03 19:33:20
-296	val_296	1976-05-03 19:33:20
-333	val_333	1976-05-03 19:33:20
+111	val_111	1976-05-03 12:33:20
+222	val_222	1976-05-03 12:33:20
+296	val_296	1976-05-03 12:33:20
+333	val_333	1976-05-03 12:33:20
 PREHOOK: query: explain
 SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000
 PREHOOK: type: QUERY
@@ -302,8 +302,8 @@ POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table W
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-165	val_165	1973-03-03 09:46:40
-396	val_396	1973-03-03 09:46:40
+165	val_165	1973-03-03 01:46:40
+396	val_396	1973-03-03 01:46:40
 PREHOOK: query: explain
 SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000
 PREHOOK: type: QUERY
@@ -351,10 +351,10 @@ POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table W
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-111	val_111	1976-05-03 19:33:20
-222	val_222	1976-05-03 19:33:20
-296	val_296	1976-05-03 19:33:20
-333	val_333	1976-05-03 19:33:20
+111	val_111	1976-05-03 12:33:20
+222	val_222	1976-05-03 12:33:20
+296	val_296	1976-05-03 12:33:20
+333	val_333	1976-05-03 12:33:20
 PREHOOK: query: DROP TABLE hbase_table
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@hbase_table

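Here the HBase `time` column stores epoch milliseconds, and cast(`time` as timestamp) renders that instant in the session's zone, so the revert moves the displayed values from UTC to local time. A worked check of the two constants used above, again assuming America/Los_Angeles:

    import java.time.Instant;
    import java.time.ZoneId;

    public class EpochMillisCast {
      public static void main(String[] args) {
        ZoneId la = ZoneId.of("America/Los_Angeles");
        Instant t1 = Instant.ofEpochMilli(100_000_000_000L); // 1973-03-03T09:46:40Z
        Instant t2 = Instant.ofEpochMilli(200_000_000_000L); // 1976-05-03T19:33:20Z
        System.out.println(t1.atZone(la).toLocalDateTime()); // 1973-03-03T01:46:40 (PST, UTC-8)
        System.out.println(t2.atZone(la).toLocalDateTime()); // 1976-05-03T12:33:20 (PDT, UTC-7)
      }
    }
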
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
index d33d343..6dcee40 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
@@ -19,12 +19,12 @@
 
 package org.apache.hive.hcatalog.data;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java
index 405f1b0..57c48f8 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecord.java
@@ -19,16 +19,16 @@
 
 package org.apache.hive.hcatalog.data;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
index af80c02..114c205 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
@@ -21,6 +21,8 @@ package org.apache.hive.hcatalog.data;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.nio.charset.CharacterCodingException;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -32,11 +34,9 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
index 2641add..cb1c459 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
@@ -22,6 +22,7 @@ package org.apache.hive.hcatalog.data;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.sql.Date;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.Iterator;
@@ -29,16 +30,14 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.VIntWritable;
 import org.apache.hadoop.io.VLongWritable;
 
@@ -122,11 +121,11 @@ public abstract class ReaderWriter {
       hdw.readFields(in);
       return hdw.getHiveDecimal();
     case DataType.DATE:
-      DateWritableV2 dw = new DateWritableV2();
+      DateWritable dw = new DateWritable();
       dw.readFields(in);
       return dw.get();
     case DataType.TIMESTAMP:
-      TimestampWritableV2 tw = new TimestampWritableV2();
+      TimestampWritable tw = new TimestampWritable();
       tw.readFields(in);
       return tw.getTimestamp();
     default:
@@ -215,10 +214,10 @@ public abstract class ReaderWriter {
       new HiveDecimalWritable((HiveDecimal)val).write(out);
       return;
     case DataType.DATE:
-      new DateWritableV2((Date)val).write(out);
+      new DateWritable((Date)val).write(out);
       return;
     case DataType.TIMESTAMP:
-      new TimestampWritableV2((Timestamp)val).write(out);
+      new TimestampWritable((java.sql.Timestamp)val).write(out);
       return;
     default:
       throw new IOException("Unexpected data type " + type +

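The writable swap above is not merely cosmetic: the V2 types model a zone-free epoch day or instant, while java.sql.Date pins local midnight in the JVM zone, so the millis that reach the writables differ per machine. A minimal illustration; the printed values assume the two zones shown:

    import java.sql.Date;
    import java.util.TimeZone;

    public class LocalMidnight {
      public static void main(String[] args) {
        // java.sql.Date.valueOf interprets the string in the JVM default zone.
        TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
        System.out.println(Date.valueOf("2014-01-06").getTime()); // 1388966400000
        TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
        System.out.println(Date.valueOf("2014-01-06").getTime()); // 1388995200000 (+8h)
      }
    }
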
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java
index d57d171..7a643f1 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestDefaultHCatRecord.java
@@ -29,17 +29,17 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
@@ -276,7 +276,7 @@ public class TestDefaultHCatRecord extends TestCase {
     rec_hcat13types.add(new HiveChar("hive_char", 10));
     rec_hcat13types.add(new HiveVarchar("hive_varchar", 20));
     rec_hcat13types.add(Date.valueOf("2014-01-06"));
-    rec_hcat13types.add(Timestamp.ofEpochMilli(System.currentTimeMillis()));
+    rec_hcat13types.add(new Timestamp(System.currentTimeMillis()));
     return new DefaultHCatRecord(rec_hcat13types);
   }
   private static HCatRecord getHCat13TypesComplexRecord() {
@@ -290,7 +290,7 @@ public class TestDefaultHCatRecord extends TestCase {
     List<Object> list = new ArrayList<Object>();
     list.add(Date.valueOf("2014-01-05"));
     list.add(new HashMap<HiveDecimal, String>(m));
-    m2.put(Timestamp.ofEpochMilli(System.currentTimeMillis()), list);
+    m2.put(new Timestamp(System.currentTimeMillis()), list);
     rec_hcat13ComplexTypes.add(m2);
     return new DefaultHCatRecord(rec_hcat13ComplexTypes);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
index 6770d44..8aeb4f4 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/data/TestJsonSerDe.java
@@ -20,6 +20,8 @@ package org.apache.hive.hcatalog.data;
 
 import java.io.UnsupportedEncodingException;
 import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -30,11 +32,9 @@ import java.util.Properties;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
@@ -98,7 +98,7 @@ public class TestJsonSerDe extends TestCase {
     rlist.add(new HiveChar("hive\nchar", 10));
     rlist.add(new HiveVarchar("hive\nvarchar", 20));
     rlist.add(Date.valueOf("2014-01-07"));
-    rlist.add(Timestamp.ofEpochMilli(System.currentTimeMillis()));
+    rlist.add(new Timestamp(System.currentTimeMillis()));
     rlist.add("hive\nbinary".getBytes("UTF-8"));
 
     List<Object> nlist = new ArrayList<Object>(13);

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
index 994c505..ec620d2 100644
--- a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
+++ b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
@@ -21,6 +21,8 @@ package org.apache.hive.hcatalog.pig;
 
 import java.io.IOException;
 import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -33,11 +35,9 @@ import java.util.Properties;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -419,7 +419,7 @@ abstract class HCatBaseStorer extends StoreFunc implements StoreMetadata {
         return new HiveVarchar(varcharVal, vti.getLength());
       case TIMESTAMP:
         DateTime dt = (DateTime)pigObj;
-        return Timestamp.ofEpochMilli(dt.getMillis());//toEpochMilli() returns UTC time regardless of TZ
+        return new Timestamp(dt.getMillis());//getMillis() returns UTC time regardless of TZ
       case DATE:
         /**
          * We ignore any TZ setting on Pig value since java.sql.Date doesn't have it (in any
@@ -437,7 +437,7 @@ abstract class HCatBaseStorer extends StoreFunc implements StoreMetadata {
           for local timezone.  Date.valueOf() also uses local timezone (as does Date(int,int,int).
           Also see PigHCatUtil#extractPigObject() for corresponding read op.  This way a DATETIME from Pig,
           when stored into Hive and read back comes back with the same value.*/
-        return Date.of(dateTime.getYear(), dateTime.getMonthOfYear(), dateTime.getDayOfMonth());
+        return new Date(dateTime.getYear() - 1900, dateTime.getMonthOfYear() - 1, dateTime.getDayOfMonth());
       default:
         throw new BackendException("Unexpected HCat type " + type + " for value " + pigObj
           + " of class " + pigObj.getClass().getName(), PigHCatUtil.PIG_EXCEPTION_CODE);

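The -1900 and -1 adjustments reappear because the deprecated java.sql.Date(int, int, int) constructor counts years from 1900 and months from 0, whereas the Joda accessors are 1-based. A small sketch of the mapping, with an illustrative date:

    import java.sql.Date;

    public class DeprecatedDateCtor {
      public static void main(String[] args) {
        // 2014-01-06 via the deprecated constructor: year-1900, month-1, day.
        @SuppressWarnings("deprecation")
        Date d = new Date(2014 - 1900, 1 - 1, 6); // local midnight in the JVM zone
        System.out.println(d); // 2014-01-06
      }
    }
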
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
index c3bde2d..f5c3c75 100644
--- a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
+++ b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
@@ -54,7 +54,6 @@ import org.apache.pig.ResourceSchema;
 import org.apache.pig.ResourceStatistics;
 import org.apache.pig.impl.util.UDFContext;
 import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 import org.joda.time.format.DateTimeFormat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
index afe6e92..163fe6a 100644
--- a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
+++ b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
@@ -20,6 +20,8 @@ package org.apache.hive.hcatalog.pig;
 
 
 import java.io.IOException;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -29,11 +31,9 @@ import java.util.Map.Entry;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -62,7 +62,6 @@ import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.util.UDFContext;
 import org.apache.pig.impl.util.Utils;
 import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -430,7 +429,7 @@ class PigHCatUtil {
       * e.g. d = new java.sql.Date(System.currentMillis()).toString() so if you do this just after
       * midnight in Palo Alto, you'll get yesterday's date printed out.*/
       Date d = (Date)o;
-      result = new DateTime(d.getYear(), d.getMonth(), d.getDay(), 0, 0, DateTimeZone.UTC);
+      result = new DateTime(d.getYear() + 1900, d.getMonth() + 1, d.getDate(), 0, 0);//uses local TZ
       break;
     case TIMESTAMP:
       /*DATA TRUNCATION!!!
@@ -438,7 +437,7 @@ class PigHCatUtil {
        object in local TZ; This is arbitrary, since Hive value doesn't have any TZ notion, but
        we need to set something for TZ.
        Timestamp is consistently in GMT (unless you call toString() on it) so we use millis*/
-      result = new DateTime(((Timestamp)o).toEpochMilli(), DateTimeZone.UTC);
+      result = new DateTime(((Timestamp)o).getTime());//uses local TZ
       break;
     default:
       result = o;

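Without an explicit chronology, Joda's DateTime constructors use the JVM default zone, which is exactly the "uses local TZ" behavior the reverted comments call out. A sketch of the difference; the 2014-01-06 example is illustrative only:

    import org.joda.time.DateTime;
    import org.joda.time.DateTimeZone;

    public class JodaZones {
      public static void main(String[] args) {
        DateTime local = new DateTime(2014, 1, 6, 0, 0);                   // midnight, default zone
        DateTime utc   = new DateTime(2014, 1, 6, 0, 0, DateTimeZone.UTC); // midnight UTC
        // The two instants differ by the default zone's UTC offset.
        System.out.println(utc.getMillis() - local.getMillis());
      }
    }
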
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
index 58981f8..0d72102 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java
@@ -29,6 +29,8 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.RandomAccessFile;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -40,8 +42,6 @@ import java.util.Properties;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
@@ -652,7 +652,7 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
      * All the values are within range of target data type (column)
      */
     private static final Object[][] primitiveRows = new Object[][] {
-        {Boolean.TRUE,Byte.MAX_VALUE,Short.MAX_VALUE, Integer.MAX_VALUE,Long.MAX_VALUE,Float.MAX_VALUE,Double.MAX_VALUE,555.22,"Kyiv","char(10)xx","varchar(20)","blah".getBytes(), Date.valueOf("2014-01-13"), Timestamp.valueOf("2014-01-13 19:26:25.0123")},
+        {Boolean.TRUE,Byte.MAX_VALUE,Short.MAX_VALUE, Integer.MAX_VALUE,Long.MAX_VALUE,Float.MAX_VALUE,Double.MAX_VALUE,555.22,"Kyiv","char(10)xx","varchar(20)","blah".getBytes(),Date.valueOf("2014-01-13"),Timestamp.valueOf("2014-01-13 19:26:25.0123")},
         {Boolean.FALSE,Byte.MIN_VALUE,Short.MIN_VALUE, Integer.MIN_VALUE,Long.MIN_VALUE,Float.MIN_VALUE,Double.MIN_VALUE,-555.22,"Saint Petersburg","char(xx)00","varchar(yy)","doh".getBytes(),Date.valueOf("2014-01-14"), Timestamp.valueOf("2014-01-14 19:26:25.0123")}
     };
     /**
@@ -701,22 +701,14 @@ public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
             assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
                 + " Reference data is null; actual "
                 + t.get(colPos), t.get(colPos) == null);
-          } else if (referenceData instanceof Date) {
+          } else if (referenceData instanceof java.util.Date) {
             // Note that here we ignore nanos part of Hive Timestamp since nanos are dropped when
             // reading Hive from Pig by design.
             assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
-                    + " Reference data=" + ((Date)referenceData).toEpochMilli()
-                    + " actual=" + ((DateTime)t.get(colPos)).getMillis()
-                    + "; types=(" + referenceData.getClass() + "," + t.get(colPos).getClass() + ")",
-                ((Date)referenceData).toEpochMilli() == ((DateTime)t.get(colPos)).getMillis());
-          } else if (referenceData instanceof Timestamp) {
-            // Note that here we ignore nanos part of Hive Timestamp since nanos are dropped when
-            // reading Hive from Pig by design.
-            assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
-                + " Reference data=" + ((Timestamp)referenceData).toEpochMilli()
+                + " Reference data=" + ((java.util.Date)referenceData).getTime()
                 + " actual=" + ((DateTime)t.get(colPos)).getMillis()
                 + "; types=(" + referenceData.getClass() + "," + t.get(colPos).getClass() + ")",
-                ((Timestamp)referenceData).toEpochMilli()== ((DateTime)t.get(colPos)).getMillis());
+                ((java.util.Date)referenceData).getTime()== ((DateTime)t.get(colPos)).getMillis());
           } else {
             // Doing String comps here as value objects in Hive in Pig are different so equals()
             // doesn't work.

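Collapsing the two reference-data branches into a single instanceof java.util.Date test works because java.sql.Date and java.sql.Timestamp are both java.util.Date subclasses, so either value takes the same getTime()-based comparison:

    public class DateHierarchy {
      public static void main(String[] args) {
        Object d = java.sql.Date.valueOf("2014-01-13");
        Object t = java.sql.Timestamp.valueOf("2014-01-13 19:26:25.0123");
        System.out.println(d instanceof java.util.Date); // true
        System.out.println(t instanceof java.util.Date); // true
      }
    }
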
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
index 19c30b0..a5cf3a5 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatStorerTest.java
@@ -213,17 +213,23 @@ public abstract class AbstractHCatStorerTest extends HCatBaseTest {
    */
   @Test
   public void testWriteTimestamp() throws Exception {
-    DateTime d = new DateTime(1991, 10, 11, 14, 23, 30, 10, DateTimeZone.UTC);// uses default TZ
+    DateTime d = new DateTime(1991, 10, 11, 14, 23, 30, 10);// uses default TZ
     pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(),
-        d.toDateTime(DateTimeZone.UTC).toString());
+        d.toDateTime(DateTimeZone.getDefault()).toString());
     d = d.plusHours(2);
     pigValueRangeTest("junitTypeTest2", "timestamp", "datetime",
         HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null, d.toString(),
-        d.toDateTime(DateTimeZone.UTC).toString());
+        d.toDateTime(DateTimeZone.getDefault()).toString());
+    d = d.toDateTime(DateTimeZone.UTC);
+    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(),
+        d.toDateTime(DateTimeZone.getDefault()).toString());
 
-    d = new DateTime(1991, 10, 11, 23, 24, 25, 26, DateTimeZone.UTC);
+    d = new DateTime(1991, 10, 11, 23, 24, 25, 26);
     pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(),
-        d.toDateTime(DateTimeZone.UTC).toString());
+        d.toDateTime(DateTimeZone.getDefault()).toString());
+    d = d.toDateTime(DateTimeZone.UTC);
+    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(),
+        d.toDateTime(DateTimeZone.getDefault()).toString());
   }
 
   // End: tests that check values from Pig that are out of range for target column

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java
----------------------------------------------------------------------
diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java
index 2ed5e5e..d80b6d4 100644
--- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java
+++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java
@@ -16,10 +16,11 @@
 
 package org.apache.hive.benchmark.vectorization;
 
+import java.sql.Timestamp;
 import java.util.Random;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -27,14 +28,12 @@ import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
-
 public class ColumnVectorGenUtil {
 
   private static final long LONG_VECTOR_NULL_VALUE = 1;
@@ -145,7 +144,7 @@ public class ColumnVectorGenUtil {
     final boolean repeating, final int size, final Random rand) {
     Timestamp[] timestamps = new Timestamp[size];
     for (int i = 0; i < size; i++) {
-      timestamps[i] = Timestamp.ofEpochMilli(rand.nextInt());
+      timestamps[i] = new Timestamp(rand.nextInt());
     }
     return generateTimestampColumnVector(nulls, repeating, size, rand, timestamps);
   }
@@ -170,10 +169,10 @@ public class ColumnVectorGenUtil {
         tcv.isNull[i] = false;
         if (!repeating) {
           Timestamp randomTimestamp = RandomTypeUtil.getRandTimestamp(rand);
-          tcv.set(i, randomTimestamp.toSqlTimestamp());
+          tcv.set(i, randomTimestamp);
           timestampValues[i] = randomTimestamp;
         } else {
-          tcv.set(i, repeatingTimestamp.toSqlTimestamp());
+          tcv.set(i, repeatingTimestamp);
           timestampValues[i] = repeatingTimestamp;
         }
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java
index 280119b..7a891ef 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/BaseJdbcWithMiniLlap.java
@@ -31,10 +31,12 @@ import java.lang.reflect.Field;
 import java.math.BigDecimal;
 import java.net.URL;
 import java.sql.Connection;
+import java.sql.Date;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -74,8 +76,6 @@ import org.apache.hadoop.io.Text;
 
 import org.apache.hive.jdbc.miniHS2.MiniHS2;
 import org.apache.hive.jdbc.miniHS2.MiniHS2.MiniClusterType;
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.llap.LlapBaseInputFormat;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java
index e69c686..9dfece9 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapArrow.java
@@ -21,8 +21,8 @@ package org.apache.hive.jdbc;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertArrayEquals;
 import java.math.BigDecimal;
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.List;
 import org.apache.hadoop.hive.llap.FieldDesc;
 import org.apache.hadoop.hive.llap.Row;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 2b36ceb..517b413 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -1686,7 +1686,6 @@ druid.query.files=druidmini_test1.q,\
   druidmini_test_insert.q,\
   druidmini_mv.q,\
   druid_timestamptz.q,\
-  druid_timestamptz2.q,\
   druidmini_dynamic_partition.q,\
   druidmini_expressions.q,\
   druidmini_extractTime.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java
----------------------------------------------------------------------
diff --git a/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java b/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java
index f328d6e..77559e1 100644
--- a/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java
+++ b/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.llap.io;
 
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.FilterInputStream;
 import java.io.FilterOutputStream;
 import java.io.IOException;
@@ -29,7 +31,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Random;
 
-import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.junit.Test;
 import static org.junit.Assert.*;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
index af853e3..32f3bed 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
@@ -105,7 +105,7 @@ public class GenericColumnVectorProducer implements ColumnVectorProducer {
 
   public static final class SerDeStripeMetadata implements ConsumerStripeMetadata {
     // The writer is local to the process.
-    private final String writerTimezone = "UTC";
+    private final String writerTimezone = TimeZone.getDefault().getID();
     private List<ColumnEncoding> encodings;
     private final int stripeIx;
     private long rowCount = -1;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
index 40248a3..0d7435c 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
@@ -223,8 +223,7 @@ public class OrcEncodedDataConsumer
     TreeReaderFactory.Context context = new TreeReaderFactory.ReaderContext()
             .setSchemaEvolution(evolution).skipCorrupt(skipCorrupt)
             .writerTimeZone(stripeMetadata.getWriterTimezone())
-            .fileFormat(fileMetadata == null ? null : fileMetadata.getFileVersion())
-            .useUTCTimestamp(true);
+            .fileFormat(fileMetadata == null ? null : fileMetadata.getFileVersion());
     this.batchSchemas = includes.getBatchReaderTypes(fileSchema);
     StructTreeReader treeReader = EncodedTreeReaderFactory.createRootTreeReader(
         batchSchemas, stripeMetadata.getEncodings(), batch, codec, context, useDecimal64ColumnVectors);

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
index f4e85bd..0d3ee2b 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
@@ -21,8 +21,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import java.sql.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 /**
  * Generated from template DTIColumnCompareScalar.txt, which covers comparison

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
index b198e08..be5f641 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import java.sql.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
index c3982ed..32dd6ed 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
@@ -26,8 +26,8 @@ import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateColumnArithmeticIntervalYearMonthColumn.txt, which covers binary arithmetic
@@ -99,65 +99,65 @@ public class <ClassName> extends VectorExpression {
      * conditional checks in the inner loop.
      */
     if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-      scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
+      scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
       scratchIntervalYearMonth2.set((int) vector2[0]);
       dtm.<OperatorMethod>(
           scratchDate1, scratchIntervalYearMonth2,  outputDate);
-      outputVector[0] = DateWritableV2.dateToDays(outputDate);
+      outputVector[0] = DateWritable.dateToDays(outputDate);
     } else if (inputColVector1.isRepeating) {
-      scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
+      scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               scratchDate1, scratchIntervalYearMonth2,  outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               scratchDate1, scratchIntervalYearMonth2,  outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     } else if (inputColVector2.isRepeating) {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               scratchDate1, scratchIntervalYearMonth2,  outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               scratchDate1, scratchIntervalYearMonth2,  outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     } else {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               scratchDate1, scratchIntervalYearMonth2,  outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               scratchDate1, scratchIntervalYearMonth2,  outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     }

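Every loop body in the hunk above changes only DateWritableV2 to DateWritable; the surrounding branch structure is the standard vectorization fast path, specialized for repeating (constant) inputs and for an active selection vector. A stripped-down sketch of that control flow (vector1, vector2, and apply are placeholder names, not Hive API; the generated templates additionally duplicate each loop for the selectedInUse case instead of branching per element):

    public class RepeatingBranchSketch {
      static long apply(long a, long b) { return a + b; } // stands in for dtm.<OperatorMethod>

      static void evaluate(long[] vector1, boolean isRepeating1,
                           long[] vector2, boolean isRepeating2,
                           int[] sel, boolean selectedInUse, int n, long[] out) {
        if (isRepeating1 && isRepeating2) {        // both constant: compute once
          out[0] = apply(vector1[0], vector2[0]);
        } else if (isRepeating1) {                 // left constant: hoist vector1[0]
          for (int j = 0; j != n; j++) {
            int i = selectedInUse ? sel[j] : j;
            out[i] = apply(vector1[0], vector2[i]);
          }
        } else if (isRepeating2) {                 // right constant: hoist vector2[0]
          for (int j = 0; j != n; j++) {
            int i = selectedInUse ? sel[j] : j;
            out[i] = apply(vector1[i], vector2[0]);
          }
        } else {                                   // general element-wise case
          for (int j = 0; j != n; j++) {
            int i = selectedInUse ? sel[j] : j;
            out[i] = apply(vector1[i], vector2[i]);
          }
        }
      }

      public static void main(String[] args) {
        long[] out = new long[3];
        evaluate(new long[]{7, 7, 7}, true,
                 new long[]{1, 2, 3}, false,
                 new int[]{0, 2}, true, 2, out);
        System.out.println(java.util.Arrays.toString(out)); // [8, 0, 10]
      }
    }
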
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
index 06c7368..94c0c5c 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateColumnArithmeticIntervalYearMonthScalar.txt, which covers binary arithmetic
@@ -93,10 +93,10 @@ public class <ClassName> extends VectorExpression {
     if (inputColVector1.isRepeating) {
       if (inputColVector1.noNulls || !inputIsNull[0]) {
         outputIsNull[0] = false;
-        scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
+        scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
         dtm.<OperatorMethod>(
             scratchDate1, value, outputDate);
-        outputVector[0] = DateWritableV2.dateToDays(outputDate);
+        outputVector[0] = DateWritable.dateToDays(outputDate);
       } else {
         outputIsNull[0] = true;
         outputColVector.noNulls = false;
@@ -115,18 +115,18 @@ public class <ClassName> extends VectorExpression {
            for(int j = 0; j != n; j++) {
             final int i = sel[j];
             outputIsNull[i] = false;
-            scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+            scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
             dtm.<OperatorMethod>(
                 scratchDate1, value, outputDate);
-            outputVector[i] = DateWritableV2.dateToDays(outputDate);
+            outputVector[i] = DateWritable.dateToDays(outputDate);
           }
          } else {
            for(int j = 0; j != n; j++) {
              final int i = sel[j];
-             scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+             scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
              dtm.<OperatorMethod>(
                  scratchDate1, value, outputDate);
-             outputVector[i] = DateWritableV2.dateToDays(outputDate);
+             outputVector[i] = DateWritable.dateToDays(outputDate);
            }
          }
       } else {
@@ -138,10 +138,10 @@ public class <ClassName> extends VectorExpression {
           outputColVector.noNulls = true;
         }
         for(int i = 0; i != n; i++) {
-          scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           dtm.<OperatorMethod>(
               scratchDate1, value, outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     } else /* there are NULLs in the inputColVector */ {
@@ -155,10 +155,10 @@ public class <ClassName> extends VectorExpression {
           int i = sel[j];
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+            scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
             dtm.<OperatorMethod>(
                 scratchDate1, value, outputDate);
-            outputVector[i] = DateWritableV2.dateToDays(outputDate);
+            outputVector[i] = DateWritable.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;
@@ -168,10 +168,10 @@ public class <ClassName> extends VectorExpression {
         for(int i = 0; i != n; i++) {
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+            scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
             dtm.<OperatorMethod>(
                 scratchDate1, value, outputDate);
-            outputVector[i] = DateWritableV2.dateToDays(outputDate);
+            outputVector[i] = DateWritable.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;

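Besides the rename, this scalar variant shows the null contract the templates maintain: per-row outputIsNull mirrors inputIsNull, and the batch-level noNulls flag is cleared the first time a null is produced. A compact plain-array sketch of that contract (simple arrays, not Hive's ColumnVector API):

    import java.util.Arrays;

    public class NullPropagationSketch {
      public static void main(String[] args) {
        long[] input = {1, 2, 3};
        boolean[] inputIsNull = {false, true, false};
        long[] output = new long[input.length];
        boolean[] outputIsNull = new boolean[input.length];
        boolean noNulls = true;

        for (int i = 0; i < input.length; i++) {
          if (!inputIsNull[i]) {
            outputIsNull[i] = false;
            output[i] = input[i] * 10;   // stands in for the dtm arithmetic
          } else {
            outputIsNull[i] = true;      // propagate the null per row
            noNulls = false;             // batch-level flag, cleared once
          }
        }
        System.out.println(Arrays.toString(output) + " noNulls=" + noNulls); // [10, 0, 30] noNulls=false
      }
    }
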
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
index 53637a6..96c525d 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
@@ -28,8 +28,8 @@ import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateColumnArithmeticTimestampColumn.txt, a class
@@ -97,12 +97,12 @@ public class <ClassName> extends VectorExpression {
      * conditional checks in the inner loop.
      */
     if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-      scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
       dtm.<OperatorMethod>(
           scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(0), outputColVector.getScratch<CamelReturnType>());
       outputColVector.setFromScratch<CamelReturnType>(0);
     } else if (inputColVector1.isRepeating) {
-      scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
@@ -122,14 +122,14 @@ public class <ClassName> extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           dtm.<OperatorMethod>(
               scratchTimestamp1, value2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
          }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           dtm.<OperatorMethod>(
               scratchTimestamp1, value2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
@@ -139,14 +139,14 @@ public class <ClassName> extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
          dtm.<OperatorMethod>(
               scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           dtm.<OperatorMethod>(
               scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);

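In the date-versus-timestamp templates the date side is widened before the math: each epoch-day value becomes midnight of that day via daysToMillis, written into a single reusable scratch Timestamp. A plain-JDK sketch of that per-row widening (TimeUnit standing in for DateWritable.daysToMillis, which additionally applies the local zone offset):

    import java.sql.Timestamp;
    import java.util.concurrent.TimeUnit;

    public class ScratchTimestampSketch {
      public static void main(String[] args) {
        long[] dateDays = {14902, 14903};     // a date column stored as epoch days
        Timestamp scratch = new Timestamp(0); // one scratch object, reused per row

        for (long days : dateDays) {
          scratch.setTime(TimeUnit.DAYS.toMillis((int) days)); // midnight of that day
          System.out.println(scratch);        // now usable in timestamp arithmetic
        }
      }
    }
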
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
index e9e9193..fb22992 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateColumnArithmeticTimestampScalarBase.txt, a base class
@@ -91,7 +91,7 @@ public class <ClassName> extends VectorExpression {
     if (inputColVector1.isRepeating) {
       if (inputColVector1.noNulls || !inputIsNull[0]) {
         outputIsNull[0] = false;
-        scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
+        scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
         dtm.<OperatorMethod>(
             scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
         outputColVector.setFromScratch<CamelReturnType>(0);
@@ -112,7 +112,7 @@ public class <ClassName> extends VectorExpression {
            for(int j = 0; j != n; j++) {
             final int i = sel[j];
             outputIsNull[i] = false;
-            scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+            scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
             dtm.<OperatorMethod>(
                scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
             outputColVector.setFromScratch<CamelReturnType>(i);
@@ -120,7 +120,7 @@ public class <ClassName> extends VectorExpression {
          } else {
            for(int j = 0; j != n; j++) {
              final int i = sel[j];
-             scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+             scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
              dtm.<OperatorMethod>(
                scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
              outputColVector.setFromScratch<CamelReturnType>(i);
@@ -135,7 +135,7 @@ public class <ClassName> extends VectorExpression {
           outputColVector.noNulls = true;
         }
         for(int i = 0; i != n; i++) {
-          scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
           dtm.<OperatorMethod>(
              scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
@@ -152,7 +152,7 @@ public class <ClassName> extends VectorExpression {
           int i = sel[j];
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+            scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
             dtm.<OperatorMethod>(
                scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
             outputColVector.setFromScratch<CamelReturnType>(i);
@@ -165,7 +165,7 @@ public class <ClassName> extends VectorExpression {
         for(int i = 0; i != n; i++) {
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+            scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
             dtm.<OperatorMethod>(
                scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
             outputColVector.setFromScratch<CamelReturnType>(i);
@@ -182,7 +182,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum) + ", val " + TimestampUtils.timestampScalarTypeToString(value);
+    return getColumnParamString(0, colNum) + ", val " + value.toString();
   }
 
   @Override

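The last hunk in this file (and the matching ones in the Filter*Compare* and IntervalYearMonth*TimestampScalar templates below) only changes how the scalar is rendered in EXPLAIN output: java.sql.Timestamp.toString() replaces the UTC-aware TimestampUtils helper. The visible difference is plain JDK formatting; a sketch, with the day count chosen hypothetically:

    import java.sql.Timestamp;
    import java.util.concurrent.TimeUnit;

    public class ScalarDisplaySketch {
      public static void main(String[] args) {
        // A date scalar (epoch days) widened to a Timestamp at construction,
        // as the template constructor does with DateWritable.daysToMillis.
        Timestamp value = new Timestamp(TimeUnit.DAYS.toMillis(14902));
        // Timestamp.toString() renders in the JVM's default zone and keeps fractional digits:
        System.out.println("val " + value);   // e.g. "val 2010-10-20 00:00:00.0" on a UTC JVM
      }
    }
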
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
index 7d2434a..0c8ec9c 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
@@ -37,8 +37,8 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateTimeScalarArithmeticIntervalYearMonthColumn.txt.
@@ -58,7 +58,7 @@ public class <ClassName> extends VectorExpression {
 
   public <ClassName>(long value, int colNum, int outputColumnNum) {
     super(outputColumnNum);
-    this.value = new Date(DateWritableV2.daysToMillis((int) value));
+    this.value = new Date(DateWritable.daysToMillis((int) value));
     this.colNum = colNum;
   }
 
@@ -110,7 +110,7 @@ public class <ClassName> extends VectorExpression {
         scratchIntervalYearMonth2.set((int) vector2[0]);
         dtm.<OperatorMethod>(
             value, scratchIntervalYearMonth2, outputDate);
-        outputVector[0] = DateWritableV2.dateToDays(outputDate);
+        outputVector[0] = DateWritable.dateToDays(outputDate);
       } else {
         outputIsNull[0] = true;
         outputColVector.noNulls = false;
@@ -131,7 +131,7 @@ public class <ClassName> extends VectorExpression {
             scratchIntervalYearMonth2.set((int) vector2[i]);
             dtm.<OperatorMethod>(
                 value, scratchIntervalYearMonth2, outputDate);
-            outputVector[i] = DateWritableV2.dateToDays(outputDate);
+            outputVector[i] = DateWritable.dateToDays(outputDate);
           }
          } else {
            for(int j = 0; j != n; j++) {
@@ -139,7 +139,7 @@ public class <ClassName> extends VectorExpression {
              scratchIntervalYearMonth2.set((int) vector2[i]);
              dtm.<OperatorMethod>(
                  value, scratchIntervalYearMonth2, outputDate);
-             outputVector[i] = DateWritableV2.dateToDays(outputDate);
+             outputVector[i] = DateWritable.dateToDays(outputDate);
            }
          }
       } else {
@@ -154,7 +154,7 @@ public class <ClassName> extends VectorExpression {
           scratchIntervalYearMonth2.set((int) vector2[i]);
           dtm.<OperatorMethod>(
               value, scratchIntervalYearMonth2, outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     } else /* there are NULLs in the inputColVector */ {
@@ -171,7 +171,7 @@ public class <ClassName> extends VectorExpression {
             scratchIntervalYearMonth2.set((int) vector2[i]);
             dtm.<OperatorMethod>(
                 value, scratchIntervalYearMonth2, outputDate);
-            outputVector[i] = DateWritableV2.dateToDays(outputDate);
+            outputVector[i] = DateWritable.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;
@@ -184,7 +184,7 @@ public class <ClassName> extends VectorExpression {
             scratchIntervalYearMonth2.set((int) vector2[i]);
             dtm.<OperatorMethod>(
                 value, scratchIntervalYearMonth2, outputDate);
-            outputVector[i] = DateWritableV2.dateToDays(outputDate);
+            outputVector[i] = DateWritable.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
index 678d827..ef8f2a3 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
@@ -36,8 +36,8 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateTimeScalarArithmeticTimestampColumnBase.txt.
@@ -57,7 +57,7 @@ public class <ClassName> extends VectorExpression {
     super(outputColumnNum);
     // Scalar input #1 is type date (days).  For the math we convert it to a timestamp.
     this.value = new Timestamp(0);
-    this.value.setTime(DateWritableV2.daysToMillis((int) value));
+    this.value.setTime(DateWritable.daysToMillis((int) value));
     this.colNum = colNum;
   }
 
@@ -189,7 +189,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return "val " + org.apache.hadoop.hive.common.type.Date.ofEpochMilli(value.getTime()) + ", " + getColumnParamString(1, colNum);
+    return "val " + value + ", " + getColumnParamString(1, colNum);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt
index 1785abe..5242bbd 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt
@@ -22,7 +22,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.Filter<TypeName>ColumnBetween;
 import org.apache.hadoop.hive.ql.plan.DynamicValue;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt
index 6a05d77..3fa542b 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampColumnCompareTimestampScalar.txt
@@ -141,7 +141,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum) + ", val " + TimestampUtils.timestampScalarTypeToString(value);
+    return getColumnParamString(0, colNum) + ", val " + value.toString();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt
index 36628a7..fcda65f 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterTimestampScalarCompareTimestampColumn.txt
@@ -144,7 +144,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return "val " + TimestampUtils.timestampScalarTypeToString(value) + ", " + getColumnParamString(1, colNum);
+    return "val " + value.toString() + ", " + getColumnParamString(1, colNum);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
index 4473bf0..393413f 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
@@ -26,8 +26,8 @@ import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateColumnArithmeticIntervalYearMonthColumn.txt, which covers binary arithmetic
@@ -100,44 +100,44 @@ public class <ClassName> extends VectorExpression {
      */
     if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
       scratchIntervalYearMonth1.set((int) vector1[0]);
-      scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
+      scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0]));
       dtm.<OperatorMethod>(
           scratchIntervalYearMonth1, scratchDate2, outputDate);
-      outputVector[0] = DateWritableV2.dateToDays(outputDate);
+      outputVector[0] = DateWritable.dateToDays(outputDate);
     } else if (inputColVector1.isRepeating) {
       scratchIntervalYearMonth1.set((int) vector1[0]);
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, scratchDate2, outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, scratchDate2, outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     } else if (inputColVector2.isRepeating) {
-      scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
+      scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0]));
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           scratchIntervalYearMonth1.set((int) vector1[i]);
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, scratchDate2, outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
           scratchIntervalYearMonth1.set((int) vector1[i]);
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, scratchDate2, outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     } else {
@@ -145,18 +145,18 @@ public class <ClassName> extends VectorExpression {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           scratchIntervalYearMonth1.set((int) vector1[i]);
-          scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, scratchDate2, outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
           scratchIntervalYearMonth1.set((int) vector1[i]);
-          scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, scratchDate2, outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     }

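DateWritable only handles the days encoding at the loop boundaries here; the month arithmetic itself is DateTimeMath's job. A rough plain-JDK stand-in for adding a year-month interval to a date (Calendar-based, default zone; Hive's DateTimeMath covers sign handling and more beyond this sketch):

    import java.sql.Date;
    import java.util.Calendar;
    import java.util.concurrent.TimeUnit;

    public class IntervalYearMonthSketch {
      // Roughly what dtm.<OperatorMethod>(interval, date, output) does for a year-month interval.
      static void addMonths(Date in, int totalMonths, Date out) {
        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(in.getTime());
        cal.add(Calendar.MONTH, totalMonths); // handles year rollover and month-end clamping
        out.setTime(cal.getTimeInMillis());
      }

      public static void main(String[] args) {
        Date scratch = new Date(TimeUnit.DAYS.toMillis(14902)); // 2010-10-20 (UTC millis)
        Date output = new Date(0);
        addMonths(scratch, 14, output); // a 1-2 year-to-month interval is 14 months
        System.out.println(output);
      }
    }
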
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
index f0c1910..a65c4b9 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateColumnArithmeticIntervalYearMonthScalar.txt, which covers binary arithmetic
@@ -50,7 +50,7 @@ public class <ClassName> extends VectorExpression {
   public <ClassName>(int colNum, long value, int outputColumnNum) {
     super(outputColumnNum);
     this.colNum = colNum;
-    this.value = new Date(DateWritableV2.daysToMillis((int) value));
+    this.value = new Date(DateWritable.daysToMillis((int) value));
   }
 
   public <ClassName>() {
@@ -96,7 +96,7 @@ public class <ClassName> extends VectorExpression {
         scratchIntervalYearMonth1.set((int) vector1[0]);
         dtm.<OperatorMethod>(
             scratchIntervalYearMonth1, value, outputDate);
-        outputVector[0] = DateWritableV2.dateToDays(outputDate);
+        outputVector[0] = DateWritable.dateToDays(outputDate);
       } else {
         outputIsNull[0] = true;
         outputColVector.noNulls = false;
@@ -118,7 +118,7 @@ public class <ClassName> extends VectorExpression {
             scratchIntervalYearMonth1.set((int) vector1[i]);
             dtm.<OperatorMethod>(
                 scratchIntervalYearMonth1, value, outputDate);
-            outputVector[i] = DateWritableV2.dateToDays(outputDate);
+            outputVector[i] = DateWritable.dateToDays(outputDate);
           }
          } else {
            for(int j = 0; j != n; j++) {
@@ -126,7 +126,7 @@ public class <ClassName> extends VectorExpression {
              scratchIntervalYearMonth1.set((int) vector1[i]);
              dtm.<OperatorMethod>(
                  scratchIntervalYearMonth1, value, outputDate);
-             outputVector[i] = DateWritableV2.dateToDays(outputDate);
+             outputVector[i] = DateWritable.dateToDays(outputDate);
            }
          }
       } else {
@@ -141,7 +141,7 @@ public class <ClassName> extends VectorExpression {
           scratchIntervalYearMonth1.set((int) vector1[i]);
           dtm.<OperatorMethod>(
               scratchIntervalYearMonth1, value, outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     } else /* there are NULLs in the inputColVector */ {
@@ -158,7 +158,7 @@ public class <ClassName> extends VectorExpression {
             scratchIntervalYearMonth1.set((int) vector1[i]);
             dtm.<OperatorMethod>(
                 scratchIntervalYearMonth1, value, outputDate);
-            outputVector[i] = DateWritableV2.dateToDays(outputDate);
+            outputVector[i] = DateWritable.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;
@@ -170,7 +170,7 @@ public class <ClassName> extends VectorExpression {
             scratchIntervalYearMonth1.set((int) vector1[i]);
             dtm.<OperatorMethod>(
                 scratchIntervalYearMonth1, value, outputDate);
-            outputVector[i] = DateWritableV2.dateToDays(outputDate);
+            outputVector[i] = DateWritable.dateToDays(outputDate);
             outputIsNull[i] = false;
           } else {
             outputIsNull[i] = true;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
index 71e142c..c297116 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
@@ -182,7 +182,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum) + ", val " + TimestampUtils.timestampScalarTypeToString(value);
+    return getColumnParamString(0, colNum) + ", val " + value.toString();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
index 27f2fcf..34d516e 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
@@ -37,8 +37,8 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template DateTimeScalarArithmeticIntervalYearMonthColumn.txt.
@@ -107,10 +107,10 @@ public class <ClassName> extends VectorExpression {
     if (inputColVector2.isRepeating) {
       if (inputColVector2.noNulls || !inputIsNull[0]) {
         outputIsNull[0] = false;
-        scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
+        scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0]));
         dtm.<OperatorMethod>(
             value, scratchDate2, outputDate);
-        outputVector[0] = DateWritableV2.dateToDays(outputDate);
+        outputVector[0] = DateWritable.dateToDays(outputDate);
       } else {
         outputIsNull[0] = true;
         outputColVector.noNulls = false;
@@ -129,18 +129,18 @@ public class <ClassName> extends VectorExpression {
            for(int j = 0; j != n; j++) {
             final int i = sel[j];
             outputIsNull[i] = false;
-            scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+            scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
             dtm.<OperatorMethod>(
                 value, scratchDate2, outputDate);
-            outputVector[i] = DateWritableV2.dateToDays(outputDate);
+            outputVector[i] = DateWritable.dateToDays(outputDate);
           }
          } else {
            for(int j = 0; j != n; j++) {
              final int i = sel[j];
-             scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+             scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
              dtm.<OperatorMethod>(
                  value, scratchDate2, outputDate);
-             outputVector[i] = DateWritableV2.dateToDays(outputDate);
+             outputVector[i] = DateWritable.dateToDays(outputDate);
            }
          }
       } else {
@@ -152,10 +152,10 @@ public class <ClassName> extends VectorExpression {
           outputColVector.noNulls = true;
         }
         for(int i = 0; i != n; i++) {
-          scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               value, scratchDate2, outputDate);
-          outputVector[i] = DateWritableV2.dateToDays(outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     } else /* there are NULLs in the inputColVector */ {
@@ -169,10 +169,10 @@ public class <ClassName> extends VectorExpression {
           int i = sel[j];
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+            scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
             dtm.<OperatorMethod>(
                 value, scratchDate2, outputDate);
-            outputVector[i] = DateWritableV2.dateToDays(outputDate);
+            outputVector[i] = DateWritable.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;
@@ -182,10 +182,10 @@ public class <ClassName> extends VectorExpression {
         for(int i = 0; i != n; i++) {
           if (!inputIsNull[i]) {
             outputIsNull[i] = false;
-            scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+            scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
             dtm.<OperatorMethod>(
                 value, scratchDate2, outputDate);
-            outputVector[i] = DateWritableV2.dateToDays(outputDate);
+            outputVector[i] = DateWritable.dateToDays(outputDate);
           } else {
             outputIsNull[i] = true;
             outputColVector.noNulls = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt
index 397fca5..19263d9 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/LongDoubleColumnCompareTimestampScalar.txt
@@ -157,7 +157,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum) + ", val " + TimestampUtils.timestampScalarTypeToString(value);
+    return getColumnParamString(0, colNum) + ", val " + value;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
index 8e44c92..45f6408 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
@@ -27,8 +27,8 @@ import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template TimestampColumnArithmeticDateColumn.txt, which covers binary arithmetic
@@ -96,7 +96,7 @@ public class <ClassName> extends VectorExpression {
      * conditional checks in the inner loop.
      */
     if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-      scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
       dtm.<OperatorMethod>(
           inputColVector1.asScratch<CamelOperandType1>(0), scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
       outputColVector.setFromScratch<CamelReturnType>(0);
@@ -105,21 +105,21 @@ public class <ClassName> extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               value1, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               value1, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
         }
       }
     } else if (inputColVector2.isRepeating) {
-      scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
@@ -138,14 +138,14 @@ public class <ClassName> extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               inputColVector1.asScratch<CamelOperandType1>(i), scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
           dtm.<OperatorMethod>(
               inputColVector1.asScratch<CamelOperandType1>(i), scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
           outputColVector.setFromScratch<CamelReturnType>(i);

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
index cb897e4..0bbdce7 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 /**
  * Generated from template TimestampColumnArithmeticDateScalar.txt, which covers binary arithmetic
@@ -49,7 +49,7 @@ public class <ClassName> extends VectorExpression {
     super(outputColumnNum);
     this.colNum = colNum;
     this.value = new Timestamp(0);
-    this.value.setTime(DateWritableV2.daysToMillis((int) value));
+    this.value.setTime(DateWritable.daysToMillis((int) value));
   }
 
   public <ClassName>() {
@@ -175,7 +175,7 @@ public class <ClassName> extends VectorExpression {
 
   @Override
   public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum) + ", val " + org.apache.hadoop.hive.common.type.Date.ofEpochMilli(value.getTime());
+    return getColumnParamString(0, colNum) + ", val " + value.toString();
   }
 
   @Override


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out b/ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out
index 1908418..c66f3d4 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_date_funcs.q.out
@@ -265,7 +265,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: fl_time (type: timestamp), to_unix_timestamp(fl_time) (type: bigint), year(fl_time) (type: int), month(fl_time) (type: int), day(fl_time) (type: int), day(fl_time) (type: int), dayofweek(fl_time) (type: int), weekofyear(fl_time) (type: int), CAST( fl_time AS DATE) (type: date), to_date(fl_time) (type: date), date_add(fl_time, 2) (type: date), date_sub(fl_time, 2) (type: date), datediff(fl_time, '2000-01-01') (type: int), datediff(fl_time, DATE'2000-01-01') (type: int), datediff(fl_time, TIMESTAMP'2000-01-01 00:00:00') (type: int), datediff(fl_time, TIMESTAMP'2000-01-01 11:13:09') (type: int), datediff(fl_time, '2007-03-14') (type: int), datediff(fl_time, DATE'2007-03-14') (type: int), datediff(fl_time, TIMESTAMP'2007-03-14 00:00:00') (type: int), datediff(fl_time, TIMESTAMP'2007-03-14 08:21:59') (type: int)
+                    expressions: fl_time (type: timestamp), to_unix_timestamp(fl_time) (type: bigint), year(fl_time) (type: int), month(fl_time) (type: int), day(fl_time) (type: int), dayofmonth(fl_time) (type: int), dayofweek(fl_time) (type: int), weekofyear(fl_time) (type: int), CAST( fl_time AS DATE) (type: date), to_date(fl_time) (type: date), date_add(fl_time, 2) (type: date), date_sub(fl_time, 2) (type: date), datediff(fl_time, '2000-01-01') (type: int), datediff(fl_time, DATE'2000-01-01') (type: int), datediff(fl_time, TIMESTAMP'2000-01-01 00:00:00.0') (type: int), datediff(fl_time, TIMESTAMP'2000-01-01 11:13:09.0') (type: int), datediff(fl_time, '2007-03-14') (type: int), datediff(fl_time, DATE'2007-03-14') (type: int), datediff(fl_time, TIMESTAMP'2007-03-14 00:00:00.0') (type: int), datediff(fl_time, TIMESTAMP'2007-03-14 08:21:59.0') (type: int)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
                     Select Vectorization:
                         className: VectorSelectOperator
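
Two cosmetic plan differences show up in this q.out hunk: the reverted code keeps dayofmonth(fl_time) where the UTC patch had canonicalized it to day(fl_time), and timestamp literals are rendered through java.sql.Timestamp.toString(), which always keeps at least one fractional digit, hence the TIMESTAMP'2000-01-01 00:00:00.0' spelling. The latter is stock JDK behavior:

    import java.sql.Timestamp;

    public class TimestampLiteralSketch {
      public static void main(String[] args) {
        // java.sql.Timestamp.toString() never drops the fractional part entirely:
        System.out.println(Timestamp.valueOf("2000-01-01 00:00:00")); // 2000-01-01 00:00:00.0
      }
    }
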
@@ -352,143 +352,143 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_udf_flight_orc
 #### A masked pattern was here ####
 fl_time	_c1	_c2	_c3	_c4	_c5	_c6	_c7	_c8	_c9	_c10	_c11	_c12	_c13	_c14	_c15	_c16	_c17	_c18	_c19
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20 07:00:00	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21 07:00:00	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22 07:00:00	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23 07:00:00	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24 07:00:00	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25 07:00:00	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26 07:00:00	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27 07:00:00	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28 07:00:00	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29 07:00:00	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30 07:00:00	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31 07:00:00	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20 07:00:00	1287583200	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21 07:00:00	1287669600	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22 07:00:00	1287756000	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23 07:00:00	1287842400	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24 07:00:00	1287928800	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25 07:00:00	1288015200	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26 07:00:00	1288101600	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27 07:00:00	1288188000	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28 07:00:00	1288274400	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29 07:00:00	1288360800	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30 07:00:00	1288447200	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31 07:00:00	1288533600	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   fl_date,
   to_unix_timestamp(fl_date),
@@ -557,7 +557,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: fl_date (type: date), to_unix_timestamp(fl_date) (type: bigint), year(fl_date) (type: int), month(fl_date) (type: int), day(fl_date) (type: int), day(fl_date) (type: int), dayofweek(fl_date) (type: int), weekofyear(fl_date) (type: int), fl_date (type: date), to_date(fl_date) (type: date), date_add(fl_date, 2) (type: date), date_sub(fl_date, 2) (type: date), datediff(fl_date, '2000-01-01') (type: int), datediff(fl_date, DATE'2000-01-01') (type: int), datediff(fl_date, TIMESTAMP'2000-01-01 00:00:00') (type: int), datediff(fl_date, TIMESTAMP'2000-01-01 11:13:09') (type: int), datediff(fl_date, '2007-03-14') (type: int), datediff(fl_date, DATE'2007-03-14') (type: int), datediff(fl_date, TIMESTAMP'2007-03-14 00:00:00') (type: int), datediff(fl_date, TIMESTAMP'2007-03-14 08:21:59') (type: int)
+                    expressions: fl_date (type: date), to_unix_timestamp(fl_date) (type: bigint), year(fl_date) (type: int), month(fl_date) (type: int), day(fl_date) (type: int), dayofmonth(fl_date) (type: int), dayofweek(fl_date) (type: int), weekofyear(fl_date) (type: int), fl_date (type: date), to_date(fl_date) (type: date), date_add(fl_date, 2) (type: date), date_sub(fl_date, 2) (type: date), datediff(fl_date, '2000-01-01') (type: int), datediff(fl_date, DATE'2000-01-01') (type: int), datediff(fl_date, TIMESTAMP'2000-01-01 00:00:00.0') (type: int), datediff(fl_date, TIMESTAMP'2000-01-01 11:13:09.0') (type: int), datediff(fl_date, '2007-03-14') (type: int), datediff(fl_date, DATE'2007-03-14') (type: int), datediff(fl_date, TIMESTAMP'2007-03-14 00:00:00.0') (type: int), datediff(fl_date, TIMESTAMP'2007-03-14 08:21:59.0') (type: int)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -644,143 +644,143 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_udf_flight_orc
 #### A masked pattern was here ####
 fl_date	_c1	_c2	_c3	_c4	_c5	_c6	_c7	_c8	_c9	_c10	_c11	_c12	_c13	_c14	_c15	_c16	_c17	_c18	_c19
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-20	1287532800	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-21	1287619200	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-22	1287705600	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-23	1287792000	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-24	1287878400	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-25	1287964800	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-26	1288051200	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-27	1288137600	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-28	1288224000	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-29	1288310400	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-30	1288396800	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
-2010-10-31	1288483200	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-20	1287558000	2010	10	20	20	4	42	2010-10-20	2010-10-20	2010-10-22	2010-10-18	3945	3945	3945	3945	1316	1316	1316	1316
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-21	1287644400	2010	10	21	21	5	42	2010-10-21	2010-10-21	2010-10-23	2010-10-19	3946	3946	3946	3946	1317	1317	1317	1317
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-22	1287730800	2010	10	22	22	6	42	2010-10-22	2010-10-22	2010-10-24	2010-10-20	3947	3947	3947	3947	1318	1318	1318	1318
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-23	1287817200	2010	10	23	23	7	42	2010-10-23	2010-10-23	2010-10-25	2010-10-21	3948	3948	3948	3948	1319	1319	1319	1319
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-24	1287903600	2010	10	24	24	1	42	2010-10-24	2010-10-24	2010-10-26	2010-10-22	3949	3949	3949	3949	1320	1320	1320	1320
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-25	1287990000	2010	10	25	25	2	43	2010-10-25	2010-10-25	2010-10-27	2010-10-23	3950	3950	3950	3950	1321	1321	1321	1321
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-26	1288076400	2010	10	26	26	3	43	2010-10-26	2010-10-26	2010-10-28	2010-10-24	3951	3951	3951	3951	1322	1322	1322	1322
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-27	1288162800	2010	10	27	27	4	43	2010-10-27	2010-10-27	2010-10-29	2010-10-25	3952	3952	3952	3952	1323	1323	1323	1323
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-28	1288249200	2010	10	28	28	5	43	2010-10-28	2010-10-28	2010-10-30	2010-10-26	3953	3953	3953	3953	1324	1324	1324	1324
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-29	1288335600	2010	10	29	29	6	43	2010-10-29	2010-10-29	2010-10-31	2010-10-27	3954	3954	3954	3954	1325	1325	1325	1325
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-30	1288422000	2010	10	30	30	7	43	2010-10-30	2010-10-30	2010-11-01	2010-10-28	3955	3955	3955	3955	1326	1326	1326	1326
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
+2010-10-31	1288508400	2010	10	31	31	1	43	2010-10-31	2010-10-31	2010-11-02	2010-10-29	3956	3956	3956	3956	1327	1327	1327	1327
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   fl_time,
   fl_date,
@@ -853,7 +853,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: fl_time (type: timestamp), fl_date (type: date), (year(fl_time) = year(fl_date)) (type: boolean), (month(fl_time) = month(fl_date)) (type: boolean), (day(fl_time) = day(fl_date)) (type: boolean), (day(fl_time) = day(fl_date)) (type: boolean), (dayofweek(fl_time) = dayofweek(fl_date)) (type: boolean), (weekofyear(fl_time) = weekofyear(fl_date)) (type: boolean), (CAST( fl_time AS DATE) = fl_date) (type: boolean), (to_date(fl_time) = to_date(fl_date)) (type: boolean), (date_add(fl_time, 2) = date_add(fl_date, 2)) (type: boolean), (date_sub(fl_time, 2) = date_sub(fl_date, 2)) (type: boolean), (datediff(fl_time, '2000-01-01') = datediff(fl_date, '2000-01-01')) (type: boolean), (datediff(fl_time, DATE'2000-01-01') = datediff(fl_date, DATE'2000-01-01')) (type: boolean), (datediff(fl_time, TIMESTAMP'2000-01-01 00:00:00') = datediff(fl_date, TIMESTAMP'2000-01-01 00:00:00')) (type: boolean), (datediff(fl_time, TIMESTAMP'2000-01-01 11:13:09') = datediff(fl_date, TIMESTAMP'2000-01-01 11:13:09')) (type: boolean), (datediff(fl_time, '2007-03-14') = datediff(fl_date, '2007-03-14')) (type: boolean), (datediff(fl_time, DATE'2007-03-14') = datediff(fl_date, DATE'2007-03-14')) (type: boolean), (datediff(fl_time, TIMESTAMP'2007-03-14 00:00:00') = datediff(fl_date, TIMESTAMP'2007-03-14 00:00:00')) (type: boolean), (datediff(fl_time, TIMESTAMP'2007-03-14 08:21:59') = datediff(fl_date, TIMESTAMP'2007-03-14 08:21:59')) (type: boolean), (datediff(fl_date, '2000-01-01') = datediff(fl_date, DATE'2000-01-01')) (type: boolean), (datediff(fl_date, '2007-03-14') = datediff(fl_date, DATE'2007-03-14')) (type: boolean)
+                    expressions: fl_time (type: timestamp), fl_date (type: date), (year(fl_time) = year(fl_date)) (type: boolean), (month(fl_time) = month(fl_date)) (type: boolean), (day(fl_time) = day(fl_date)) (type: boolean), (dayofmonth(fl_time) = dayofmonth(fl_date)) (type: boolean), (dayofweek(fl_time) = dayofweek(fl_date)) (type: boolean), (weekofyear(fl_time) = weekofyear(fl_date)) (type: boolean), (CAST( fl_time AS DATE) = fl_date) (type: boolean), (to_date(fl_time) = to_date(fl_date)) (type: boolean), (date_add(fl_time, 2) = date_add(fl_date, 2)) (type: boolean), (date_sub(fl_time, 2) = date_sub(fl_date, 2)) (type: boolean), (datediff(fl_time, '2000-01-01') = datediff(fl_date, '2000-01-01')) (type: boolean), (datediff(fl_time, DATE'2000-01-01') = datediff(fl_date, DATE'2000-01-01')) (type: boolean), (datediff(fl_time, TIMESTAMP'2000-01-01 00:00:00.0') = datediff(fl_date, TIMESTAMP'2000-01-01 00:00:00.0')) (type: boolean), (datediff(fl_time, TIMESTAMP'2000-01-01 11:13:09.0') = datediff(fl_date, TIMESTAMP'2000-01-01 11:13:09.0')) (type: boolean), (datediff(fl_time, '2007-03-14') = datediff(fl_date, '2007-03-14')) (type: boolean), (datediff(fl_time, DATE'2007-03-14') = datediff(fl_date, DATE'2007-03-14')) (type: boolean), (datediff(fl_time, TIMESTAMP'2007-03-14 00:00:00.0') = datediff(fl_date, TIMESTAMP'2007-03-14 00:00:00.0')) (type: boolean), (datediff(fl_time, TIMESTAMP'2007-03-14 08:21:59.0') = datediff(fl_date, TIMESTAMP'2007-03-14 08:21:59.0')) (type: boolean), (datediff(fl_date, '2000-01-01') = datediff(fl_date, DATE'2000-01-01')) (type: boolean), (datediff(fl_date, '2007-03-14') = datediff(fl_date, DATE'2007-03-14')) (type: boolean)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21
                     Select Vectorization:
                         className: VectorSelectOperator

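Note on the plan diff above: the trailing ".0" that reappears on the restored timestamp literals is a rendering artifact of the types involved. The reverted code paths format timestamps through java.sql.Timestamp, whose toString() always emits at least one fractional digit, while the V2 code formatted through a plain "yyyy-MM-dd HH:mm:ss" DateTimeFormatter (see the DATE_TIME_FORMAT constant in the deleted TimestampWritableV2 further down in this series). A minimal standalone illustration, assuming only the JDK:

    java.sql.Timestamp t = java.sql.Timestamp.valueOf("2000-01-01 00:00:00");
    System.out.println(t);  // 2000-01-01 00:00:00.0 -- fraction always printed
    System.out.println(new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(t));
    // 2000-01-01 00:00:00  -- no trailing .0
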
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out b/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out
index 8b647f8..dfab9db 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out
@@ -81,7 +81,7 @@ POSTHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test_n2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test_n2
 #### A masked pattern was here ####
-0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652058 23:59:59.999999999
+0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652060 23:59:59.999999999
 PREHOOK: query: SELECT ts FROM test_n2 WHERE ts IN (timestamp '0001-01-01 00:00:00.000000000', timestamp '0002-02-02 00:00:00.000000000')
 PREHOOK: type: QUERY
 PREHOOK: Input: default@test_n2
@@ -240,7 +240,7 @@ POSTHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test_n2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test_n2
 #### A masked pattern was here ####
-0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652058 23:59:59.999999999
+0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652060 23:59:59.999999999
 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
 SELECT ts FROM test_n2 WHERE ts IN (timestamp '0001-01-01 00:00:00.000000000', timestamp '0002-02-02 00:00:00.000000000')
 PREHOOK: type: QUERY
@@ -272,8 +272,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterTimestampColumnInList(col 0:timestamp, values [0001-01-02 16:00:00.0, 0002-02-03 16:00:00.0])
-                    predicate: (ts) IN (TIMESTAMP'0001-01-01 00:00:00', TIMESTAMP'0002-02-02 00:00:00') (type: boolean)
+                        predicateExpression: FilterTimestampColumnInList(col 0:timestamp, values [0001-01-01 00:00:00.0, 0002-02-02 00:00:00.0])
+                    predicate: (ts) IN (TIMESTAMP'0001-01-01 00:00:00.0', TIMESTAMP'0002-02-02 00:00:00.0') (type: boolean)
                     Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: ts (type: timestamp)
@@ -465,7 +465,7 @@ POSTHOOK: query: SELECT AVG(ts), CAST(AVG(ts) AS TIMESTAMP) FROM test_n2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test_n2
 #### A masked pattern was here ####
-9.5633352E10	5000-07-02 12:00:00
+9.56332944E10	5000-07-01 13:00:00
 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
 SELECT variance(ts), var_pop(ts), var_samp(ts), std(ts), stddev(ts), stddev_pop(ts), stddev_samp(ts) FROM test_n2
 PREHOOK: type: QUERY
@@ -606,4 +606,4 @@ POSTHOOK: query: SELECT variance(ts), var_pop(ts), var_samp(ts), std(ts), stddev
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test_n2
 #### A masked pattern was here ####
-2.4891041205457024E22	2.4891041205457024E22	4.978208241091405E22	1.577689488E11	1.577689488E11	1.577689488E11	2.2311898711430646E11
+2.489106846793884E22	2.489106846793884E22	4.978213693587768E22	1.577690352E11	1.577690352E11	1.577690352E11	2.2311910930235822E11

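The 3652058 -> 3652060 change in MAX(ts) - MIN(ts) above is consistent with the two-day gap between the proleptic Gregorian calendar used by java.time (the basis of HIVE-12192's UTC computations) and the hybrid Julian/Gregorian calendar behind java.sql.Timestamp, which this revert restores: Julian leap rules add 12 extra century leap days before 1582, and the cutover itself drops 10 days, for a net of +2. A minimal sketch of the gap, assuming only the standard JDK calendars:

    import java.time.LocalDate;
    import java.time.temporal.ChronoUnit;
    import java.util.GregorianCalendar;
    import java.util.TimeZone;

    public class CalendarGap {
      public static void main(String[] args) {
        // Proleptic Gregorian, as used by java.time:
        long proleptic = ChronoUnit.DAYS.between(
            LocalDate.of(1, 1, 1), LocalDate.of(9999, 12, 31));

        // Hybrid Julian/Gregorian, as used by java.util/java.sql:
        TimeZone utc = TimeZone.getTimeZone("UTC");
        GregorianCalendar min = new GregorianCalendar(utc);
        min.clear();
        min.set(1, 0, 1);         // 0001-01-01
        GregorianCalendar max = new GregorianCalendar(utc);
        max.clear();
        max.set(9999, 11, 31);    // 9999-12-31
        long hybrid = (max.getTimeInMillis() - min.getTimeInMillis()) / 86_400_000L;

        System.out.println(proleptic + " vs " + hybrid); // 3652058 vs 3652060
      }
    }
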

[02/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritableV2.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritableV2.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritableV2.java
deleted file mode 100644
index 9aa7f19..0000000
--- a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritableV2.java
+++ /dev/null
@@ -1,625 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.serde2.io;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.time.format.DateTimeFormatter;
-
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.common.type.TimestampUtils;
-import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
-import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
-import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.WritableUtils;
-
-/**
- * TimestampWritableV2
- *
- * Timestamps are of the format
- *    YYYY-MM-DD HH:MM:SS.[fff...]
- *
- * We encode Unix timestamp in seconds in 4 bytes, using the MSB to signify
- * whether the timestamp has a fractional portion.
- *
- * The fractional portion is reversed, and encoded as a VInt
- * so timestamps with less precision use fewer bytes.
- *
- *      0.1    -&gt; 1
- *      0.01   -&gt; 10
- *      0.001  -&gt; 100
- *
- */
-public class TimestampWritableV2 implements WritableComparable<TimestampWritableV2> {
-
-  static final public byte[] nullBytes = {0x0, 0x0, 0x0, 0x0};
-
-  private static final int DECIMAL_OR_SECOND_VINT_FLAG = 0x80000000;
-  private static final int LOWEST_31_BITS_OF_SEC_MASK = 0x7fffffff;
-
-  private static final long SEVEN_BYTE_LONG_SIGN_FLIP = 0xff80L << 48;
-
-
-  /** The maximum number of bytes required for a TimestampWritableV2 */
-  public static final int MAX_BYTES = 13;
-
-  public static final int BINARY_SORTABLE_LENGTH = 11;
-
-  public static final DateTimeFormatter DATE_TIME_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
-
-  private Timestamp timestamp = new Timestamp();
-
-  /**
-   * true if data is stored in timestamp field rather than byte arrays.
-   *      allows for lazy conversion to bytes when necessary
-   * false otherwise
-   */
-  private boolean bytesEmpty;
-  private boolean timestampEmpty;
-
-  /* Allow use of external byte[] for efficiency */
-  private byte[] currentBytes;
-  private final byte[] internalBytes = new byte[MAX_BYTES];
-  private byte[] externalBytes;
-  private int offset;
-
-  /* Constructors */
-  public TimestampWritableV2() {
-    bytesEmpty = false;
-    currentBytes = internalBytes;
-    offset = 0;
-
-    clearTimestamp();
-  }
-
-  public TimestampWritableV2(byte[] bytes, int offset) {
-    set(bytes, offset);
-  }
-
-  public TimestampWritableV2(TimestampWritableV2 t) {
-    this(t.getBytes(), 0);
-  }
-
-  public TimestampWritableV2(Timestamp t) {
-    set(t);
-  }
-
-  public void set(byte[] bytes, int offset) {
-    externalBytes = bytes;
-    this.offset = offset;
-    bytesEmpty = false;
-    currentBytes = externalBytes;
-
-    clearTimestamp();
-  }
-
-  public void set(Timestamp t) {
-    if (t == null) {
-      timestamp.set(null);
-      return;
-    }
-    timestamp.set(t);
-    bytesEmpty = true;
-    timestampEmpty = false;
-  }
-
-  public void set(TimestampWritableV2 t) {
-    if (t.bytesEmpty) {
-      set(t.getTimestamp());
-      return;
-    }
-    if (t.currentBytes == t.externalBytes) {
-      set(t.currentBytes, t.offset);
-    } else {
-      set(t.currentBytes, 0);
-    }
-  }
-
-  public static void updateTimestamp(Timestamp timestamp, long secondsAsMillis, int nanos) {
-    timestamp.setTimeInMillis(secondsAsMillis, nanos);
-  }
-
-  public void setInternal(long secondsAsMillis, int nanos) {
-    // This is our way of documenting that we are MUTATING the contents of
-    // this writable's internal timestamp.
-    updateTimestamp(timestamp, secondsAsMillis, nanos);
-
-    bytesEmpty = true;
-    timestampEmpty = false;
-  }
-
-  private void clearTimestamp() {
-    timestampEmpty = true;
-  }
-
-  public void writeToByteStream(RandomAccessOutput byteStream) {
-    checkBytes();
-    byteStream.write(currentBytes, offset, getTotalLength());
-  }
-
-  /**
-   *
-   * @return seconds corresponding to this TimestampWritableV2
-   */
-  public long getSeconds() {
-    if (!timestampEmpty) {
-      return timestamp.toEpochSecond();
-    } else if (!bytesEmpty) {
-      return TimestampWritableV2.getSeconds(currentBytes, offset);
-    } else {
-      throw new IllegalStateException("Both timestamp and bytes are empty");
-    }
-  }
-
-  /**
-   *
-   * @return nanoseconds in this TimestampWritableV2
-   */
-  public int getNanos() {
-    if (!timestampEmpty) {
-      return timestamp.getNanos();
-    } else if (!bytesEmpty) {
-      return hasDecimalOrSecondVInt() ?
-          TimestampWritableV2.getNanos(currentBytes, offset + 4) : 0;
-    } else {
-      throw new IllegalStateException("Both timestamp and bytes are empty");
-    }
-  }
-
-  /**
-   * @return length of serialized TimestampWritableV2 data. As a side effect, populates the internal
-   *         byte array if empty.
-   */
-  int getTotalLength() {
-    checkBytes();
-    return getTotalLength(currentBytes, offset);
-  }
-
-  public static int getTotalLength(byte[] bytes, int offset) {
-    int len = 4;
-    if (hasDecimalOrSecondVInt(bytes[offset])) {
-      int firstVIntLen = WritableUtils.decodeVIntSize(bytes[offset + 4]);
-      len += firstVIntLen;
-      if (hasSecondVInt(bytes[offset + 4])) {
-        len += WritableUtils.decodeVIntSize(bytes[offset + 4 + firstVIntLen]);
-      }
-    }
-    return len;
-  }
-
-  public Timestamp getTimestamp() {
-    if (timestampEmpty) {
-      populateTimestamp();
-    }
-    return timestamp;
-  }
-
-  /**
-   * Used to create copies of objects
-   * @return a copy of the internal TimestampWritableV2 byte[]
-   */
-  public byte[] getBytes() {
-    checkBytes();
-
-    int len = getTotalLength();
-    byte[] b = new byte[len];
-
-    System.arraycopy(currentBytes, offset, b, 0, len);
-    return b;
-  }
-
-  /**
-   * @return byte[] representation of TimestampWritableV2 that is binary
-   * sortable (7 bytes for seconds, 4 bytes for nanoseconds)
-   */
-  public byte[] getBinarySortable() {
-    byte[] b = new byte[BINARY_SORTABLE_LENGTH];
-    int nanos = getNanos();
-    // We flip the highest-order bit of the seven-byte representation of seconds to make negative
-    // values come before positive ones.
-    long seconds = getSeconds() ^ SEVEN_BYTE_LONG_SIGN_FLIP;
-    sevenByteLongToBytes(seconds, b, 0);
-    intToBytes(nanos, b, 7);
-    return b;
-  }
-
-  /**
-   * Given a byte[] that has binary sortable data, initialize the internal
-   * structures to hold that data
-   * @param bytes the byte array that holds the binary sortable representation
-   * @param binSortOffset offset of the binary-sortable representation within the buffer.
-   */
-  public void setBinarySortable(byte[] bytes, int binSortOffset) {
-    // Flip the sign bit (and unused bits of the high-order byte) of the seven-byte long back.
-    long seconds = readSevenByteLong(bytes, binSortOffset) ^ SEVEN_BYTE_LONG_SIGN_FLIP;
-    int nanos = bytesToInt(bytes, binSortOffset + 7);
-    int firstInt = (int) seconds;
-    boolean hasSecondVInt = seconds < 0 || seconds > Integer.MAX_VALUE;
-    if (nanos != 0 || hasSecondVInt) {
-      firstInt |= DECIMAL_OR_SECOND_VINT_FLAG;
-    } else {
-      firstInt &= LOWEST_31_BITS_OF_SEC_MASK;
-    }
-
-    intToBytes(firstInt, internalBytes, 0);
-    setNanosBytes(nanos, internalBytes, 4, hasSecondVInt);
-    if (hasSecondVInt) {
-      LazyBinaryUtils.writeVLongToByteArray(internalBytes,
-          4 + WritableUtils.decodeVIntSize(internalBytes[4]),
-          seconds >> 31);
-    }
-
-    currentBytes = internalBytes;
-    this.offset = 0;
-  }
-
-  /**
-   * The data of TimestampWritableV2 can be stored either in a byte[]
-   * or in a Timestamp object. Calling this method ensures that the byte[]
-   * is populated from the Timestamp object if previously empty.
-   */
-  private void checkBytes() {
-    if (bytesEmpty) {
-      // Populate byte[] from Timestamp
-      convertTimestampToBytes(timestamp, internalBytes, 0);
-      offset = 0;
-      currentBytes = internalBytes;
-      bytesEmpty = false;
-    }
-  }
-
-  /**
-   *
-   * @return double representation of the timestamp, accurate to nanoseconds
-   */
-  public double getDouble() {
-    double seconds, nanos;
-    if (bytesEmpty) {
-      seconds = timestamp.toEpochSecond();
-      nanos = timestamp.getNanos();
-    } else {
-      seconds = getSeconds();
-      nanos = getNanos();
-    }
-    return seconds + nanos / 1000000000;
-  }
-
-  public static long getLong(Timestamp timestamp) {
-    return timestamp.toEpochSecond();
-  }
-
-  public void readFields(DataInput in) throws IOException {
-    in.readFully(internalBytes, 0, 4);
-    if (TimestampWritableV2.hasDecimalOrSecondVInt(internalBytes[0])) {
-      in.readFully(internalBytes, 4, 1);
-      int len = (byte) WritableUtils.decodeVIntSize(internalBytes[4]);
-      if (len > 1) {
-        in.readFully(internalBytes, 5, len-1);
-      }
-
-      long vlong = LazyBinaryUtils.readVLongFromByteArray(internalBytes, 4);
-      if (vlong < -1000000000 || vlong > 999999999) {
-        throw new IOException(
-            "Invalid first vint value (encoded nanoseconds) of a TimestampWritableV2: " + vlong +
-            ", expected to be between -1000000000 and 999999999.");
-        // Note that -1000000000 is a valid value corresponding to a nanosecond timestamp
-        // of 999999999, because if the second VInt is present, we use the value
-        // (-reversedNanoseconds - 1) as the second VInt.
-      }
-      if (vlong < 0) {
-        // This indicates there is a second VInt containing the additional bits of the seconds
-        // field.
-        in.readFully(internalBytes, 4 + len, 1);
-        int secondVIntLen = (byte) WritableUtils.decodeVIntSize(internalBytes[4 + len]);
-        if (secondVIntLen > 1) {
-          in.readFully(internalBytes, 5 + len, secondVIntLen - 1);
-        }
-      }
-    }
-    currentBytes = internalBytes;
-    this.offset = 0;
-  }
-
-  public void write(DataOutput out) throws IOException {
-    checkBytes();
-    out.write(currentBytes, offset, getTotalLength());
-  }
-
-  public int compareTo(TimestampWritableV2 t) {
-    checkBytes();
-    long s1 = this.getSeconds();
-    long s2 = t.getSeconds();
-    if (s1 == s2) {
-      int n1 = this.getNanos();
-      int n2 = t.getNanos();
-      if (n1 == n2) {
-        return 0;
-      }
-      return n1 - n2;
-    } else {
-      return s1 < s2 ? -1 : 1;
-    }
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    return compareTo((TimestampWritableV2) o) == 0;
-  }
-
-  @Override
-  public String toString() {
-    if (timestampEmpty) {
-      populateTimestamp();
-    }
-
-    if (timestamp.getNanos() > 0) {
-      return timestamp.toString();
-    }
-
-    String timestampString = timestamp.toString();
-    if (timestampString.length() > 19) {
-      if (timestampString.length() == 21) {
-        if (timestampString.substring(19).compareTo(".0") == 0) {
-          return timestamp.format(DATE_TIME_FORMAT);
-        }
-      }
-      return timestamp.format(DATE_TIME_FORMAT) + timestampString.substring(19);
-    }
-
-    return timestamp.format(DATE_TIME_FORMAT);
-  }
-
-  @Override
-  public int hashCode() {
-    long seconds = getSeconds();
-    seconds <<= 30;  // the nanosecond part fits in 30 bits
-    seconds |= getNanos();
-    return (int) ((seconds >>> 32) ^ seconds);
-  }
-
-  private void populateTimestamp() {
-    long seconds = getSeconds();
-    int nanos = getNanos();
-    timestamp.setTimeInSeconds(seconds, nanos);
-  }
-
-  /** Static methods **/
-
-  /**
-   * Gets seconds stored as integer at bytes[offset]
-   * @param bytes
-   * @param offset
-   * @return the number of seconds
-   */
-  public static long getSeconds(byte[] bytes, int offset) {
-    int lowest31BitsOfSecondsAndFlag = bytesToInt(bytes, offset);
-    if (lowest31BitsOfSecondsAndFlag >= 0 ||  // the "has decimal or second VInt" flag is not set
-        !hasSecondVInt(bytes[offset + 4])) {
-      // The entire seconds field is stored in the first 4 bytes.
-      return lowest31BitsOfSecondsAndFlag & LOWEST_31_BITS_OF_SEC_MASK;
-    }
-
-    // We compose the seconds field from two parts. The lowest 31 bits come from the first four
-    // bytes. The higher-order bits come from the second VInt that follows the nanos field.
-    return ((long) (lowest31BitsOfSecondsAndFlag & LOWEST_31_BITS_OF_SEC_MASK)) |
-           (LazyBinaryUtils.readVLongFromByteArray(bytes,
-               offset + 4 + WritableUtils.decodeVIntSize(bytes[offset + 4])) << 31);
-  }
-
-  public static int getNanos(byte[] bytes, int offset) {
-    VInt vInt = LazyBinaryUtils.threadLocalVInt.get();
-    LazyBinaryUtils.readVInt(bytes, offset, vInt);
-    int val = vInt.value;
-    if (val < 0) {
-      // This means there is a second VInt present that specifies additional bits of the timestamp.
-      // The reversed nanoseconds value is still encoded in this VInt.
-      val = -val - 1;
-    }
-    int len = (int) Math.floor(Math.log10(val)) + 1;
-
-    // Reverse the value
-    int tmp = 0;
-    while (val != 0) {
-      tmp *= 10;
-      tmp += val % 10;
-      val /= 10;
-    }
-    val = tmp;
-
-    if (len < 9) {
-      val *= Math.pow(10, 9 - len);
-    }
-    return val;
-  }
-
-  /**
-   * Writes a Timestamp's serialized value to byte array b at the given offset
-   * @param t to convert to bytes
-   * @param b destination byte array
-   * @param offset destination offset in the byte array
-   */
-  public static void convertTimestampToBytes(Timestamp t, byte[] b,
-      int offset) {
-    long seconds = t.toEpochSecond();
-    int nanos = t.getNanos();
-
-    boolean hasSecondVInt = seconds < 0 || seconds > Integer.MAX_VALUE;
-    boolean hasDecimal = setNanosBytes(nanos, b, offset+4, hasSecondVInt);
-
-    int firstInt = (int) seconds;
-    if (hasDecimal || hasSecondVInt) {
-      firstInt |= DECIMAL_OR_SECOND_VINT_FLAG;
-    } else {
-      firstInt &= LOWEST_31_BITS_OF_SEC_MASK;
-    }
-    intToBytes(firstInt, b, offset);
-
-    if (hasSecondVInt) {
-      LazyBinaryUtils.writeVLongToByteArray(b,
-          offset + 4 + WritableUtils.decodeVIntSize(b[offset + 4]),
-          seconds >> 31);
-    }
-  }
-
-  /**
-   * Given an integer representing nanoseconds, write its serialized
-   * value to the byte array b at offset
-   *
-   * @param nanos
-   * @param b
-   * @param offset
-   * @return
-   */
-  private static boolean setNanosBytes(int nanos, byte[] b, int offset, boolean hasSecondVInt) {
-    int decimal = 0;
-    if (nanos != 0) {
-      int counter = 0;
-      while (counter < 9) {
-        decimal *= 10;
-        decimal += nanos % 10;
-        nanos /= 10;
-        counter++;
-      }
-    }
-
-    if (hasSecondVInt || decimal != 0) {
-      // We use the sign of the reversed-nanoseconds field to indicate that there is a second VInt
-      // present.
-      LazyBinaryUtils.writeVLongToByteArray(b, offset, hasSecondVInt ? (-decimal - 1) : decimal);
-    }
-    return decimal != 0;
-  }
-
-  public HiveDecimal getHiveDecimal() {
-    if (timestampEmpty) {
-      populateTimestamp();
-    }
-    return getHiveDecimal(timestamp);
-  }
-
-  public static HiveDecimal getHiveDecimal(Timestamp timestamp) {
-    // The BigDecimal class recommends not converting directly from double to BigDecimal,
-    // so we convert through a string...
-    Double timestampDouble = TimestampUtils.getDouble(timestamp);
-    HiveDecimal result = HiveDecimal.create(timestampDouble.toString());
-    return result;
-  }
-
-  /**
-   * Converts the time in seconds or milliseconds to a timestamp.
-   * @param time time in seconds or in milliseconds
-   * @return the timestamp
-   */
-  public static Timestamp longToTimestamp(long time, boolean intToTimestampInSeconds) {
-    // If the time is in seconds, converts it to milliseconds first.
-    if (intToTimestampInSeconds) {
-      return Timestamp.ofEpochSecond(time);
-    }
-    return Timestamp.ofEpochMilli(time);
-  }
-
-  public static void setTimestamp(Timestamp t, byte[] bytes, int offset) {
-    long seconds = getSeconds(bytes, offset);
-    int nanos;
-    if (hasDecimalOrSecondVInt(bytes[offset])) {
-      nanos = getNanos(bytes, offset + 4);
-    } else {
-      nanos = 0;
-    }
-    t.setTimeInSeconds(seconds, nanos);
-  }
-
-  public static Timestamp createTimestamp(byte[] bytes, int offset) {
-    Timestamp t = new Timestamp();
-    TimestampWritableV2.setTimestamp(t, bytes, offset);
-    return t;
-  }
-
-  private static boolean hasDecimalOrSecondVInt(byte b) {
-    return (b >> 7) != 0;
-  }
-
-  private static boolean hasSecondVInt(byte b) {
-    return WritableUtils.isNegativeVInt(b);
-  }
-
-  private final boolean hasDecimalOrSecondVInt() {
-    return hasDecimalOrSecondVInt(currentBytes[offset]);
-  }
-
-  public final boolean hasDecimal() {
-    return hasDecimalOrSecondVInt() || currentBytes[offset + 4] != -1;
-    // If the first byte of the VInt is -1, the VInt itself is -1, indicating that there is a
-    // second VInt but the nanoseconds field is actually 0.
-  }
-
-  /**
-   * Writes <code>value</code> into <code>dest</code> at <code>offset</code>
-   * @param value
-   * @param dest
-   * @param offset
-   */
-  private static void intToBytes(int value, byte[] dest, int offset) {
-    dest[offset] = (byte) ((value >> 24) & 0xFF);
-    dest[offset+1] = (byte) ((value >> 16) & 0xFF);
-    dest[offset+2] = (byte) ((value >> 8) & 0xFF);
-    dest[offset+3] = (byte) (value & 0xFF);
-  }
-
-  /**
-   * Writes <code>value</code> into <code>dest</code> at <code>offset</code> as a seven-byte
-   * serialized long number.
-   */
-  static void sevenByteLongToBytes(long value, byte[] dest, int offset) {
-    dest[offset] = (byte) ((value >> 48) & 0xFF);
-    dest[offset+1] = (byte) ((value >> 40) & 0xFF);
-    dest[offset+2] = (byte) ((value >> 32) & 0xFF);
-    dest[offset+3] = (byte) ((value >> 24) & 0xFF);
-    dest[offset+4] = (byte) ((value >> 16) & 0xFF);
-    dest[offset+5] = (byte) ((value >> 8) & 0xFF);
-    dest[offset+6] = (byte) (value & 0xFF);
-  }
-
-  /**
-   *
-   * @param bytes
-   * @param offset
-   * @return integer represented by the four bytes in <code>bytes</code>
-   *  beginning at <code>offset</code>
-   */
-  private static int bytesToInt(byte[] bytes, int offset) {
-    return ((0xFF & bytes[offset]) << 24)
-        | ((0xFF & bytes[offset+1]) << 16)
-        | ((0xFF & bytes[offset+2]) << 8)
-        | (0xFF & bytes[offset+3]);
-  }
-
-  static long readSevenByteLong(byte[] bytes, int offset) {
-    // We need to shift everything 8 bits left and then shift back to populate the sign field.
-    return (((0xFFL & bytes[offset]) << 56)
-        | ((0xFFL & bytes[offset+1]) << 48)
-        | ((0xFFL & bytes[offset+2]) << 40)
-        | ((0xFFL & bytes[offset+3]) << 32)
-        | ((0xFFL & bytes[offset+4]) << 24)
-        | ((0xFFL & bytes[offset+5]) << 16)
-        | ((0xFFL & bytes[offset+6]) << 8)) >> 8;
-  }
-}

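The class comment in the deleted file above is terse about its digit-reversal trick, so for the record: setNanosBytes/getNanos reverse all nine nanosecond digits, which turns the leading zeros of small fractions into trailing zeros that the VInt then stores compactly (0.1s -> 1, 0.001s -> 100). Below is a self-contained sketch of just that reversal, with hypothetical method names; the real code additionally packs flag bits, emits a second VInt for out-of-range seconds, and (in getBinarySortable) flips the sign bit of the seven-byte seconds so negative values sort first.

    public class NanosReversalDemo {
      // Mirrors setNanosBytes: always walk 9 digits, so 0.1s encodes as 1.
      static int encode(int nanos) {
        int decimal = 0;
        for (int i = 0; i < 9; i++) {
          decimal = decimal * 10 + nanos % 10;
          nanos /= 10;
        }
        return decimal;
      }

      // Mirrors getNanos for nonzero values: reverse back, then pad to 9 digits.
      static int decode(int val) {
        int len = (int) Math.floor(Math.log10(val)) + 1;
        int reversed = 0;
        while (val != 0) {
          reversed = reversed * 10 + val % 10;
          val /= 10;
        }
        return (int) (reversed * Math.pow(10, 9 - len));
      }

      public static void main(String[] args) {
        System.out.println(encode(100_000_000)); // 1    (0.1 s)
        System.out.println(encode(1_000_000));   // 100  (0.001 s)
        System.out.println(decode(100));         // 1000000
      }
    }
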
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
index e464339..c50cd40 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hive.serde2.lazy;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+import java.sql.Date;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDateObjectInspector;
 import org.apache.hadoop.io.Text;
 
@@ -36,17 +36,17 @@ import org.apache.hadoop.io.Text;
  *    YYYY-MM-DD
  *
  */
-public class LazyDate extends LazyPrimitive<LazyDateObjectInspector, DateWritableV2> {
+public class LazyDate extends LazyPrimitive<LazyDateObjectInspector, DateWritable> {
   private static final Logger LOG = LoggerFactory.getLogger(LazyDate.class);
 
   public LazyDate(LazyDateObjectInspector oi) {
     super(oi);
-    data = new DateWritableV2();
+    data = new DateWritable();
   }
 
   public LazyDate(LazyDate copy) {
     super(copy);
-    data = new DateWritableV2(copy.data);
+    data = new DateWritable(copy.data);
   }
 
   /**
@@ -81,7 +81,7 @@ public class LazyDate extends LazyPrimitive<LazyDateObjectInspector, DateWritabl
    *          The Date to write
    * @throws IOException
    */
-  public static void writeUTF8(OutputStream out, DateWritableV2 d)
+  public static void writeUTF8(OutputStream out, DateWritable d)
       throws IOException {
     ByteBuffer b = Text.encode(d.toString());
     out.write(b.array(), 0, b.limit());

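Swapping org.apache.hadoop.hive.common.type.Date back for java.sql.Date is the substance of this hunk: DateWritable models a date as days since epoch, but java.sql.Date wraps epoch milliseconds interpreted in the JVM's default time zone, so naive millis-to-days division shifts with the zone offset (which is why the legacy DateWritable has to fold the local offset back in). A standalone sketch of the pitfall, using demo code rather than Hive's classes:

    import java.util.TimeZone;

    public class SqlDateZonePitfall {
      static long naiveDays(String date) {
        // java.sql.Date.valueOf builds midnight in the *default* zone.
        return java.sql.Date.valueOf(date).getTime() / 86_400_000L;
      }

      public static void main(String[] args) {
        TimeZone.setDefault(TimeZone.getTimeZone("Asia/Tokyo"));
        long tokyo = naiveDays("2010-10-21");        // 14902
        TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
        long la = naiveDays("2010-10-21");           // 14903
        System.out.println(tokyo + " vs " + la);     // same date, different day count
      }
    }
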
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
index 3473c56..ee801ee 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hive.serde2.lazy;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.UnsupportedEncodingException;
+import java.sql.Timestamp;
 
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector;
 
 /**
@@ -35,17 +35,17 @@ import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestam
  *    YYYY-MM-DD HH:MM:SS.[fff...]
  *
  */
-public class LazyTimestamp extends LazyPrimitive<LazyTimestampObjectInspector, TimestampWritableV2> {
+public class LazyTimestamp extends LazyPrimitive<LazyTimestampObjectInspector, TimestampWritable> {
   private static final Logger LOG = LoggerFactory.getLogger(LazyTimestamp.class);
 
   public LazyTimestamp(LazyTimestampObjectInspector oi) {
     super(oi);
-    data = new TimestampWritableV2();
+    data = new TimestampWritable();
   }
 
   public LazyTimestamp(LazyTimestamp copy) {
     super(copy);
-    data = new TimestampWritableV2(copy.data);
+    data = new TimestampWritable(copy.data);
   }
 
   /**
@@ -94,18 +94,18 @@ public class LazyTimestamp extends LazyPrimitive<LazyTimestampObjectInspector, T
    *          The Timestamp to write
    * @throws IOException
    */
-  public static void writeUTF8(OutputStream out, TimestampWritableV2 i)
+  public static void writeUTF8(OutputStream out, TimestampWritable i)
       throws IOException {
     if (i == null) {
       // Serialize as time 0
-      out.write(TimestampWritableV2.nullBytes);
+      out.write(TimestampWritable.nullBytes);
     } else {
       out.write(i.toString().getBytes("US-ASCII"));
     }
   }
 
   @Override
-  public TimestampWritableV2 getWritableObject() {
+  public TimestampWritable getWritableObject() {
     return data;
   }
 }

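One subtlety in the hunk above: writeUTF8 emits TimestampWritable.nullBytes (four zero bytes) for a null, and the "Serialize as time 0" comment reads against the binary layout described in the deleted TimestampWritableV2 earlier, where an all-zero 4-byte word is exactly epoch seconds 0 with the decimal flag clear. A hypothetical standalone decode of that case, mirroring the bytesToInt-plus-mask logic shown above:

    // Decodes the 4-byte seconds+flag word of the layout described earlier.
    static long seconds(byte[] b) {
      int firstInt = ((0xFF & b[0]) << 24) | ((0xFF & b[1]) << 16)
                   | ((0xFF & b[2]) << 8) | (0xFF & b[3]);
      // MSB clear => no nanos VInt and no second VInt follow;
      // the lowest 31 bits hold the seconds directly.
      return firstInt & 0x7fffffff;
    }
    // seconds(new byte[] {0, 0, 0, 0}) == 0, i.e. "time 0".
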
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java
index 14ff6d2..17c0357 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java
@@ -17,20 +17,20 @@
  */
 package org.apache.hadoop.hive.serde2.lazy;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -38,7 +38,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryArray;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryMap;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryStruct;
@@ -301,7 +301,7 @@ public class VerifyLazy {
             throw new RuntimeException("Expected LazyDate");
           }
           Date value = ((LazyDate) primitiveObject).getWritableObject().get();
-          Date expected = ((DateWritableV2) expectedObject).get();
+          Date expected = ((DateWritable) expectedObject).get();
           if (!value.equals(expected)) {
             throw new RuntimeException("Date field mismatch (expected " + expected + " found " + value + ")");
           }
@@ -310,10 +310,10 @@ public class VerifyLazy {
       case TIMESTAMP:
         {
           if (!(primitiveObject instanceof LazyTimestamp)) {
-            throw new RuntimeException("TimestampWritableV2 expected writable not TimestampWritableV2");
+            throw new RuntimeException("TimestampWritable expected writable not TimestampWritable");
           }
           Timestamp value = ((LazyTimestamp) primitiveObject).getWritableObject().getTimestamp();
-          Timestamp expected = ((TimestampWritableV2) expectedObject).getTimestamp();
+          Timestamp expected = ((TimestampWritable) expectedObject).getTimestamp();
           if (!value.equals(expected)) {
             throw new RuntimeException("Timestamp field mismatch (expected " + expected + " found " + value + ")");
           }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
index 45c44da..1890f18 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
@@ -21,10 +21,10 @@ package org.apache.hadoop.hive.serde2.lazy.fast;
 import java.io.IOException;
 import java.nio.charset.CharacterCodingException;
 import java.nio.charset.StandardCharsets;
+import java.sql.Date;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
index a42d6f4..356326c 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
@@ -20,14 +20,14 @@ package org.apache.hadoop.hive.serde2.lazy.fast;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayDeque;
 import java.util.Deque;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.HiveChar;
@@ -35,12 +35,12 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyDate;
 import org.apache.hadoop.hive.serde2.lazy.LazyHiveDecimal;
 import org.apache.hadoop.hive.serde2.lazy.LazyHiveIntervalDayTime;
@@ -77,8 +77,8 @@ public final class LazySimpleSerializeWrite implements SerializeWrite {
   private Deque<Integer> indexStack = new ArrayDeque<Integer>();
 
   // For thread safety, we allocate private writable objects for our use only.
-  private DateWritableV2 dateWritable;
-  private TimestampWritableV2 timestampWritable;
+  private DateWritable dateWritable;
+  private TimestampWritable timestampWritable;
   private HiveIntervalYearMonthWritable hiveIntervalYearMonthWritable;
   private HiveIntervalDayTimeWritable hiveIntervalDayTimeWritable;
   private HiveIntervalDayTime hiveIntervalDayTime;
@@ -299,7 +299,7 @@ public final class LazySimpleSerializeWrite implements SerializeWrite {
   public void writeDate(Date date) throws IOException {
     beginPrimitive();
     if (dateWritable == null) {
-      dateWritable = new DateWritableV2();
+      dateWritable = new DateWritable();
     }
     dateWritable.set(date);
     LazyDate.writeUTF8(output, dateWritable);
@@ -311,7 +311,7 @@ public final class LazySimpleSerializeWrite implements SerializeWrite {
   public void writeDate(int dateAsDays) throws IOException {
     beginPrimitive();
     if (dateWritable == null) {
-      dateWritable = new DateWritableV2();
+      dateWritable = new DateWritable();
     }
     dateWritable.set(dateAsDays);
     LazyDate.writeUTF8(output, dateWritable);
@@ -325,7 +325,7 @@ public final class LazySimpleSerializeWrite implements SerializeWrite {
   public void writeTimestamp(Timestamp v) throws IOException {
     beginPrimitive();
     if (timestampWritable == null) {
-      timestampWritable = new TimestampWritableV2();
+      timestampWritable = new TimestampWritable();
     }
     timestampWritable.set(v);
     LazyTimestamp.writeUTF8(output, timestampWritable);

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java
index e356d23..3bc4ff7 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java
@@ -17,17 +17,18 @@
  */
 package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import java.sql.Date;
+
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyDate;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 /**
- * A WritableDateObjectInspector inspects a DateWritableV2 Object.
+ * A WritableDateObjectInspector inspects a DateWritable Object.
  */
 public class LazyDateObjectInspector
-    extends AbstractPrimitiveLazyObjectInspector<DateWritableV2>
+    extends AbstractPrimitiveLazyObjectInspector<DateWritable>
     implements DateObjectInspector {
 
   protected LazyDateObjectInspector() {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java
index a10a722..e0f993e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java
@@ -17,17 +17,17 @@
  */
 package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive;
 
+import java.sql.Timestamp;
 import java.util.List;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyTimestamp;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hive.common.util.TimestampParser;
 
 public class LazyTimestampObjectInspector
-    extends AbstractPrimitiveLazyObjectInspector<TimestampWritableV2>
+    extends AbstractPrimitiveLazyObjectInspector<TimestampWritable>
     implements TimestampObjectInspector {
 
   protected List<String> timestampFormats = null;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java
index fbfe961..2952e26 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.lazybinary;
 
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjectInspector;
@@ -29,17 +29,17 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjec
  *
  */
 public class LazyBinaryDate extends
-    LazyBinaryPrimitive<WritableDateObjectInspector, DateWritableV2> {
+    LazyBinaryPrimitive<WritableDateObjectInspector, DateWritable> {
   static final Logger LOG = LoggerFactory.getLogger(LazyBinaryDate.class);
 
   LazyBinaryDate(WritableDateObjectInspector oi) {
     super(oi);
-    data = new DateWritableV2();
+    data = new DateWritable();
   }
 
   LazyBinaryDate(LazyBinaryDate copy) {
     super(copy);
-    data = new DateWritableV2(copy.data);
+    data = new DateWritable(copy.data);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
index 660080c..24704a1 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
@@ -24,7 +24,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector;
 import org.slf4j.Logger;
@@ -38,11 +37,12 @@ import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -318,7 +318,7 @@ public class LazyBinarySerDe extends AbstractSerDe {
   }
 
   public static void writeDateToByteStream(RandomAccessOutput byteStream,
-                                            DateWritableV2 date) {
+                                            DateWritable date) {
     LazyBinaryUtils.writeVInt(byteStream, date.getDays());
   }
 
@@ -505,13 +505,13 @@ public class LazyBinarySerDe extends AbstractSerDe {
       }
 
       case DATE: {
-        DateWritableV2 d = ((DateObjectInspector) poi).getPrimitiveWritableObject(obj);
+        DateWritable d = ((DateObjectInspector) poi).getPrimitiveWritableObject(obj);
         writeDateToByteStream(byteStream, d);
         return;
       }
       case TIMESTAMP: {
         TimestampObjectInspector toi = (TimestampObjectInspector) poi;
-        TimestampWritableV2 t = toi.getPrimitiveWritableObject(obj);
+        TimestampWritable t = toi.getPrimitiveWritableObject(obj);
         t.writeToByteStream(byteStream);
         return;
       }
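
writeDateToByteStream above encodes a date as nothing but a variable-length int of days since the Unix epoch, taken from DateWritable.getDays(). A hedged sketch of the day arithmetic (the class and method here are illustrative, not the DateWritable internals):

    import java.sql.Date;

    public class SqlDateDays {
      // Day count in the JVM's default zone, matching the local-time
      // semantics of java.sql.Date that this revert restores.
      static int dateToDays(Date d) {
        return (int) d.toLocalDate().toEpochDay();
      }

      public static void main(String[] args) {
        System.out.println(dateToDays(Date.valueOf("1970-01-02"))); // 1
      }
    }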

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe2.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe2.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe2.java
index 3e06892..b328508 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe2.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe2.java
@@ -27,11 +27,11 @@ import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -337,7 +337,7 @@ public class LazyBinarySerDe2 extends LazyBinarySerDe {
     @Override
     void serialize(RandomAccessOutput byteStream, Object obj, ObjectInspector objInspector,
         boolean skipLengthPrefix, BooleanRef warnedOnceNullMapKey) {
-      DateWritableV2 d = ((DateObjectInspector) objInspector).getPrimitiveWritableObject(obj);
+      DateWritable d = ((DateObjectInspector) objInspector).getPrimitiveWritableObject(obj);
       LazyBinarySerDe.writeDateToByteStream(byteStream, d);
     }
   }
@@ -347,7 +347,7 @@ public class LazyBinarySerDe2 extends LazyBinarySerDe {
     void serialize(RandomAccessOutput byteStream, Object obj, ObjectInspector objInspector,
         boolean skipLengthPrefix, BooleanRef warnedOnceNullMapKey) {
       TimestampObjectInspector toi = (TimestampObjectInspector) objInspector;
-      TimestampWritableV2 t = toi.getPrimitiveWritableObject(obj);
+      TimestampWritable t = toi.getPrimitiveWritableObject(obj);
       t.writeToByteStream(byteStream);
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java
index 724a167..fa5461a 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestamp.java
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.lazybinary;
 
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector;
 
@@ -29,17 +29,17 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestamp
  *
  */
 public class LazyBinaryTimestamp extends
-    LazyBinaryPrimitive<WritableTimestampObjectInspector, TimestampWritableV2> {
+    LazyBinaryPrimitive<WritableTimestampObjectInspector, TimestampWritable> {
   static final Logger LOG = LoggerFactory.getLogger(LazyBinaryTimestamp.class);
 
   LazyBinaryTimestamp(WritableTimestampObjectInspector oi) {
     super(oi);
-    data = new TimestampWritableV2();
+    data = new TimestampWritable();
   }
 
   LazyBinaryTimestamp(LazyBinaryTimestamp copy) {
     super(copy);
-    data = new TimestampWritableV2(copy.data);
+    data = new TimestampWritable(copy.data);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
index eb028e3..ee1e2e6 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
@@ -23,7 +23,7 @@ import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -203,7 +203,7 @@ public final class LazyBinaryUtils {
         break;
       case TIMESTAMP:
         recordInfo.elementOffset = 0;
-        recordInfo.elementSize = TimestampWritableV2.getTotalLength(bytes, offset);
+        recordInfo.elementSize = TimestampWritable.getTotalLength(bytes, offset);
         break;
       case TIMESTAMPLOCALTZ:
         recordInfo.elementOffset = 0;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
index 000dfed..340f322 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
@@ -26,7 +26,7 @@ import java.util.Deque;
 import java.util.List;
 
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VLong;
@@ -303,7 +303,7 @@ public final class LazyBinaryDeserializeRead extends DeserializeRead {
       break;
     case TIMESTAMP:
       {
-        int length = TimestampWritableV2.getTotalLength(bytes, offset);
+        int length = TimestampWritable.getTotalLength(bytes, offset);
         int saveStart = offset;
         offset += length;
         // Last item -- ok to be at end.

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
index ec56b82..cd4e619 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
@@ -19,14 +19,14 @@
 package org.apache.hadoop.hive.serde2.lazybinary.fast;
 
 import java.io.IOException;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayDeque;
 import java.util.Deque;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -35,12 +35,12 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
@@ -64,7 +64,7 @@ public class LazyBinarySerializeWrite implements SerializeWrite {
   private boolean skipLengthPrefix = false;
 
   // For thread safety, we allocate private writable objects for our use only.
-  private TimestampWritableV2 timestampWritable;
+  private TimestampWritable timestampWritable;
   private HiveIntervalYearMonthWritable hiveIntervalYearMonthWritable;
   private HiveIntervalDayTimeWritable hiveIntervalDayTimeWritable;
   private HiveIntervalDayTime hiveIntervalDayTime;
@@ -308,7 +308,7 @@ public class LazyBinarySerializeWrite implements SerializeWrite {
   @Override
   public void writeDate(Date date) throws IOException {
     beginElement();
-    writeVInt(DateWritableV2.dateToDays(date));
+    writeVInt(DateWritable.dateToDays(date));
     finishElement();
   }
 
@@ -327,7 +327,7 @@ public class LazyBinarySerializeWrite implements SerializeWrite {
   public void writeTimestamp(Timestamp v) throws IOException {
     beginElement();
     if (timestampWritable == null) {
-      timestampWritable = new TimestampWritableV2();
+      timestampWritable = new TimestampWritable();
     }
     timestampWritable.set(v);
     timestampWritable.writeToByteStream(output);
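
The serializer keeps one private TimestampWritable and allocates it lazily; per the field comment above, each LazyBinarySerializeWrite owns its writables so concurrent serializers never share mutable state. The pattern reduced to an illustrative standalone class (names are mine, calls are the ones the patch uses):

    import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;

    class TimestampFieldWriter {
      // One reusable writable per instance: allocated on first use,
      // reset for every value, never shared across threads.
      private TimestampWritable timestampWritable;

      void write(java.sql.Timestamp v, RandomAccessOutput output) {
        if (timestampWritable == null) {
          timestampWritable = new TimestampWritable();
        }
        timestampWritable.set(v);
        timestampWritable.writeToByteStream(output);
      }
    }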

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
index 9393fb8..a442cb1 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
@@ -29,9 +29,9 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampLocalTZObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector;
 import org.apache.hive.common.util.Murmur3;
@@ -39,11 +39,13 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyDouble;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
@@ -718,7 +720,7 @@ public final class ObjectInspectorUtils {
       case DATE:
         return ((DateObjectInspector) poi).getPrimitiveWritableObject(o).hashCode();
       case TIMESTAMP:
-        TimestampWritableV2 t = ((TimestampObjectInspector) poi)
+        TimestampWritable t = ((TimestampObjectInspector) poi)
             .getPrimitiveWritableObject(o);
         return t.hashCode();
       case TIMESTAMPLOCALTZ:
@@ -855,7 +857,7 @@ public final class ObjectInspectorUtils {
             byteBuffer.putInt(((DateObjectInspector) poi).getPrimitiveWritableObject(o).getDays());
             return Murmur3.hash32(byteBuffer.array(), 4);
           case TIMESTAMP: {
-            TimestampWritableV2 t = ((TimestampObjectInspector) poi)
+            TimestampWritable t = ((TimestampObjectInspector) poi)
                     .getPrimitiveWritableObject(o);
             return Murmur3.hash32(t.getBytes());
           }
@@ -1110,16 +1112,16 @@ public final class ObjectInspectorUtils {
       }
 
       case DATE: {
-        DateWritableV2 d1 = ((DateObjectInspector) poi1)
+        DateWritable d1 = ((DateObjectInspector) poi1)
             .getPrimitiveWritableObject(o1);
-        DateWritableV2 d2 = ((DateObjectInspector) poi2)
+        DateWritable d2 = ((DateObjectInspector) poi2)
             .getPrimitiveWritableObject(o2);
         return d1.compareTo(d2);
       }
       case TIMESTAMP: {
-        TimestampWritableV2 t1 = ((TimestampObjectInspector) poi1)
+        TimestampWritable t1 = ((TimestampObjectInspector) poi1)
             .getPrimitiveWritableObject(o1);
-        TimestampWritableV2 t2 = ((TimestampObjectInspector) poi2)
+        TimestampWritable t2 = ((TimestampObjectInspector) poi2)
             .getPrimitiveWritableObject(o2);
         return t1.compareTo(t2);
       }
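
In the Murmur3 hunk above, a date hashes by packing its day count into four bytes and running the 32-bit Murmur3 over them. The same computation pulled out into a standalone helper (illustrative wrapper, same calls as the patch):

    import java.nio.ByteBuffer;
    import org.apache.hive.common.util.Murmur3;

    class DateBucketHash {
      // Mirrors the DATE bucketing path: 4 big-endian bytes of
      // days-since-epoch, hashed with Murmur3's 32-bit variant.
      static int hashOfDays(int daysSinceEpoch) {
        ByteBuffer buf = ByteBuffer.allocate(4);
        buf.putInt(daysSinceEpoch);
        return Murmur3.hash32(buf.array(), 4);
      }
    }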

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java
index f58364c..93a18f7 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java
@@ -17,10 +17,11 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
+import java.sql.Date;
+
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 
 /**
@@ -30,7 +31,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 @InterfaceStability.Stable
 public interface DateObjectInspector extends PrimitiveObjectInspector {
 
-  DateWritableV2 getPrimitiveWritableObject(Object o);
+  DateWritable getPrimitiveWritableObject(Object o);
 
   Date getPrimitiveJavaObject(Object o);
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java
index 7dc3d07..bd86c22 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import java.sql.Date;
+
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 
 public class JavaConstantDateObjectInspector extends JavaDateObjectInspector
@@ -35,6 +36,6 @@ public class JavaConstantDateObjectInspector extends JavaDateObjectInspector
     if (value==null) {
       return null;
     }
-    return new DateWritableV2(value);
+    return new DateWritable(value);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java
index 4da7299..2453bc6 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 
 public class JavaConstantTimestampObjectInspector extends
@@ -35,6 +36,6 @@ public class JavaConstantTimestampObjectInspector extends
     if (value==null) {
       return null;
     }
-    return new TimestampWritableV2(value);
+    return new TimestampWritable(value);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java
index 4cf0a60..d93d719 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import java.sql.Date;
+
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 /**
@@ -32,8 +33,8 @@ public class JavaDateObjectInspector
     super(TypeInfoFactory.dateTypeInfo);
   }
 
-  public DateWritableV2 getPrimitiveWritableObject(Object o) {
-    return o == null ? null : new DateWritableV2((Date) o);
+  public DateWritable getPrimitiveWritableObject(Object o) {
+    return o == null ? null : new DateWritable((Date) o);
   }
 
   @Override
@@ -49,34 +50,20 @@ public class JavaDateObjectInspector
     if (value == null) {
       return null;
     }
-    ((Date) o).setTimeInDays(value.toEpochDay());
+    ((Date) o).setTime(value.getTime());
     return o;
   }
 
-  @Deprecated
-  public Object set(Object o, java.sql.Date value) {
-    if (value == null) {
-      return null;
-    }
-    ((Date) o).setTimeInMillis(value.getTime());
-    return o;
-  }
-
-  public Object set(Object o, DateWritableV2 d) {
+  public Object set(Object o, DateWritable d) {
     if (d == null) {
       return null;
     }
-    ((Date) o).setTimeInDays(d.get().toEpochDay());
+    ((Date) o).setTime(d.get().getTime());
     return o;
   }
 
-  @Deprecated
-  public Object create(java.sql.Date value) {
-    return Date.ofEpochMilli(value.getTime());
-  }
-
   public Object create(Date value) {
-    return Date.ofEpochDay(value.toEpochDay());
+    return new Date(value.getTime());
   }
 
 }
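
The settable-OI contract here mutates a caller-owned java.sql.Date rather than allocating per value; with the V2 epoch-day setters gone, the restored set() copies by epoch millis. A small usage sketch (not from the patch):

    import java.sql.Date;

    public class SettableDateDemo {
      public static void main(String[] args) {
        Date holder = new Date(0);              // one reusable instance
        Date value = Date.valueOf("2018-06-25");
        holder.setTime(value.getTime());        // what the restored set() does
        System.out.println(holder);             // 2018-06-25
      }
    }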

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
index 47719c8..1e805ba 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 public class JavaTimestampObjectInspector
@@ -29,8 +30,8 @@ public class JavaTimestampObjectInspector
     super(TypeInfoFactory.timestampTypeInfo);
   }
 
-  public TimestampWritableV2 getPrimitiveWritableObject(Object o) {
-    return o == null ? null : new TimestampWritableV2((Timestamp) o);
+  public TimestampWritable getPrimitiveWritableObject(Object o) {
+    return o == null ? null : new TimestampWritable((Timestamp) o);
   }
 
   @Override
@@ -44,54 +45,43 @@ public class JavaTimestampObjectInspector
       return null;
     }
     Timestamp source = (Timestamp) o;
-    return new Timestamp(source);
+    Timestamp copy = new Timestamp(source.getTime());
+    copy.setNanos(source.getNanos());
+    return copy;
   }
 
   public Timestamp get(Object o) {
     return (Timestamp) o;
   }
 
-  @Deprecated
-  public Object set(Object o, java.sql.Timestamp value) {
-    if (value == null) {
-      return null;
-    }
-    ((Timestamp) o).setTimeInMillis(value.getTime(), value.getNanos());
-    return o;
-  }
-
   public Object set(Object o, Timestamp value) {
     if (value == null) {
       return null;
     }
-    ((Timestamp) o).set(value);
+    ((Timestamp) o).setTime(value.getTime());
     return o;
   }
 
   public Object set(Object o, byte[] bytes, int offset) {
-    TimestampWritableV2.setTimestamp((Timestamp) o, bytes, offset);
+    TimestampWritable.setTimestamp((Timestamp) o, bytes, offset);
     return o;
   }
 
-  public Object set(Object o, TimestampWritableV2 tw) {
+  public Object set(Object o, TimestampWritable tw) {
     if (tw == null) {
       return null;
     }
     Timestamp t = (Timestamp) o;
-    t.set(tw.getTimestamp());
+    t.setTime(tw.getTimestamp().getTime());
+    t.setNanos(tw.getTimestamp().getNanos());
     return t;
   }
 
-  @Deprecated
-  public Object create(java.sql.Timestamp value) {
-    return Timestamp.ofEpochMilli(value.getTime(), value.getNanos());
-  }
-
   public Object create(Timestamp value) {
-    return new Timestamp(value);
+    return new Timestamp(value.getTime());
   }
 
   public Object create(byte[] bytes, int offset) {
-    return TimestampWritableV2.createTimestamp(bytes, offset);
+    return TimestampWritable.createTimestamp(bytes, offset);
   }
 }
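
copyObject() above goes back to the two-step java.sql.Timestamp copy for a reason worth spelling out: getTime() carries only millisecond precision, and the sub-millisecond digits live in the separate nanos field, so constructing from getTime() alone silently truncates. A quick demonstration (plain JDK):

    import java.sql.Timestamp;

    public class TimestampCopyDemo {
      public static void main(String[] args) {
        Timestamp source = Timestamp.valueOf("2018-06-25 10:38:09.123456789");

        Timestamp lossy = new Timestamp(source.getTime());
        System.out.println(lossy.getNanos());   // 123000000 -- sub-millis gone

        Timestamp copy = new Timestamp(source.getTime());
        copy.setNanos(source.getNanos());       // restores full precision
        System.out.println(copy.getNanos());    // 123456789
      }
    }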

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
index 1e12cca..ba20a2c 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
@@ -18,15 +18,15 @@
 
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.time.ZoneId;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
@@ -255,7 +255,7 @@ public class PrimitiveObjectInspectorConverter {
         SettableDateObjectInspector outputOI) {
       this.inputOI = inputOI;
       this.outputOI = outputOI;
-      r = outputOI.create(new Date());
+      r = outputOI.create(new Date(0));
     }
 
     public Object convert(Object input) {
@@ -277,7 +277,7 @@ public class PrimitiveObjectInspectorConverter {
         SettableTimestampObjectInspector outputOI) {
       this.inputOI = inputOI;
       this.outputOI = outputOI;
-      r = outputOI.create(new Timestamp());
+      r = outputOI.create(new Timestamp(0));
     }
 
     public void setIntToTimestampInSeconds(boolean intToTimestampInSeconds) {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
index 51a0aed..10af3dc 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
@@ -24,7 +24,7 @@ import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -346,9 +346,9 @@ public final class PrimitiveObjectInspectorFactory {
       return new WritableConstantHiveVarcharObjectInspector((VarcharTypeInfo)typeInfo,
           (HiveVarcharWritable)value);
     case DATE:
-      return new WritableConstantDateObjectInspector((DateWritableV2)value);
+      return new WritableConstantDateObjectInspector((DateWritable)value);
     case TIMESTAMP:
-      return new WritableConstantTimestampObjectInspector((TimestampWritableV2)value);
+      return new WritableConstantTimestampObjectInspector((TimestampWritable)value);
     case TIMESTAMPLOCALTZ:
       return new WritableConstantTimestampLocalTZObjectInspector((TimestampLocalTZTypeInfo)typeInfo, (TimestampLocalTZWritable) value);
     case INTERVAL_YEAR_MONTH:

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
index 6362f2e..8cf0744 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
@@ -23,6 +23,8 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.nio.charset.CharacterCodingException;
 import java.nio.charset.StandardCharsets;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.time.DateTimeException;
 import java.time.ZoneId;
 import java.util.HashMap;
@@ -30,19 +32,18 @@ import java.util.Map;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
-import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.common.type.TimestampTZUtil;
+import org.apache.hadoop.hive.ql.util.TimestampUtils;
+import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.common.type.TimestampTZ;
-import org.apache.hadoop.hive.common.type.TimestampTZUtil;
-import org.apache.hadoop.hive.common.type.TimestampUtils;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -50,8 +51,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -229,10 +229,10 @@ public final class PrimitiveObjectInspectorUtils {
       Short.class, ShortWritable.class);
   public static final PrimitiveTypeEntry dateTypeEntry = new PrimitiveTypeEntry(
       PrimitiveCategory.DATE, serdeConstants.DATE_TYPE_NAME, null,
-      Date.class, DateWritableV2.class);
+      Date.class, DateWritable.class);
   public static final PrimitiveTypeEntry timestampTypeEntry = new PrimitiveTypeEntry(
       PrimitiveCategory.TIMESTAMP, serdeConstants.TIMESTAMP_TYPE_NAME, null,
-      Timestamp.class, TimestampWritableV2.class);
+      Timestamp.class, TimestampWritable.class);
   public static final PrimitiveTypeEntry timestampTZTypeEntry = new PrimitiveTypeEntry(
       PrimitiveCategory.TIMESTAMPLOCALTZ, serdeConstants.TIMESTAMPLOCALTZ_TYPE_NAME, null,
       TimestampTZ.class, TimestampLocalTZWritable.class);
@@ -1126,7 +1126,7 @@ public final class PrimitiveObjectInspectorUtils {
       } catch (IllegalArgumentException e) {
         Timestamp ts = getTimestampFromString(s);
         if (ts != null) {
-          result = Date.ofEpochMilli(ts.toEpochMilli());
+          result = new Date(ts.getTime());
         } else {
           result = null;
         }
@@ -1140,7 +1140,7 @@ public final class PrimitiveObjectInspectorUtils {
       } catch (IllegalArgumentException e) {
         Timestamp ts = getTimestampFromString(val);
         if (ts != null) {
-          result = Date.ofEpochMilli(ts.toEpochMilli());
+          result = new Date(ts.getTime());
         } else {
           result = null;
         }
@@ -1151,7 +1151,7 @@ public final class PrimitiveObjectInspectorUtils {
       result = ((DateObjectInspector) oi).getPrimitiveWritableObject(o).get();
       break;
     case TIMESTAMP:
-      result = DateWritableV2.timeToDate(
+      result = DateWritable.timeToDate(
           ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getSeconds());
       break;
     case TIMESTAMPLOCALTZ:
@@ -1187,23 +1187,23 @@ public final class PrimitiveObjectInspectorUtils {
       break;
     case BOOLEAN:
       longValue = ((BooleanObjectInspector) inputOI).get(o) ? 1 : 0;
-      result = TimestampWritableV2.longToTimestamp(longValue, intToTimestampInSeconds);
+      result = TimestampWritable.longToTimestamp(longValue, intToTimestampInSeconds);
       break;
     case BYTE:
       longValue = ((ByteObjectInspector) inputOI).get(o);
-      result = TimestampWritableV2.longToTimestamp(longValue, intToTimestampInSeconds);
+      result = TimestampWritable.longToTimestamp(longValue, intToTimestampInSeconds);
       break;
     case SHORT:
       longValue = ((ShortObjectInspector) inputOI).get(o);
-      result = TimestampWritableV2.longToTimestamp(longValue, intToTimestampInSeconds);
+      result = TimestampWritable.longToTimestamp(longValue, intToTimestampInSeconds);
       break;
     case INT:
       longValue = ((IntObjectInspector) inputOI).get(o);
-      result = TimestampWritableV2.longToTimestamp(longValue, intToTimestampInSeconds);
+      result = TimestampWritable.longToTimestamp(longValue, intToTimestampInSeconds);
       break;
     case LONG:
       longValue = ((LongObjectInspector) inputOI).get(o);
-      result = TimestampWritableV2.longToTimestamp(longValue, intToTimestampInSeconds);
+      result = TimestampWritable.longToTimestamp(longValue, intToTimestampInSeconds);
       break;
     case FLOAT:
       result = TimestampUtils.doubleToTimestamp(((FloatObjectInspector) inputOI).get(o));
@@ -1212,8 +1212,8 @@ public final class PrimitiveObjectInspectorUtils {
       result = TimestampUtils.doubleToTimestamp(((DoubleObjectInspector) inputOI).get(o));
       break;
     case DECIMAL:
-      result = TimestampUtils.decimalToTimestamp(
-          ((HiveDecimalObjectInspector) inputOI).getPrimitiveJavaObject(o));
+      result = TimestampUtils.decimalToTimestamp(((HiveDecimalObjectInspector) inputOI)
+                                                    .getPrimitiveJavaObject(o));
       break;
     case STRING:
       StringObjectInspector soi = (StringObjectInspector) inputOI;
@@ -1225,8 +1225,8 @@ public final class PrimitiveObjectInspectorUtils {
       result = getTimestampFromString(getString(o, inputOI));
       break;
     case DATE:
-      result = Timestamp.ofEpochMilli(
-          ((DateObjectInspector) inputOI).getPrimitiveWritableObject(o).get().toEpochMilli());
+      result = new Timestamp(
+          ((DateObjectInspector) inputOI).getPrimitiveWritableObject(o).get().getTime());
       break;
     case TIMESTAMP:
       result = ((TimestampObjectInspector) inputOI).getPrimitiveWritableObject(o).getTimestamp();
@@ -1247,25 +1247,23 @@ public final class PrimitiveObjectInspectorUtils {
     return result;
   }
 
-  public static Timestamp getTimestampFromString(String s) {
+  static Timestamp getTimestampFromString(String s) {
     Timestamp result;
     s = s.trim();
     s = trimNanoTimestamp(s);
 
+    int firstSpace = s.indexOf(' ');
+    if (firstSpace < 0) {
+      s = s.concat(" 00:00:00");
+    }
     try {
       result = Timestamp.valueOf(s);
     } catch (IllegalArgumentException e) {
       // Let's try to parse it as timestamp with time zone and transform
       try {
-        result = Timestamp.valueOf(TimestampTZUtil.parse(s).getZonedDateTime()
-            .toLocalDateTime().toString());
+        result = Timestamp.from(TimestampTZUtil.parse(s).getZonedDateTime().toInstant());
       } catch (DateTimeException e2) {
-        // Last try: we try to parse it as date and transform
-        try {
-          result = Timestamp.ofEpochMilli(Date.valueOf(s).toEpochMilli());
-        } catch (IllegalArgumentException e3) {
-          result = null;
-        }
+        result = null;
       }
     }
     return result;
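
The restored getTimestampFromString() pads date-only input with a midnight time before calling Timestamp.valueOf, since valueOf accepts only the full yyyy-[m]m-[d]d hh:mm:ss[.f...] form. The padding behavior in isolation (plain JDK):

    import java.sql.Timestamp;

    public class ValueOfPaddingDemo {
      public static void main(String[] args) {
        String s = "2018-06-25";
        if (s.indexOf(' ') < 0) {
          s = s.concat(" 00:00:00");              // date-only: pad midnight
        }
        System.out.println(Timestamp.valueOf(s)); // 2018-06-25 00:00:00.0
        // Without the padding, Timestamp.valueOf("2018-06-25")
        // throws IllegalArgumentException.
      }
    }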

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java
index 725d5cd..831411d 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java
@@ -17,22 +17,17 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import java.sql.Date;
+
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 /**
  * A SettableDecimalObjectInspector can set a Date value to an object.
  */
 public interface SettableDateObjectInspector extends DateObjectInspector {
-  @Deprecated
-  Object set(Object o, java.sql.Date d);
-
   Object set(Object o, Date d);
 
-  Object set(Object o, DateWritableV2 d);
-
-  @Deprecated
-  Object create(java.sql.Date d);
+  Object set(Object o, DateWritable d);
 
   Object create(Date d);
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java
index 65cae16..c676a62 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java
@@ -17,25 +17,20 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 
 public interface SettableTimestampObjectInspector extends TimestampObjectInspector {
 
   Object set(Object o, byte[] bytes, int offset);
 
-  @Deprecated
-  Object set(Object o, java.sql.Timestamp t);
-
   Object set(Object o, Timestamp t);
 
-  Object set(Object o, TimestampWritableV2 t);
+  Object set(Object o, TimestampWritable t);
 
   Object create(byte[] bytes, int offset);
 
-  @Deprecated
-  Object create (java.sql.Timestamp t);
-
   Object create (Timestamp t);
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java
index 6eb2aac..f277232 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java
@@ -17,17 +17,18 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public interface TimestampObjectInspector extends PrimitiveObjectInspector {
 
-  TimestampWritableV2 getPrimitiveWritableObject(Object o);
+  TimestampWritable getPrimitiveWritableObject(Object o);
 
   Timestamp getPrimitiveJavaObject(Object o);
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java
index 7e6cc8d..290fcd3 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java
@@ -17,9 +17,10 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+
 /**
  * A WritableConstantDateObjectInspector is a WritableDateObjectInspector
  * that implements ConstantObjectInspector.
@@ -28,18 +29,18 @@ public class WritableConstantDateObjectInspector extends
     WritableDateObjectInspector implements
     ConstantObjectInspector {
 
-  private DateWritableV2 value;
+  private DateWritable value;
 
   protected WritableConstantDateObjectInspector() {
     super();
   }
-  WritableConstantDateObjectInspector(DateWritableV2 value) {
+  WritableConstantDateObjectInspector(DateWritable value) {
     super();
     this.value = value;
   }
 
   @Override
-  public DateWritableV2 getWritableConstantValue() {
+  public DateWritable getWritableConstantValue() {
     return value;
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantTimestampObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantTimestampObjectInspector.java
index 9428421..dc8fedf 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantTimestampObjectInspector.java
@@ -17,9 +17,10 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+
 /**
  * A WritableConstantTimestampObjectInspector is a WritableTimestampObjectInspector
  * that implements ConstantObjectInspector.
@@ -28,18 +29,18 @@ public class WritableConstantTimestampObjectInspector extends
     WritableTimestampObjectInspector implements
     ConstantObjectInspector {
 
-  private TimestampWritableV2 value;
+  private TimestampWritable value;
 
   protected WritableConstantTimestampObjectInspector() {
     super();
   }
-  WritableConstantTimestampObjectInspector(TimestampWritableV2 value) {
+  WritableConstantTimestampObjectInspector(TimestampWritable value) {
     super();
     this.value = value;
   }
 
   @Override
-  public TimestampWritableV2 getWritableConstantValue() {
+  public TimestampWritable getWritableConstantValue() {
     return value;
   }
 }


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index ebea31d..596edde 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.ql.parse;
 import java.io.IOException;
 import java.io.Serializable;
 import java.io.UnsupportedEncodingException;
-import java.text.ParseException;
+import java.sql.Date;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -43,7 +43,6 @@ import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -61,6 +60,7 @@ import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryProperties;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.cache.results.CacheUsage;
+import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -96,7 +96,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentUser;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -109,6 +109,9 @@ import org.slf4j.LoggerFactory;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
 
+import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME;
+import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog;
+
 /**
  * BaseSemanticAnalyzer.
  *
@@ -2095,19 +2098,14 @@ public abstract class BaseSemanticAnalyzer {
   private static String normalizeDateCol(
       Object colValue, String originalColSpec) throws SemanticException {
     Date value;
-    if (colValue instanceof DateWritableV2) {
-      value = ((DateWritableV2) colValue).get(); // Time doesn't matter.
+    if (colValue instanceof DateWritable) {
+      value = ((DateWritable) colValue).get(false); // Time doesn't matter.
     } else if (colValue instanceof Date) {
       value = (Date) colValue;
     } else {
       throw new SemanticException("Unexpected date type " + colValue.getClass());
     }
-    try {
-      return MetaStoreUtils.PARTITION_DATE_FORMAT.get().format(
-          MetaStoreUtils.PARTITION_DATE_FORMAT.get().parse(value.toString()));
-    } catch (ParseException e) {
-      throw new SemanticException(e);
-    }
+    return MetaStoreUtils.PARTITION_DATE_FORMAT.get().format(value);
   }
 
   protected WriteEntity toWriteEntity(String location) throws SemanticException {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index d8c7d7f..2506172 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -19,7 +19,8 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import java.math.BigDecimal;
-
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -32,12 +33,10 @@ import java.util.Stack;
 import org.apache.calcite.rel.RelNode;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang3.math.NumberUtils;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZUtil;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.ErrorMsg;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java
index 31c9682..607545d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.metastore.columnstats.cache.DoubleColumnStatsDataI
 import org.apache.hadoop.hive.metastore.columnstats.cache.LongColumnStatsDataInspector;
 import org.apache.hadoop.hive.metastore.columnstats.cache.StringColumnStatsDataInspector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -197,10 +197,10 @@ public class ColumnStatisticsObjTranslator {
       long v = ((LongObjectInspector) oi).get(o);
       statsObj.getStatsData().getDateStats().setNumDVs(v);
     } else if (fName.equals("max")) {
-      DateWritableV2 v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
+      DateWritable v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
       statsObj.getStatsData().getDateStats().setHighValue(new Date(v.getDays()));
     } else if (fName.equals("min")) {
-      DateWritableV2 v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
+      DateWritable v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
       statsObj.getStatsData().getDateStats().setLowValue(new Date(v.getDays()));
     } else if (fName.equals("ndvbitvector")) {
       PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java
index 2fb8844..21164b7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java
@@ -25,11 +25,10 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.NoSuchElementException;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.joda.time.Chronology;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -47,22 +46,26 @@ import com.google.common.collect.ImmutableMap;
 public abstract class UDFDateFloor extends UDF {
 
   private final QueryGranularity granularity;
-  private final TimestampWritableV2 resultTS;
+  private final TimestampWritable resultTS;
   private final TimestampLocalTZWritable resultTSLTZ;
 
   public UDFDateFloor(String granularity) {
     this.granularity = QueryGranularity.fromString(granularity);
-    this.resultTS = new TimestampWritableV2();
+    this.resultTS = new TimestampWritable();
     this.resultTSLTZ = new TimestampLocalTZWritable();
   }
 
-  public TimestampWritableV2 evaluate(TimestampWritableV2 t) {
+  public TimestampWritable evaluate(TimestampWritable t) {
     if (t == null) {
       return null;
     }
-    final long originalTimestamp = t.getTimestamp().toEpochMilli();
-    final long newTimestamp = granularity.truncate(originalTimestamp);
-    resultTS.set(Timestamp.ofEpochMilli(newTimestamp));
+    final long originalTimestamp = t.getTimestamp().getTime(); // default
+    final long originalTimestampUTC = new DateTime(originalTimestamp)
+        .withZoneRetainFields(DateTimeZone.UTC).getMillis(); // default -> utc
+    final long newTimestampUTC = granularity.truncate(originalTimestampUTC); // utc
+    final long newTimestamp = new DateTime(newTimestampUTC, DateTimeZone.UTC)
+        .withZoneRetainFields(DateTimeZone.getDefault()).getMillis(); // utc -> default
+    resultTS.setTime(newTimestamp);
     return resultTS;
   }
 

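The reverted evaluate() no longer floors the raw epoch value; it shifts the local instant to the same wall clock in UTC, truncates there, and shifts back. The same round trip in isolation (joda-time; originalTimestamp and granularity as in the hunk above, values assumed):

    long local = originalTimestamp;                  // epoch millis, default zone
    long utc = new DateTime(local)
        .withZoneRetainFields(DateTimeZone.UTC).getMillis();   // same wall clock, UTC
    long flooredUtc = granularity.truncate(utc);               // floor in UTC
    long back = new DateTime(flooredUtc, DateTimeZone.UTC)
        .withZoneRetainFields(DateTimeZone.getDefault()).getMillis(); // back to default
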
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
index 72fa263..f774954 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
@@ -18,26 +18,23 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthTimestamp;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
-
-import java.util.Calendar;
-import java.util.TimeZone;
+import org.apache.hadoop.io.Text;
 
 /**
  * UDFDayOfMonth.
@@ -54,81 +51,66 @@ import java.util.TimeZone;
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  30")
 @VectorizedExpressions({VectorUDFDayOfMonthDate.class, VectorUDFDayOfMonthString.class, VectorUDFDayOfMonthTimestamp.class})
 @NDV(maxNdv = 31)
-public class UDFDayOfMonth extends GenericUDF {
+public class UDFDayOfMonth extends UDF {
+  private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+  private final Calendar calendar = Calendar.getInstance();
+
+  private final IntWritable result = new IntWritable();
 
-  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
-  private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
-  private final IntWritable output = new IntWritable();
+  public UDFDayOfMonth() {
+  }
 
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  /**
+   * Get the day of month from a date string.
+   *
+   * @param dateString
+   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
+   *          "yyyy-MM-dd".
+   * @return an int from 1 to 31. null if the dateString is not a valid date
+   *         string.
+   */
+  public IntWritable evaluate(Text dateString) {
 
-  @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
-    checkArgsSize(arguments, 1, 1);
-    checkArgPrimitive(arguments, 0);
-    switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
-      case INTERVAL_DAY_TIME:
-        inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME;
-        converters[0] = ObjectInspectorConverters.getConverter(
-            arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
-        break;
-      case STRING:
-      case CHAR:
-      case VARCHAR:
-      case DATE:
-      case TIMESTAMP:
-      case TIMESTAMPLOCALTZ:
-      case VOID:
-        obtainDateConverter(arguments, 0, inputTypes, converters);
-        break;
-      default:
-        // build error message
-        StringBuilder sb = new StringBuilder();
-        sb.append(getFuncName());
-        sb.append(" does not take ");
-        sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
-        sb.append(" type");
-        throw new UDFArgumentTypeException(0, sb.toString());
+    if (dateString == null) {
+      return null;
     }
 
-    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
-    return outputOI;
+    try {
+      Date date = formatter.parse(dateString.toString());
+      calendar.setTime(date);
+      result.set(calendar.get(Calendar.DAY_OF_MONTH));
+      return result;
+    } catch (ParseException e) {
+      return null;
+    }
   }
 
-  @Override
-  public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    switch (inputTypes[0]) {
-      case INTERVAL_DAY_TIME:
-        HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
-        if (intervalDayTime == null) {
-          return null;
-        }
-        output.set(intervalDayTime.getDays());
-        break;
-      case STRING:
-      case CHAR:
-      case VARCHAR:
-      case DATE:
-      case TIMESTAMP:
-      case TIMESTAMPLOCALTZ:
-      case VOID:
-        Date date = getDateValue(arguments, 0, inputTypes, converters);
-        if (date == null) {
-          return null;
-        }
-        calendar.setTimeInMillis(date.toEpochMilli());
-        output.set(calendar.get(Calendar.DAY_OF_MONTH));
+  public IntWritable evaluate(DateWritable d) {
+    if (d == null) {
+      return null;
     }
-    return output;
+
+    calendar.setTime(d.get(false)); // Time doesn't matter.
+    result.set(calendar.get(Calendar.DAY_OF_MONTH));
+    return result;
   }
 
-  @Override
-  protected String getFuncName() {
-    return "day";
+  public IntWritable evaluate(TimestampWritable t) {
+    if (t == null) {
+      return null;
+    }
+
+    calendar.setTime(t.getTimestamp());
+    result.set(calendar.get(Calendar.DAY_OF_MONTH));
+    return result;
   }
 
-  @Override
-  public String getDisplayString(String[] children) {
-    return getStandardDisplayString(getFuncName(), children);
+  public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
+    if (i == null) {
+      return null;
+    }
+
+    result.set(i.getHiveIntervalDayTime().getDays());
+    return result;
   }
 }

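A usage sketch for the reverted UDF (hypothetical inputs): Hive picks the evaluate overload by the argument's writable type, and unparseable strings yield null rather than an error:

    UDFDayOfMonth udf = new UDFDayOfMonth();
    udf.evaluate(new Text("2009-07-30"));   // IntWritable(30)
    udf.evaluate(new Text("not-a-date"));   // null: ParseException is swallowed
    udf.evaluate((DateWritable) null);      // null
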
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java
index defa9d1..88e6d94 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java
@@ -18,7 +18,11 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.hive.common.type.Date;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
@@ -26,8 +30,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfWeekDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfWeekString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfWeekTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 
@@ -47,6 +51,8 @@ import org.apache.hadoop.io.Text;
 @VectorizedExpressions({VectorUDFDayOfWeekDate.class, VectorUDFDayOfWeekString.class, VectorUDFDayOfWeekTimestamp.class})
 @NDV(maxNdv = 7)
 public class UDFDayOfWeek extends UDF {
+  private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+  private final Calendar calendar = Calendar.getInstance();
 
   private final IntWritable result = new IntWritable();
 
@@ -67,29 +73,32 @@ public class UDFDayOfWeek extends UDF {
       return null;
     }
     try {
-      Date date = Date.valueOf(dateString.toString());
-      result.set(date.getDayOfWeek());
+      Date date = formatter.parse(dateString.toString());
+      calendar.setTime(date);
+      result.set(calendar.get(Calendar.DAY_OF_WEEK));
       return result;
-    } catch (IllegalArgumentException e) {
+    } catch (ParseException e) {
       return null;
     }
   }
 
-  public IntWritable evaluate(DateWritableV2 d) {
+  public IntWritable evaluate(DateWritable d) {
     if (d == null) {
       return null;
     }
 
-    result.set(d.get().getDayOfWeek());
+    calendar.setTime(d.get(false)); // Time doesn't matter.
+    result.set(calendar.get(Calendar.DAY_OF_WEEK));
     return result;
   }
 
-  public IntWritable evaluate(TimestampWritableV2 t) {
+  public IntWritable evaluate(TimestampWritable t) {
     if (t == null) {
       return null;
     }
 
-    result.set(t.getTimestamp().getDayOfWeek());
+    calendar.setTime(t.getTimestamp());
+    result.set(calendar.get(Calendar.DAY_OF_WEEK));
     return result;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
index 3cee0c1..8f531fd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.udf;
 
 import java.text.SimpleDateFormat;
 import java.util.Date;
-import java.util.TimeZone;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
@@ -120,7 +119,6 @@ public class UDFFromUnixTime extends UDF {
   private Text eval(long unixtime, Text format) {
     if (!format.equals(lastFormat)) {
       formatter = new SimpleDateFormat(format.toString());
-      formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
       lastFormat.set(format);
     }
 

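With the setTimeZone(UTC) call removed, the formatter falls back to the JVM default timezone, so the rendered string depends on where the server runs. A small illustration (assumed zone):

    SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    // Unix time 0 formats as "1970-01-01 00:00:00" only if the default
    // zone is UTC; e.g. "1969-12-31 16:00:00" under America/Los_Angeles.
    String s = f.format(new Date(0L));
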
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
index f906f36..a0c4e96 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
@@ -18,26 +18,22 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourTimestamp;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
-
-import java.util.Calendar;
-import java.util.TimeZone;
+import org.apache.hadoop.io.Text;
 
 /**
  * UDFHour.
@@ -55,82 +51,62 @@ import java.util.TimeZone;
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  12")
 @VectorizedExpressions({VectorUDFHourDate.class, VectorUDFHourString.class, VectorUDFHourTimestamp.class})
 @NDV(maxNdv = 24)
-public class UDFHour extends GenericUDF {
+public class UDFHour extends UDF {
+  private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+  private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
+  private final Calendar calendar = Calendar.getInstance();
 
-  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
-  private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
-  private final IntWritable output = new IntWritable();
+  private final IntWritable result = new IntWritable();
 
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  public UDFHour() {
+  }
 
+  /**
+   * Get the hour from a date string.
+   *
+   * @param dateString
+   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
+   *          "HH:mm:ss".
+   * @return an int from 0 to 23. null if the dateString is not a valid date
+   *         string.
+   */
+  public IntWritable evaluate(Text dateString) {
 
-  @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
-    checkArgsSize(arguments, 1, 1);
-    checkArgPrimitive(arguments, 0);
-    switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
-      case INTERVAL_DAY_TIME:
-        inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME;
-        converters[0] = ObjectInspectorConverters.getConverter(
-            arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
-        break;
-      case STRING:
-      case CHAR:
-      case VARCHAR:
-      case DATE:
-      case TIMESTAMP:
-      case TIMESTAMPLOCALTZ:
-      case VOID:
-        obtainTimestampConverter(arguments, 0, inputTypes, converters);
-        break;
-      default:
-        // build error message
-        StringBuilder sb = new StringBuilder();
-        sb.append(getFuncName());
-        sb.append(" does not take ");
-        sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
-        sb.append(" type");
-        throw new UDFArgumentTypeException(0, sb.toString());
+    if (dateString == null) {
+      return null;
     }
 
-    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
-    return outputOI;
+    try {
+      Date date = null;
+      try {
+        date = formatter1.parse(dateString.toString());
+      } catch (ParseException e) {
+        date = formatter2.parse(dateString.toString());
+      }
+      calendar.setTime(date);
+      result.set(calendar.get(Calendar.HOUR_OF_DAY));
+      return result;
+    } catch (ParseException e) {
+      return null;
+    }
   }
 
-  @Override
-  public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    switch (inputTypes[0]) {
-      case INTERVAL_DAY_TIME:
-        HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
-        if (intervalDayTime == null) {
-          return null;
-        }
-        output.set(intervalDayTime.getHours());
-        break;
-      case STRING:
-      case CHAR:
-      case VARCHAR:
-      case DATE:
-      case TIMESTAMP:
-      case TIMESTAMPLOCALTZ:
-      case VOID:
-        Timestamp ts = getTimestampValue(arguments, 0, converters);
-        if (ts == null) {
-          return null;
-        }
-        calendar.setTimeInMillis(ts.toEpochMilli());
-        output.set(calendar.get(Calendar.HOUR_OF_DAY));
+  public IntWritable evaluate(TimestampWritable t) {
+    if (t == null) {
+      return null;
     }
-    return output;
-  }
 
-  @Override
-  protected String getFuncName() {
-    return "hour";
+    calendar.setTime(t.getTimestamp());
+    result.set(calendar.get(Calendar.HOUR_OF_DAY));
+    return result;
   }
 
-  @Override
-  public String getDisplayString(String[] children) {
-    return getStandardDisplayString(getFuncName(), children);
+  public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
+    if (i == null) {
+      return null;
+    }
+
+    result.set(i.getHiveIntervalDayTime().getHours());
+    return result;
   }
 }

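The reverted evaluate(Text) parses with a two-step fallback: the full timestamp pattern first, then the time-only pattern. A minimal sketch using the formatter1/formatter2 fields declared above (input assumed):

    Date d;
    try {
      d = formatter1.parse("12:58:59");   // fails: no "yyyy-MM-dd" prefix
    } catch (ParseException e) {
      d = formatter2.parse("12:58:59");   // parses as 1970-01-01 12:58:59 local
    }
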
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
index 2e62173..306d458 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
@@ -18,26 +18,22 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteTimestamp;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
-
-import java.util.Calendar;
-import java.util.TimeZone;
+import org.apache.hadoop.io.Text;
 
 /**
  * UDFMinute.
@@ -55,82 +51,62 @@ import java.util.TimeZone;
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  58")
 @VectorizedExpressions({VectorUDFMinuteDate.class, VectorUDFMinuteString.class, VectorUDFMinuteTimestamp.class})
 @NDV(maxNdv = 60)
-public class UDFMinute extends GenericUDF {
+public class UDFMinute extends UDF {
+  private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+  private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
+  private final Calendar calendar = Calendar.getInstance();
 
-  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
-  private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
-  private final IntWritable output = new IntWritable();
+  private final IntWritable result = new IntWritable();
 
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  public UDFMinute() {
+  }
 
+  /**
+   * Get the minute from a date string.
+   *
+   * @param dateString
+   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
+   *          "HH:mm:ss".
+   * @return an int from 0 to 59. null if the dateString is not a valid date
+   *         string.
+   */
+  public IntWritable evaluate(Text dateString) {
 
-  @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
-    checkArgsSize(arguments, 1, 1);
-    checkArgPrimitive(arguments, 0);
-    switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
-      case INTERVAL_DAY_TIME:
-        inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME;
-        converters[0] = ObjectInspectorConverters.getConverter(
-            arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
-        break;
-      case STRING:
-      case CHAR:
-      case VARCHAR:
-      case DATE:
-      case TIMESTAMP:
-      case TIMESTAMPLOCALTZ:
-      case VOID:
-        obtainTimestampConverter(arguments, 0, inputTypes, converters);
-        break;
-      default:
-        // build error message
-        StringBuilder sb = new StringBuilder();
-        sb.append(getFuncName());
-        sb.append(" does not take ");
-        sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
-        sb.append(" type");
-        throw new UDFArgumentTypeException(0, sb.toString());
+    if (dateString == null) {
+      return null;
     }
 
-    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
-    return outputOI;
+    try {
+      Date date = null;
+      try {
+        date = formatter1.parse(dateString.toString());
+      } catch (ParseException e) {
+        date = formatter2.parse(dateString.toString());
+      }
+      calendar.setTime(date);
+      result.set(calendar.get(Calendar.MINUTE));
+      return result;
+    } catch (ParseException e) {
+      return null;
+    }
   }
 
-  @Override
-  public Object evaluate(GenericUDF.DeferredObject[] arguments) throws HiveException {
-    switch (inputTypes[0]) {
-      case INTERVAL_DAY_TIME:
-        HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
-        if (intervalDayTime == null) {
-          return null;
-        }
-        output.set(intervalDayTime.getMinutes());
-        break;
-      case STRING:
-      case CHAR:
-      case VARCHAR:
-      case DATE:
-      case TIMESTAMP:
-      case TIMESTAMPLOCALTZ:
-      case VOID:
-        Timestamp ts = getTimestampValue(arguments, 0, converters);
-        if (ts == null) {
-          return null;
-        }
-        calendar.setTimeInMillis(ts.toEpochMilli());
-        output.set(calendar.get(Calendar.MINUTE));
+  public IntWritable evaluate(TimestampWritable t) {
+    if (t == null) {
+      return null;
     }
-    return output;
-  }
 
-  @Override
-  protected String getFuncName() {
-    return "minute";
+    calendar.setTime(t.getTimestamp());
+    result.set(calendar.get(Calendar.MINUTE));
+    return result;
   }
 
-  @Override
-  public String getDisplayString(String[] children) {
-    return getStandardDisplayString(getFuncName(), children);
+  public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
+    if (i == null) {
+      return null;
+    }
+
+    result.set(i.getHiveIntervalDayTime().getMinutes());
+    return result;
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
index 1a8b2da..7995934 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
@@ -18,26 +18,23 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthTimestamp;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
-
-import java.util.Calendar;
-import java.util.TimeZone;
+import org.apache.hadoop.io.Text;
 
 /**
  * UDFMonth.
@@ -54,82 +51,64 @@ import java.util.TimeZone;
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  7")
 @VectorizedExpressions({VectorUDFMonthDate.class, VectorUDFMonthString.class, VectorUDFMonthTimestamp.class})
 @NDV(maxNdv = 31)
-public class UDFMonth extends GenericUDF {
+public class UDFMonth extends UDF {
+  private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+  private final Calendar calendar = Calendar.getInstance();
 
-  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
-  private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
-  private final IntWritable output = new IntWritable();
+  private final IntWritable result = new IntWritable();
 
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  public UDFMonth() {
+  }
 
+  /**
+   * Get the month from a date string.
+   *
+   * @param dateString
+   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
+   *          "yyyy-MM-dd".
+   * @return an int from 1 to 12. null if the dateString is not a valid date
+   *         string.
+   */
+  public IntWritable evaluate(Text dateString) {
+    if (dateString == null) {
+      return null;
+    }
+    try {
+      Date date = formatter.parse(dateString.toString());
+      calendar.setTime(date);
+      result.set(1 + calendar.get(Calendar.MONTH));
+      return result;
+    } catch (ParseException e) {
+      return null;
+    }
+  }
 
-  @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
-    checkArgsSize(arguments, 1, 1);
-    checkArgPrimitive(arguments, 0);
-    switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
-      case INTERVAL_YEAR_MONTH:
-        inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_YEAR_MONTH;
-        converters[0] = ObjectInspectorConverters.getConverter(
-            arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector);
-        break;
-      case STRING:
-      case CHAR:
-      case VARCHAR:
-      case DATE:
-      case TIMESTAMP:
-      case TIMESTAMPLOCALTZ:
-      case VOID:
-        obtainDateConverter(arguments, 0, inputTypes, converters);
-        break;
-      default:
-        // build error message
-        StringBuilder sb = new StringBuilder();
-        sb.append(getFuncName());
-        sb.append(" does not take ");
-        sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
-        sb.append(" type");
-        throw new UDFArgumentTypeException(0, sb.toString());
+  public IntWritable evaluate(DateWritable d) {
+    if (d == null) {
+      return null;
     }
 
-    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
-    return outputOI;
+    calendar.setTime(d.get(false));  // Time doesn't matter.
+    result.set(1 + calendar.get(Calendar.MONTH));
+    return result;
   }
 
-  @Override
-  public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    switch (inputTypes[0]) {
-      case INTERVAL_YEAR_MONTH:
-        HiveIntervalYearMonth intervalYearMonth = getIntervalYearMonthValue(arguments, 0, inputTypes, converters);
-        if (intervalYearMonth == null) {
-          return null;
-        }
-        output.set(intervalYearMonth.getMonths());
-        break;
-      case STRING:
-      case CHAR:
-      case VARCHAR:
-      case DATE:
-      case TIMESTAMP:
-      case TIMESTAMPLOCALTZ:
-      case VOID:
-        Date date = getDateValue(arguments, 0, inputTypes, converters);
-        if (date == null) {
-          return null;
-        }
-        calendar.setTimeInMillis(date.toEpochMilli());
-        output.set(1 + calendar.get(Calendar.MONTH));
+  public IntWritable evaluate(TimestampWritable t) {
+    if (t == null) {
+      return null;
     }
-    return output;
-  }
 
-  @Override
-  protected String getFuncName() {
-    return "month";
+    calendar.setTime(t.getTimestamp());
+    result.set(1 + calendar.get(Calendar.MONTH));
+    return result;
   }
 
-  @Override
-  public String getDisplayString(String[] children) {
-    return getStandardDisplayString(getFuncName(), children);
+  public IntWritable evaluate(HiveIntervalYearMonthWritable i) {
+    if (i == null) {
+      return null;
+    }
+
+    result.set(i.getHiveIntervalYearMonth().getMonths());
+    return result;
   }
 }

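The "1 +" in the month computations above exists because Calendar.MONTH is zero-based (JANUARY == 0):

    Calendar c = new GregorianCalendar(2009, Calendar.JULY, 30); // java.util.GregorianCalendar
    int month = 1 + c.get(Calendar.MONTH); // 7
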
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
index 31a8529..5bf8b24 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
@@ -18,27 +18,25 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondTimestamp;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
-
-import java.util.Calendar;
-import java.util.TimeZone;
-
+import org.apache.hadoop.io.Text;
+import org.apache.hive.common.util.DateUtils;
 
 /**
  * UDFSecond.
@@ -56,82 +54,64 @@ import java.util.TimeZone;
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  59")
 @VectorizedExpressions({VectorUDFSecondDate.class, VectorUDFSecondString.class, VectorUDFSecondTimestamp.class})
 @NDV(maxNdv = 60)
-public class UDFSecond extends GenericUDF {
+public class UDFSecond extends UDF {
+  private final SimpleDateFormat formatter1 = new SimpleDateFormat(
+      "yyyy-MM-dd HH:mm:ss");
+  private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
+  private final Calendar calendar = Calendar.getInstance();
 
-  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
-  private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
-  private final IntWritable output = new IntWritable();
+  private final IntWritable result = new IntWritable();
 
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  public UDFSecond() {
+  }
 
+  /**
+   * Get the second from a date string.
+   *
+   * @param dateString
+   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
+   *          "HH:mm:ss".
+   * @return an int from 0 to 59. null if the dateString is not a valid date
+   *         string.
+   */
+  public IntWritable evaluate(Text dateString) {
 
-  @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
-    checkArgsSize(arguments, 1, 1);
-    checkArgPrimitive(arguments, 0);
-    switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
-      case INTERVAL_DAY_TIME:
-        inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME;
-        converters[0] = ObjectInspectorConverters.getConverter(
-            arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
-        break;
-      case STRING:
-      case CHAR:
-      case VARCHAR:
-      case DATE:
-      case TIMESTAMP:
-      case TIMESTAMPLOCALTZ:
-      case VOID:
-        obtainTimestampConverter(arguments, 0, inputTypes, converters);
-        break;
-      default:
-        // build error message
-        StringBuilder sb = new StringBuilder();
-        sb.append(getFuncName());
-        sb.append(" does not take ");
-        sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
-        sb.append(" type");
-        throw new UDFArgumentTypeException(0, sb.toString());
+    if (dateString == null) {
+      return null;
     }
 
-    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
-    return outputOI;
+    try {
+      Date date = null;
+      try {
+        date = formatter1.parse(dateString.toString());
+      } catch (ParseException e) {
+        date = formatter2.parse(dateString.toString());
+      }
+      calendar.setTime(date);
+      result.set(calendar.get(Calendar.SECOND));
+      return result;
+    } catch (ParseException e) {
+      return null;
+    }
   }
 
-  @Override
-  public Object evaluate(GenericUDF.DeferredObject[] arguments) throws HiveException {
-    switch (inputTypes[0]) {
-      case INTERVAL_DAY_TIME:
-        HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
-        if (intervalDayTime == null) {
-          return null;
-        }
-        output.set(intervalDayTime.getSeconds());
-        break;
-      case STRING:
-      case CHAR:
-      case VARCHAR:
-      case DATE:
-      case TIMESTAMP:
-      case TIMESTAMPLOCALTZ:
-      case VOID:
-        Timestamp ts = getTimestampValue(arguments, 0, converters);
-        if (ts == null) {
-          return null;
-        }
-        calendar.setTimeInMillis(ts.toEpochMilli());
-        output.set(calendar.get(Calendar.SECOND));
+  public IntWritable evaluate(TimestampWritable t) {
+    if (t == null) {
+      return null;
     }
-    return output;
-  }
 
-  @Override
-  protected String getFuncName() {
-    return "second";
+    calendar.setTime(t.getTimestamp());
+    result.set(calendar.get(Calendar.SECOND));
+    return result;
   }
 
-  @Override
-  public String getDisplayString(String[] children) {
-    return getStandardDisplayString(getFuncName(), children);
+  public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
+    if (i == null) {
+      return null;
+    }
+
+    HiveIntervalDayTime idt = i.getHiveIntervalDayTime();
+    result.set(idt.getSeconds());
+    return result;
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
index a7f4bf1..3ac7a06 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
@@ -30,11 +30,11 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToBooleanVi
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDateToBoolean;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToBoolean;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -179,12 +179,12 @@ public class UDFToBoolean extends UDF {
     return booleanWritable;
   }
 
-  public BooleanWritable evaluate(DateWritableV2 d) {
+  public BooleanWritable evaluate(DateWritable d) {
     // date value to boolean doesn't make any sense.
     return null;
   }
 
-  public BooleanWritable evaluate(TimestampWritableV2 i) {
+  public BooleanWritable evaluate(TimestampWritable i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
index 8f4ec3b..1128b32 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyByte;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.io.BooleanWritable;
@@ -183,7 +183,7 @@ public class UDFToByte extends UDF {
     }
   }
 
-  public ByteWritable evaluate(TimestampWritableV2 i) {
+  public ByteWritable evaluate(TimestampWritable i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
index 7a01452..a8de3d5 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -180,7 +180,7 @@ public class UDFToDouble extends UDF {
     }
   }
 
-  public DoubleWritable evaluate(TimestampWritableV2 i) {
+  public DoubleWritable evaluate(TimestampWritable i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
index 451b45f..2872ff2 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -181,7 +181,7 @@ public class UDFToFloat extends UDF {
     }
   }
 
-  public FloatWritable evaluate(TimestampWritableV2 i) {
+  public FloatWritable evaluate(TimestampWritable i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
index 4fe9c32..748a688 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.io.BooleanWritable;
@@ -193,7 +193,7 @@ public class UDFToInteger extends UDF {
    *          The Timestamp value to convert
    * @return IntWritable
    */
-  public IntWritable evaluate(TimestampWritableV2 i) {
+  public IntWritable evaluate(TimestampWritable i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
index b31eeb0..e286652 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.io.BooleanWritable;
@@ -195,7 +195,7 @@ public class UDFToLong extends UDF {
     }
   }
 
-  public LongWritable evaluate(TimestampWritableV2 i) {
+  public LongWritable evaluate(TimestampWritable i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
index 315789c..e003ff3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong;
@@ -28,7 +29,7 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyShort;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.io.BooleanWritable;
@@ -184,7 +185,7 @@ public class UDFToShort extends UDF {
     }
   }
 
-  public ShortWritable evaluate(TimestampWritableV2 i) {
+  public ShortWritable evaluate(TimestampWritable i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
index a16d429..557cb1c 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
@@ -21,12 +21,12 @@ package org.apache.hadoop.hive.ql.udf;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
 import org.apache.hadoop.io.BooleanWritable;
@@ -135,7 +135,7 @@ public class UDFToString extends UDF {
       return i;
   }
 
-  public Text evaluate(DateWritableV2 d) {
+  public Text evaluate(DateWritable d) {
     if (d == null) {
       return null;
     } else {
@@ -144,7 +144,7 @@ public class UDFToString extends UDF {
     }
   }
 
-  public Text evaluate(TimestampWritableV2 i) {
+  public Text evaluate(TimestampWritable i) {
     if (i == null) {
       return null;
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
index 18ca9a7..18ed52d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
@@ -18,11 +18,11 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
 import java.util.Calendar;
-import java.util.TimeZone;
+import java.util.Date;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
@@ -30,8 +30,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearDate
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 
@@ -49,12 +49,11 @@ import org.apache.hadoop.io.Text;
 @VectorizedExpressions({VectorUDFWeekOfYearDate.class, VectorUDFWeekOfYearString.class, VectorUDFWeekOfYearTimestamp.class})
 @NDV(maxNdv = 52)
 public class UDFWeekOfYear extends UDF {
+  private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+  private final Calendar calendar = Calendar.getInstance();
 
   private final IntWritable result = new IntWritable();
 
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
-
-
   public UDFWeekOfYear() {
     calendar.setFirstDayOfWeek(Calendar.MONDAY);
     calendar.setMinimalDaysInFirstWeek(4);
@@ -74,32 +73,31 @@ public class UDFWeekOfYear extends UDF {
       return null;
     }
     try {
-      Date date = Date.valueOf(dateString.toString());
-      calendar.setTimeInMillis(date.toEpochMilli());
+      Date date = formatter.parse(dateString.toString());
+      calendar.setTime(date);
       result.set(calendar.get(Calendar.WEEK_OF_YEAR));
       return result;
-    } catch (IllegalArgumentException e) {
+    } catch (ParseException e) {
       return null;
     }
   }
 
-  public IntWritable evaluate(DateWritableV2 d) {
+  public IntWritable evaluate(DateWritable d) {
     if (d == null) {
       return null;
     }
-    Date date = d.get();
-    calendar.setTimeInMillis(date.toEpochMilli());
+
+    calendar.setTime(d.get(false));  // Time doesn't matter.
     result.set(calendar.get(Calendar.WEEK_OF_YEAR));
     return result;
   }
 
-  public IntWritable evaluate(TimestampWritableV2 t) {
+  public IntWritable evaluate(TimestampWritable t) {
     if (t == null) {
       return null;
     }
 
-    Timestamp ts = t.getTimestamp();
-    calendar.setTimeInMillis(ts.toEpochMilli());
+    calendar.setTime(t.getTimestamp());
     result.set(calendar.get(Calendar.WEEK_OF_YEAR));
     return result;
   }

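The constructor settings retained above give ISO-8601 week numbering: weeks start on Monday, and week 1 is the first week with at least four days in the new year. For example:

    Calendar c = Calendar.getInstance();
    c.setFirstDayOfWeek(Calendar.MONDAY);
    c.setMinimalDaysInFirstWeek(4);
    c.set(2009, Calendar.JANUARY, 1);        // a Thursday
    int week = c.get(Calendar.WEEK_OF_YEAR); // 1 under these rules
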
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
index fcbb57f..8417591 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
@@ -18,26 +18,23 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearTimestamp;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
-
-import java.util.Calendar;
-import java.util.TimeZone;
+import org.apache.hadoop.io.Text;
 
 /**
  * UDFYear.
@@ -54,82 +51,66 @@ import java.util.TimeZone;
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  2009")
 @VectorizedExpressions({VectorUDFYearDate.class, VectorUDFYearString.class, VectorUDFYearTimestamp.class})
 @NDV(maxNdv = 20) // although technically it's unbounded, it's unlikely we will ever see ndv > 20
-public class UDFYear extends GenericUDF {
+public class UDFYear extends UDF {
+  private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+  private final Calendar calendar = Calendar.getInstance();
 
-  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
-  private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
-  private final IntWritable output = new IntWritable();
+  private final IntWritable result = new IntWritable();
 
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  public UDFYear() {
+  }
 
+  /**
+   * Get the year from a date string.
+   *
+   * @param dateString
+   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
+   *          "yyyy-MM-dd".
+   * @return the year as an int (e.g. 2009). null if the dateString is not a
+   *         valid date string.
+   */
+  public IntWritable evaluate(Text dateString) {
 
-  @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
-    checkArgsSize(arguments, 1, 1);
-    checkArgPrimitive(arguments, 0);
-    switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
-      case INTERVAL_YEAR_MONTH:
-        inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_YEAR_MONTH;
-        converters[0] = ObjectInspectorConverters.getConverter(
-            arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector);
-        break;
-      case STRING:
-      case CHAR:
-      case VARCHAR:
-      case DATE:
-      case TIMESTAMP:
-      case TIMESTAMPLOCALTZ:
-      case VOID:
-        obtainDateConverter(arguments, 0, inputTypes, converters);
-        break;
-      default:
-        // build error message
-        StringBuilder sb = new StringBuilder();
-        sb.append(getFuncName());
-        sb.append(" does not take ");
-        sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
-        sb.append(" type");
-        throw new UDFArgumentTypeException(0, sb.toString());
+    if (dateString == null) {
+      return null;
     }
 
-    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
-    return outputOI;
+    try {
+      Date date = formatter.parse(dateString.toString());
+      calendar.setTime(date);
+      result.set(calendar.get(Calendar.YEAR));
+      return result;
+    } catch (ParseException e) {
+      return null;
+    }
   }
 
-  @Override
-  public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    switch (inputTypes[0]) {
-      case INTERVAL_YEAR_MONTH:
-        HiveIntervalYearMonth intervalYearMonth = getIntervalYearMonthValue(arguments, 0, inputTypes, converters);
-        if (intervalYearMonth == null) {
-          return null;
-        }
-        output.set(intervalYearMonth.getYears());
-        break;
-      case STRING:
-      case CHAR:
-      case VARCHAR:
-      case DATE:
-      case TIMESTAMP:
-      case TIMESTAMPLOCALTZ:
-      case VOID:
-        Date date = getDateValue(arguments, 0, inputTypes, converters);
-        if (date == null) {
-          return null;
-        }
-        calendar.setTimeInMillis(date.toEpochMilli());
-        output.set(calendar.get(Calendar.YEAR));
+  public IntWritable evaluate(DateWritable d) {
+    if (d == null) {
+      return null;
     }
-    return output;
+
+    calendar.setTime(d.get(false));  // Time doesn't matter.
+    result.set(calendar.get(Calendar.YEAR));
+    return result;
   }
 
-  @Override
-  protected String getFuncName() {
-    return "year";
+  public IntWritable evaluate(TimestampWritable t) {
+    if (t == null) {
+      return null;
+    }
+
+    calendar.setTime(t.getTimestamp());
+    result.set(calendar.get(Calendar.YEAR));
+    return result;
   }
 
-  @Override
-  public String getDisplayString(String[] children) {
-    return getStandardDisplayString(getFuncName(), children);
+  public IntWritable evaluate(HiveIntervalYearMonthWritable i) {
+    if (i == null) {
+      return null;
+    }
+
+    result.set(i.getHiveIntervalYearMonth().getYears());
+    return result;
   }
 }
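
The practical effect of dropping the explicit UTC calendar is that field extraction now tracks the JVM's default time zone. A small sketch of the difference, assuming America/Los_Angeles as that default (the zone behind the q.out expectations later in this patch):

    import java.util.Calendar;
    import java.util.TimeZone;

    public class YearZoneSketch {
      public static void main(String[] args) {
        long epochMillis = 0L; // 1970-01-01 00:00:00 UTC

        Calendar utc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        utc.setTimeInMillis(epochMillis);
        System.out.println(utc.get(Calendar.YEAR)); // 1970

        Calendar pacific =
            Calendar.getInstance(TimeZone.getTimeZone("America/Los_Angeles"));
        pacific.setTimeInMillis(epochMillis);
        // Local wall clock is 1969-12-31 16:00, so the extracted year differs.
        System.out.println(pacific.get(Calendar.YEAR)); // 1969
      }
    }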

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java
index d69a4f7..5c67242 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -35,6 +34,8 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
+import java.sql.Date;
+
 
 public abstract class BaseMaskUDF extends GenericUDF {
   private static final Log LOG = LogFactory.getLog(BaseMaskUDF.class);
@@ -227,13 +228,13 @@ class ByteTransformerAdapter extends AbstractTransformerAdapter {
 
 class DateTransformerAdapter extends AbstractTransformerAdapter {
   final DateObjectInspector columnType;
-  final DateWritableV2 writable;
+  final DateWritable        writable;
 
   public DateTransformerAdapter(DateObjectInspector columnType, AbstractTransformer transformer) {
-    this(columnType, transformer, new DateWritableV2());
+    this(columnType, transformer, new DateWritable());
   }
 
-  public DateTransformerAdapter(DateObjectInspector columnType, AbstractTransformer transformer, DateWritableV2 writable) {
+  public DateTransformerAdapter(DateObjectInspector columnType, AbstractTransformer transformer, DateWritable writable) {
     super(transformer);
 
     this.columnType = columnType;
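
DateTransformerAdapter keeps one DateWritable and overwrites it for each row instead of allocating a fresh object per value. A minimal sketch of that reuse pattern, with illustrative dates and class name:

    import java.sql.Date;

    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class ReusedWritableSketch {
      public static void main(String[] args) {
        // One output writable reused across rows, as the adapter does:
        // set() rewrites the buffer rather than creating a new object.
        DateWritable out = new DateWritable();
        for (String s : new String[] {"2018-06-25", "2018-06-26"}) {
          out.set(Date.valueOf(s));
          System.out.println(out.get());
        }
      }
    }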

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
index a8bcc97..ca8bc8f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
@@ -20,19 +20,19 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.SelectOperator;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.*;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ColStatistics;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
 import org.apache.hadoop.hive.ql.plan.Statistics;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -46,6 +46,7 @@ import org.apache.hive.common.util.BloomKFilter;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.sql.Timestamp;
 import java.util.List;
 
 /**
@@ -196,14 +197,14 @@ public class GenericUDAFBloomFilter implements GenericUDAFResolver2 {
           bf.addBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
           break;
         case DATE:
-          DateWritableV2 vDate = ((DateObjectInspector)inputOI).
+          DateWritable vDate = ((DateObjectInspector)inputOI).
                   getPrimitiveWritableObject(parameters[0]);
           bf.addLong(vDate.getDays());
           break;
         case TIMESTAMP:
           Timestamp vTimeStamp = ((TimestampObjectInspector)inputOI).
                   getPrimitiveJavaObject(parameters[0]);
-          bf.addLong(vTimeStamp.toEpochMilli());
+          bf.addLong(vTimeStamp.getTime());
           break;
         case CHAR:
           Text vChar = ((HiveCharObjectInspector)inputOI).
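
After the revert, DATE values are hashed into the bloom filter by their day number and TIMESTAMP values by epoch milliseconds (getTime() rather than toEpochMilli()). A sketch of the DATE branch in isolation, assuming only BloomKFilter's addLong/testLong as used above; the sizing constant is illustrative:

    import org.apache.hive.common.util.BloomKFilter;

    public class BloomDateSketch {
      public static void main(String[] args) {
        BloomKFilter bf = new BloomKFilter(10000); // expected entry count
        long days = 17000L; // a DATE value as days since the epoch
        bf.addLong(days);   // same call as the DATE case above
        System.out.println(bf.testLong(days));     // true
        System.out.println(bf.testLong(days + 1)); // false, with high probability
      }
    }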

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
index dd365dd..2267589 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -1297,7 +1297,7 @@ public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver {
    * High/low value will be saved in stats DB as long value representing days since epoch.
    */
   public static class GenericUDAFDateStatsEvaluator
-      extends GenericUDAFNumericStatsEvaluator<DateWritableV2, DateObjectInspector> {
+      extends GenericUDAFNumericStatsEvaluator<DateWritable, DateObjectInspector> {
 
     @Override
     protected DateObjectInspector getValueObjectInspector() {
@@ -1319,8 +1319,8 @@ public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver {
 
       @Override
       protected void update(Object p, PrimitiveObjectInspector inputOI) {
-        // DateWritableV2 is mutable, DateStatsAgg needs its own copy
-        DateWritableV2 v = new DateWritableV2((DateWritableV2) inputOI.getPrimitiveWritableObject(p));
+        // DateWritable is mutable, DateStatsAgg needs its own copy
+        DateWritable v = new DateWritable((DateWritable) inputOI.getPrimitiveWritableObject(p));
 
         //Update min counter if new value is less than min seen so far
         if (min == null || v.compareTo(min) < 0) {
@@ -1338,8 +1338,8 @@ public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver {
       protected void updateMin(Object minValue, DateObjectInspector minFieldOI) {
         if ((minValue != null) && (min == null ||
             min.compareTo(minFieldOI.getPrimitiveWritableObject(minValue)) > 0)) {
-          // DateWritableV2 is mutable, DateStatsAgg needs its own copy
-          min = new DateWritableV2(minFieldOI.getPrimitiveWritableObject(minValue));
+          // DateWritable is mutable, DateStatsAgg needs its own copy
+          min = new DateWritable(minFieldOI.getPrimitiveWritableObject(minValue));
         }
       }
 
@@ -1347,8 +1347,8 @@ public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver {
       protected void updateMax(Object maxValue, DateObjectInspector maxFieldOI) {
         if ((maxValue != null) && (max == null ||
             max.compareTo(maxFieldOI.getPrimitiveWritableObject(maxValue)) < 0)) {
-          // DateWritableV2 is mutable, DateStatsAgg needs its own copy
-          max = new DateWritableV2(maxFieldOI.getPrimitiveWritableObject(maxValue));
+          // DateWritable is mutable, DateStatsAgg needs its own copy
+          max = new DateWritable(maxFieldOI.getPrimitiveWritableObject(maxValue));
         }
       }
     };
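
The repeated "needs its own copy" comments exist because object inspectors commonly hand back a shared, reused writable; storing that reference as min/max state would let the next row mutate it. A small sketch of the hazard, with illustrative values:

    import java.sql.Date;

    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class MutableWritableSketch {
      public static void main(String[] args) {
        DateWritable shared = new DateWritable(Date.valueOf("2018-01-01"));

        DateWritable aliased = shared;                  // unsafe as min/max state
        DateWritable copied = new DateWritable(shared); // what the evaluator does

        shared.set(Date.valueOf("2018-06-25")); // next row reuses the buffer

        System.out.println(aliased.get()); // 2018-06-25: state silently moved
        System.out.println(copied.get());  // 2018-01-01: the copy is stable
      }
    }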

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
index 0d8d659..710f0e8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
@@ -20,13 +20,12 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.sql.Timestamp;
+import java.text.ParseException;
+import java.util.Date;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -35,12 +34,10 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.UDFType;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -48,12 +45,14 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.C
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.common.util.DateUtils;
 
 /**
  * A Generic User-defined function (GenericUDF) for the use with Hive.
@@ -490,7 +489,7 @@ public abstract class GenericUDF implements Closeable {
   }
 
   protected Date getDateValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes,
-                              Converter[] converters) throws HiveException {
+      Converter[] converters) throws HiveException {
     Object obj;
     if ((obj = arguments[i].get()) == null) {
       return null;
@@ -503,16 +502,16 @@ public abstract class GenericUDF implements Closeable {
     case CHAR:
       String dateStr = converters[i].convert(obj).toString();
       try {
-        date = Date.valueOf(dateStr);
-      } catch (IllegalArgumentException e) {
-        date = null;
+        date = DateUtils.getDateFormat().parse(dateStr);
+      } catch (ParseException e) {
+        throw new UDFArgumentException("Unparsable date: " + dateStr);
       }
       break;
     case TIMESTAMP:
     case DATE:
     case TIMESTAMPLOCALTZ:
       Object writableValue = converters[i].convert(obj);
-      date = ((DateWritableV2) writableValue).get();
+      date = ((DateWritable) writableValue).get();
       break;
     default:
       throw new UDFArgumentTypeException(0, getFuncName()
@@ -532,62 +531,10 @@ public abstract class GenericUDF implements Closeable {
     if (writableValue == null) {
       return null;
     }
-    Timestamp ts = ((TimestampWritableV2) writableValue).getTimestamp();
+    Timestamp ts = ((TimestampWritable) writableValue).getTimestamp();
     return ts;
   }
 
-  protected HiveIntervalYearMonth getIntervalYearMonthValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes,
-      Converter[] converters) throws HiveException {
-    Object obj;
-    if ((obj = arguments[i].get()) == null) {
-      return null;
-    }
-
-    HiveIntervalYearMonth intervalYearMonth;
-    switch (inputTypes[i]) {
-      case STRING:
-      case VARCHAR:
-      case CHAR:
-        String intervalYearMonthStr = converters[i].convert(obj).toString();
-        intervalYearMonth = HiveIntervalYearMonth.valueOf(intervalYearMonthStr);
-        break;
-      case INTERVAL_YEAR_MONTH:
-        Object writableValue = converters[i].convert(obj);
-        intervalYearMonth = ((HiveIntervalYearMonthWritable) writableValue).getHiveIntervalYearMonth();
-        break;
-      default:
-        throw new UDFArgumentTypeException(0, getFuncName()
-            + " only takes INTERVAL_YEAR_MONTH and STRING_GROUP types, got " + inputTypes[i]);
-    }
-    return intervalYearMonth;
-  }
-
-  protected HiveIntervalDayTime getIntervalDayTimeValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes,
-      Converter[] converters) throws HiveException {
-    Object obj;
-    if ((obj = arguments[i].get()) == null) {
-      return null;
-    }
-
-    HiveIntervalDayTime intervalDayTime;
-    switch (inputTypes[i]) {
-      case STRING:
-      case VARCHAR:
-      case CHAR:
-        String intervalDayTimeStr = converters[i].convert(obj).toString();
-        intervalDayTime = HiveIntervalDayTime.valueOf(intervalDayTimeStr);
-        break;
-      case INTERVAL_DAY_TIME:
-        Object writableValue = converters[i].convert(obj);
-        intervalDayTime = ((HiveIntervalDayTimeWritable) writableValue).getHiveIntervalDayTime();
-        break;
-      default:
-        throw new UDFArgumentTypeException(0, getFuncName()
-            + " only takes INTERVAL_DAY_TIME and STRING_GROUP types, got " + inputTypes[i]);
-    }
-    return intervalDayTime;
-  }
-
   protected String getConstantStringValue(ObjectInspector[] arguments, int i) {
     Object constValue = ((ConstantObjectInspector) arguments[i]).getWritableConstantValue();
     String str = constValue == null ? null : constValue.toString();
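
Note the behavioral change in getDateValue(): an unparsable date string now raises UDFArgumentException("Unparsable date: ...") instead of yielding null, and parsing goes through DateUtils.getDateFormat(), which hands each thread its own "yyyy-MM-dd" SimpleDateFormat (SimpleDateFormat itself is not thread-safe). A minimal sketch, assuming only that utility:

    import java.text.ParseException;

    import org.apache.hive.common.util.DateUtils;

    public class DateParseSketch {
      public static void main(String[] args) {
        try {
          // Per-thread formatter; safe to call from concurrent operators.
          System.out.println(DateUtils.getDateFormat().parse("2009-07-30"));
        } catch (ParseException e) {
          // getDateValue() above turns this into a UDFArgumentException
          // rather than returning null.
        }
      }
    }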

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java
index 6df0913..ea1544f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java
@@ -24,10 +24,8 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO
 
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
-import java.util.TimeZone;
+import java.util.Date;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
@@ -64,7 +62,7 @@ public class GenericUDFAddMonths extends GenericUDF {
   private transient PrimitiveCategory[] dtInputTypes = new PrimitiveCategory[3];
   private final Text output = new Text();
   private transient SimpleDateFormat formatter = null;
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  private final Calendar calendar = Calendar.getInstance();
   private transient Integer numMonthsConst;
   private transient boolean isNumMonthsConst;
 
@@ -82,7 +80,6 @@ public class GenericUDFAddMonths extends GenericUDF {
         String fmtStr = getConstantStringValue(arguments, 2);
         if (fmtStr != null) {
           formatter = new SimpleDateFormat(fmtStr);
-          formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
         }
       } else {
         throw new UDFArgumentTypeException(2, getFuncName() + " only takes constant as "
@@ -131,19 +128,17 @@ public class GenericUDFAddMonths extends GenericUDF {
 
     // the function should support both short date and full timestamp format
     // time part of the timestamp should not be skipped
-    Timestamp ts = getTimestampValue(arguments, 0, tsConverters);
-    if (ts != null) {
-      addMonth(ts, numMonthInt);
-    } else {
-      Date date = getDateValue(arguments, 0, dtInputTypes, dtConverters);
-      if (date != null) {
-        addMonth(date, numMonthInt);
-      } else {
+    Date date = getTimestampValue(arguments, 0, tsConverters);
+    if (date == null) {
+      date = getDateValue(arguments, 0, dtInputTypes, dtConverters);
+      if (date == null) {
         return null;
       }
     }
 
-    String res = formatter.format(calendar.getTime());
+    addMonth(date, numMonthInt);
+    Date newDate = calendar.getTime();
+    String res = formatter.format(newDate);
 
     output.set(res);
     return output;
@@ -159,19 +154,9 @@ public class GenericUDFAddMonths extends GenericUDF {
     return "add_months";
   }
 
-  private Calendar addMonth(Date d, int numMonths) {
-    calendar.setTimeInMillis(d.toEpochMilli());
-
-    return addMonth(numMonths);
-  }
-
-  private Calendar addMonth(Timestamp ts, int numMonths) {
-    calendar.setTimeInMillis(ts.toEpochMilli());
-
-    return addMonth(numMonths);
-  }
+  protected Calendar addMonth(Date d, int numMonths) {
+    calendar.setTime(d);
 
-  private Calendar addMonth(int numMonths) {
     boolean lastDatOfMonth = isLastDayOfMonth(calendar);
 
     calendar.add(Calendar.MONTH, numMonths);
@@ -183,7 +168,7 @@ public class GenericUDFAddMonths extends GenericUDF {
     return calendar;
   }
 
-  private boolean isLastDayOfMonth(Calendar cal) {
+  protected boolean isLastDayOfMonth(Calendar cal) {
     int maxDd = cal.getActualMaximum(Calendar.DAY_OF_MONTH);
     int dd = cal.get(Calendar.DAY_OF_MONTH);
     return dd == maxDd;
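
The restored addMonth() records whether the input falls on the last day of its month before the add, so that month-end inputs stay pinned to month end (add_months('2018-01-31', 1) yields 2018-02-28). A self-contained sketch of that rule; the pinning step after the add is reconstructed from lines elided in this hunk, so read it as an assumption:

    import java.util.Calendar;

    public class AddMonthsSketch {
      static Calendar addMonth(Calendar cal, int numMonths) {
        boolean lastDayOfMonth = cal.get(Calendar.DAY_OF_MONTH)
            == cal.getActualMaximum(Calendar.DAY_OF_MONTH);
        cal.add(Calendar.MONTH, numMonths);
        if (lastDayOfMonth) {
          // Pin to the end of the target month.
          cal.set(Calendar.DAY_OF_MONTH,
              cal.getActualMaximum(Calendar.DAY_OF_MONTH));
        }
        return cal;
      }

      public static void main(String[] args) {
        Calendar cal = Calendar.getInstance();
        cal.set(2018, Calendar.JANUARY, 31);
        System.out.printf("%tF%n", addMonth(cal, 1)); // 2018-02-28
      }
    }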


[03/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
index 8bd77f9..72e9916 100644
--- a/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
@@ -257,13 +257,13 @@ STAGE PLANS:
             TableScan Vectorization:
                 native: true
             Select Operator
-              expressions: to_unix_timestamp(ctimestamp1) (type: bigint), year(ctimestamp1) (type: int), month(ctimestamp1) (type: int), day(ctimestamp1) (type: int), weekofyear(ctimestamp1) (type: int), hour(ctimestamp1) (type: int), minute(ctimestamp1) (type: int), second(ctimestamp1) (type: int), cboolean1 (type: boolean), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), if(cboolean1, ctimestamp1, TIMESTAMP'1319-02-02 16:31:57.778') (type: timestamp), if(cboolean1, TIMESTAMP'2000-12-18 08:42:30.0005', ctimestamp1) (type: timestamp), if(cboolean1, ctimestamp1, ctimestamp2) (type: timestamp), if(cboolean1, ctimestamp1, null) (type: timestamp), if(cboolean1, null, ctimestamp2) (type: timestamp)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
+              expressions: to_unix_timestamp(ctimestamp1) (type: bigint), year(ctimestamp1) (type: int), month(ctimestamp1) (type: int), day(ctimestamp1) (type: int), dayofmonth(ctimestamp1) (type: int), weekofyear(ctimestamp1) (type: int), hour(ctimestamp1) (type: int), minute(ctimestamp1) (type: int), second(ctimestamp1) (type: int), cboolean1 (type: boolean), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), if(cboolean1, ctimestamp1, TIMESTAMP'1319-02-02 16:31:57.778') (type: timestamp), if(cboolean1, TIMESTAMP'2000-12-18 08:42:30.0005', ctimestamp1) (type: timestamp), if(cboolean1, ctimestamp1, ctimestamp2) (type: timestamp), if(cboolean1, ctimestamp1, null) (type: timestamp), if(cboolean1, null, ctimestamp2) (type: timestamp)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
               Select Vectorization:
                   className: VectorSelectOperator
                   native: true
-                  projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 0, 1, 3, 13, 14, 15, 16, 17]
-                  selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 9:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 10:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 11:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 12:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-01-25 08:31:57.778) -> 13:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 00:42:30.0005, col 1:timestamp) -> 14:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 15:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 16:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 17:timestamp
+                  projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13, 0, 1, 3, 14, 15, 16, 17, 18]
+                  selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 9:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 10:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 11:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 12:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 13:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-02-02 16:31:57.778) -> 14:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 08:42:30.0005, col 1:timestamp) -> 15:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 16:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 17:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 18:timestamp
               Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: bigint)
@@ -274,7 +274,7 @@ STAGE PLANS:
                     nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                     nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
                 Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
-                value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp)
+                value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp)
       Execution mode: vectorized
       Map Vectorization:
           enabled: true
@@ -291,7 +291,7 @@ STAGE PLANS:
           enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
       Reduce Operator Tree:
         Select Operator
-          expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: boolean), VALUE._col8 (type: timestamp), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp), VALUE._col12 (type: timestamp), VALUE._col13 (type: timestamp), VALUE._col14 (type: timestamp)
+          expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: int), VALUE._col8 (type: boolean), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp), VALUE._col12 (type: timestamp), VALUE._col13 (type: timestamp), VALUE._col14 (type: timestamp), VALUE._col15 (type: timestamp)
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
           Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
@@ -354,14 +354,14 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
--45479202281	528	10	25	25	43	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
-1632453512	2021	9	24	24	38	3	18	32	NULL	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	NULL	1974-10-04 17:21:03.989
-1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	NULL	1999-10-03 16:59:10.396903939
-1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	NULL	2010-04-08 02:43:35.861742727
-1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	NULL	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	NULL	NULL	NULL
-163809583224	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	NULL	1966-08-16 13:36:50.183
-163809583224	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	NULL	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	NULL	NULL	NULL
-490699811	1985	7	20	20	29	9	30	11	true	1985-07-20 09:30:11	1319-02-02 16:31:57.778	1985-07-20 09:30:11	2000-12-18 08:42:30.0005	1985-07-20 09:30:11	1985-07-20 09:30:11	NULL
+-45479000681	528	10	27	27	43	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
+1632478712	2021	9	24	24	38	3	18	32	NULL	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	NULL	1974-10-04 17:21:03.989
+1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	NULL	1999-10-03 16:59:10.396903939
+1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	NULL	2010-04-08 02:43:35.861742727
+1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	NULL	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	NULL	NULL	NULL
+163809612024	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	NULL	1966-08-16 13:36:50.183
+163809612024	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	NULL	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	NULL	NULL	NULL
+490725011	1985	7	20	20	29	9	30	11	true	1985-07-20 09:30:11	1319-02-02 16:31:57.778	1985-07-20 09:30:11	2000-12-18 08:42:30.0005	1985-07-20 09:30:11	1985-07-20 09:30:11	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1319-02-02 16:31:57.778	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:44.028	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:44.028	NULL	1969-12-31 15:59:44.028
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:44.809	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:44.809	NULL	1969-12-31 15:59:44.809
@@ -450,7 +450,7 @@ STAGE PLANS:
             TableScan Vectorization:
                 native: true
             Select Operator
-              expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), day(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
+              expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), dayofmonth(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
               Select Vectorization:
                   className: VectorSelectOperator
@@ -531,14 +531,14 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
--2736272726	1883	4	17	17	16	4	14	34
--62018199211	4	9	24	22	39	18	26	29
-1365554626	2013	4	10	10	15	0	43	46
-206730996125	8521	1	16	16	3	20	42	5
-271176065	1978	8	5	5	31	14	41	5
-501179874	1985	11	18	18	47	16	37	54
-501179874	1985	11	18	18	47	16	37	54
-94573819855	4966	12	4	4	49	9	30	55
+-2736243926	1883	4	17	17	16	4	14	34
+-62018170411	4	9	22	22	39	18	26	29
+1365579826	2013	4	10	10	15	0	43	46
+206731024925	8521	1	16	16	3	20	42	5
+271201265	1978	8	5	5	31	14	41	5
+501208674	1985	11	18	18	47	16	37	54
+501208674	1985	11	18	18	47	16	37	54
+94573848655	4966	12	4	4	49	9	30	55
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
@@ -627,7 +627,7 @@ STAGE PLANS:
             TableScan Vectorization:
                 native: true
             Select Operator
-              expressions: (to_unix_timestamp(ctimestamp1) = to_unix_timestamp(stimestamp1)) (type: boolean), (year(ctimestamp1) = year(CAST( stimestamp1 AS DATE))) (type: boolean), (month(ctimestamp1) = month(CAST( stimestamp1 AS DATE))) (type: boolean), (day(ctimestamp1) = day(CAST( stimestamp1 AS DATE))) (type: boolean), (day(ctimestamp1) = day(stimestamp1)) (type: boolean), (weekofyear(ctimestamp1) = weekofyear(CAST( stimestamp1 AS DATE))) (type: boolean), (hour(ctimestamp1) = hour(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (minute(ctimestamp1) = minute(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (second(ctimestamp1) = second(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean)
+              expressions: (to_unix_timestamp(ctimestamp1) = to_unix_timestamp(stimestamp1)) (type: boolean), (year(ctimestamp1) = year(CAST( stimestamp1 AS DATE))) (type: boolean), (month(ctimestamp1) = month(CAST( stimestamp1 AS DATE))) (type: boolean), (day(ctimestamp1) = day(CAST( stimestamp1 AS DATE))) (type: boolean), (dayofmonth(ctimestamp1) = dayofmonth(stimestamp1)) (type: boolean), (weekofyear(ctimestamp1) = weekofyear(CAST( stimestamp1 AS DATE))) (type: boolean), (hour(ctimestamp1) = hour(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (minute(ctimestamp1) = minute(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (second(ctimestamp1) = second(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
               Select Vectorization:
                   className: VectorSelectOperator
@@ -804,7 +804,7 @@ STAGE PLANS:
             TableScan Vectorization:
                 native: true
             Select Operator
-              expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), day(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
+              expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), dayofmonth(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
               Select Vectorization:
                   className: VectorSelectOperator
@@ -885,7 +885,7 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_wrong
 #### A masked pattern was here ####
-NULL	2	12	2	NULL	49	4	40	39
+NULL	2	11	30	NULL	48	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
@@ -1105,7 +1105,7 @@ FROM alltypesorc_string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-2.89160478029166E11
+2.89160863229166E11
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   round(avg(ctimestamp1), 0),
   variance(ctimestamp1) between 8.97077295279421E19 and 8.97077295279422E19,
@@ -1239,4 +1239,4 @@ FROM alltypesorc_string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-3.6145059754E10	false	false	false	7.5245178084814E10	7.5245178084814E10	7.5245178084814E10	8.0440478971476E10
+3.6145107904E10	false	false	false	7.5245155692476E10	7.5245155692476E10	7.5245155692476E10	8.0440455033059E10
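
The updated to_unix_timestamp expectations differ from the old ones by exactly the local-zone offset: 1632478712 - 1632453512 = 25200 seconds, i.e. 7 hours of Pacific daylight time, because the wall-clock value is now interpreted in the test JVM's zone (apparently US Pacific) instead of UTC. A sketch reproducing that pair of values:

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.TimeZone;

    public class OffsetSketch {
      public static void main(String[] args) throws ParseException {
        String wallClock = "2021-09-24 03:18:32";
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

        fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
        long utcSeconds = fmt.parse(wallClock).getTime() / 1000;   // 1632453512

        fmt.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles"));
        long localSeconds = fmt.parse(wallClock).getTime() / 1000; // 1632478712

        System.out.println(localSeconds - utcSeconds); // 25200 = 7 hours
      }
    }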

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out
index 603851d..95f07d9 100644
--- a/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out
@@ -132,32 +132,32 @@ where cbigint % 250 = 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-1969-12-31 23:59:59.964	1969-12-31 23:59:59.8	NULL	1969-12-08 18:43:03.25	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 15:59:45.748	NULL	NULL
-1969-12-31 23:59:59.964	1969-12-31 23:59:59.8	NULL	1970-01-19 12:24:39	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 15:59:53.817	NULL	NULL
-1969-12-31 23:59:59.97	1969-12-31 23:59:59.8	NULL	1970-01-17 13:10:52.25	1969-12-31 23:59:30	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 16:00:12.935	NULL	NULL
-1969-12-31 23:59:59.949	NULL	1970-01-09 22:53:20.971	1970-01-13 04:45:23.25	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:08.451	NULL	NULL
-1969-12-31 23:59:59.949	NULL	1970-01-09 15:39:13.882	1969-12-09 15:45:32.75	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:08.451	NULL	NULL
-1970-01-01 00:00:00.02	1970-01-01 00:00:15.601	NULL	1969-12-27 19:19:26.75	1970-01-01 00:00:20	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:45.129	NULL	NULL
-1969-12-31 23:59:59.962	1970-01-01 00:00:15.601	NULL	1969-12-10 11:41:51	1969-12-31 23:59:22	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:58.614	NULL	NULL
-1969-12-31 23:59:59.995	1970-01-01 00:00:15.601	NULL	1970-01-08 02:06:56	1969-12-31 23:59:55	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 16:00:04.679	NULL	NULL
-1970-01-01 00:00:00.048	1970-01-01 00:00:15.601	NULL	1969-12-22 19:03:59	1970-01-01 00:00:48	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:50.235	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1969-12-24 08:12:58.862	1969-12-21 05:16:47.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1969-12-30 19:24:23.566	1969-12-16 19:20:17.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1970-01-10 07:39:39.664	1970-01-11 01:09:21.5	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1969-12-24 05:59:27.689	1970-01-19 09:16:31.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1970-01-11 07:29:48.972	1969-12-10 10:41:39	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1970-01-11 18:34:27.246	1970-01-14 22:49:59.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1969-12-31 23:59:59.941	1969-12-31 23:59:52.804	NULL	1969-12-13 10:11:50	1969-12-31 23:59:01	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:13.15	NULL	NULL
-1969-12-31 23:59:59.979	1969-12-31 23:59:52.804	NULL	1970-01-18 20:27:09	1969-12-31 23:59:39	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:55.9	NULL	NULL
-1969-12-31 23:59:59.94	1969-12-31 23:59:52.804	NULL	1970-01-18 13:11:54.75	1969-12-31 23:59:00	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:52.408	NULL	NULL
-1969-12-31 23:59:59.986	1969-12-31 23:59:52.804	NULL	1969-12-14 00:50:00.5	1969-12-31 23:59:46	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:11.065	NULL	NULL
-1970-01-01 00:00:00.059	1969-12-31 23:59:52.804	NULL	1969-12-18 19:57:25.5	1970-01-01 00:00:59	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:10.956	NULL	NULL
-1969-12-31 23:59:59.992	1969-12-31 23:59:52.804	NULL	1969-12-10 14:06:48.5	1969-12-31 23:59:52	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:03.136	NULL	NULL
-1970-01-01 00:00:00.005	1969-12-31 23:59:52.804	NULL	1969-12-20 05:53:12.5	1970-01-01 00:00:05	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:10.973	NULL	NULL
-1969-12-31 23:59:59.976	1969-12-31 23:59:52.804	NULL	1970-01-10 14:18:31	1969-12-31 23:59:36	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:53.145	NULL	NULL
-1969-12-31 23:59:59.95	1969-12-31 23:59:52.804	NULL	1969-12-20 01:33:32.75	1969-12-31 23:59:10	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:54.733	NULL	NULL
-1970-01-01 00:00:00.011	NULL	1969-12-31 06:03:04.018	1970-01-21 20:50:53.75	1970-01-01 00:00:11	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:02.351	NULL	NULL
-1970-01-01 00:00:00.011	NULL	1969-12-28 02:49:09.583	1970-01-15 06:35:27	1970-01-01 00:00:11	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:02.351	NULL	NULL
+1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1969-12-08 10:43:03.25	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.748	NULL	NULL
+1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1970-01-19 04:24:39	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.817	NULL	NULL
+1969-12-31 15:59:59.97	1969-12-31 15:59:59.8	NULL	1970-01-17 05:10:52.25	1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 16:00:12.935	NULL	NULL
+1969-12-31 15:59:59.949	NULL	1970-01-09 14:53:20.971	1970-01-12 20:45:23.25	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:08.451	NULL	NULL
+1969-12-31 15:59:59.949	NULL	1970-01-09 07:39:13.882	1969-12-09 07:45:32.75	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:08.451	NULL	NULL
+1969-12-31 16:00:00.02	1969-12-31 16:00:15.601	NULL	1969-12-27 11:19:26.75	1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.129	NULL	NULL
+1969-12-31 15:59:59.962	1969-12-31 16:00:15.601	NULL	1969-12-10 03:41:51	1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:58.614	NULL	NULL
+1969-12-31 15:59:59.995	1969-12-31 16:00:15.601	NULL	1970-01-07 18:06:56	1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 16:00:04.679	NULL	NULL
+1969-12-31 16:00:00.048	1969-12-31 16:00:15.601	NULL	1969-12-22 11:03:59	1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:50.235	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1969-12-24 00:12:58.862	1969-12-20 21:16:47.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1969-12-30 11:24:23.566	1969-12-16 11:20:17.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1970-01-09 23:39:39.664	1970-01-10 17:09:21.5	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1969-12-23 21:59:27.689	1970-01-19 01:16:31.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1970-01-10 23:29:48.972	1969-12-10 02:41:39	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1970-01-11 10:34:27.246	1970-01-14 14:49:59.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 15:59:59.941	1969-12-31 15:59:52.804	NULL	1969-12-13 02:11:50	1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:13.15	NULL	NULL
+1969-12-31 15:59:59.979	1969-12-31 15:59:52.804	NULL	1970-01-18 12:27:09	1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:55.9	NULL	NULL
+1969-12-31 15:59:59.94	1969-12-31 15:59:52.804	NULL	1970-01-18 05:11:54.75	1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:52.408	NULL	NULL
+1969-12-31 15:59:59.986	1969-12-31 15:59:52.804	NULL	1969-12-13 16:50:00.5	1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:11.065	NULL	NULL
+1969-12-31 16:00:00.059	1969-12-31 15:59:52.804	NULL	1969-12-18 11:57:25.5	1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.956	NULL	NULL
+1969-12-31 15:59:59.992	1969-12-31 15:59:52.804	NULL	1969-12-10 06:06:48.5	1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:03.136	NULL	NULL
+1969-12-31 16:00:00.005	1969-12-31 15:59:52.804	NULL	1969-12-19 21:53:12.5	1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.973	NULL	NULL
+1969-12-31 15:59:59.976	1969-12-31 15:59:52.804	NULL	1970-01-10 06:18:31	1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.145	NULL	NULL
+1969-12-31 15:59:59.95	1969-12-31 15:59:52.804	NULL	1969-12-19 17:33:32.75	1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:54.733	NULL	NULL
+1969-12-31 16:00:00.011	NULL	1969-12-30 22:03:04.018	1970-01-21 12:50:53.75	1969-12-31 16:00:11	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:02.351	NULL	NULL
+1969-12-31 16:00:00.011	NULL	1969-12-27 18:49:09.583	1970-01-14 22:35:27	1969-12-31 16:00:11	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:02.351	NULL	NULL
 PREHOOK: query: explain vectorization expression
 select
 
@@ -292,29 +292,29 @@ where cbigint % 250 = 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1906-06-05 21:34:10	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 15:59:45.748	NULL	NULL
-1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	2020-09-12 02:50:00	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 15:59:53.817	NULL	NULL
-1969-12-31 23:59:30	1969-12-31 23:56:40	NULL	2015-04-24 05:10:50	1969-12-31 23:59:30	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 16:00:12.935	NULL	NULL
-1969-12-31 23:59:09	NULL	1994-07-07 17:09:31	2003-05-26 04:27:30	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:08.451	NULL	NULL
-1969-12-31 23:59:09	NULL	1993-09-09 05:51:22	1908-10-29 15:05:50	1969-12-31 23:59:09	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:08.451	NULL	NULL
-1970-01-01 00:00:20	1970-01-01 04:20:01	NULL	1958-07-08 04:05:50	1970-01-01 00:00:20	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:45.129	NULL	NULL
-1969-12-31 23:59:22	1970-01-01 04:20:01	NULL	1911-02-07 09:30:00	1969-12-31 23:59:22	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:58.614	NULL	NULL
-1969-12-31 23:59:55	1970-01-01 04:20:01	NULL	1989-05-29 03:33:20	1969-12-31 23:59:55	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 16:00:04.679	NULL	NULL
-1970-01-01 00:00:48	1970-01-01 04:20:01	NULL	1944-10-18 10:23:20	1970-01-01 00:00:48	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:50.235	NULL	NULL
-1970-01-01 00:00:08	NULL	1949-01-13 08:21:02	1940-06-26 23:47:30	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1966-09-27 14:32:46	1928-05-26 18:07:30	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1995-07-08 05:01:04	1997-07-06 03:58:20	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1948-10-12 15:01:29	2020-05-04 11:20:50	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1998-03-27 08:56:12	1910-12-27 14:10:00	1970-01-01 00:00:08	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1999-07-01 22:14:06	2008-03-13 09:07:30	1970-01-01 00:00:08	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1969-12-31 23:59:01	1969-12-31 22:00:04	NULL	1919-02-22 21:13:20	1969-12-31 23:59:01	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:13.15	NULL	NULL
-1969-12-31 23:59:39	1969-12-31 22:00:04	NULL	2018-11-17 04:30:00	1969-12-31 23:59:39	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:55.9	NULL	NULL
-1969-12-31 23:59:00	1969-12-31 22:00:04	NULL	2018-01-18 22:32:30	1969-12-31 23:59:00	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:52.408	NULL	NULL
-1969-12-31 23:59:46	1969-12-31 22:00:04	NULL	1920-10-24 17:28:20	1969-12-31 23:59:46	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:11.065	NULL	NULL
-1970-01-01 00:00:59	1969-12-31 22:00:04	NULL	1933-12-12 13:05:00	1970-01-01 00:00:59	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:10.956	NULL	NULL
-1969-12-31 23:59:52	1969-12-31 22:00:04	NULL	1911-05-19 01:28:20	1969-12-31 23:59:52	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:03.136	NULL	NULL
-1970-01-01 00:00:05	1969-12-31 22:00:04	NULL	1937-10-26 06:48:20	1970-01-01 00:00:05	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:10.973	NULL	NULL
-1969-12-31 23:59:36	1969-12-31 22:00:04	NULL	1996-04-10 04:36:40	1969-12-31 23:59:36	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:53.145	NULL	NULL
-1969-12-31 23:59:10	1969-12-31 22:00:04	NULL	1937-04-28 23:05:50	1969-12-31 23:59:10	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:54.733	NULL	NULL
-1970-01-01 00:00:11	NULL	1967-12-15 03:06:58	2027-02-19 16:15:50	1970-01-01 00:00:11	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:02.351	NULL	NULL
-1970-01-01 00:00:11	NULL	1959-05-16 11:19:43	2009-01-30 14:50:00	1970-01-01 00:00:11	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:02.351	NULL	NULL
+1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1906-06-05 13:34:10	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.748	NULL	NULL
+1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	2020-09-11 19:50:00	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.817	NULL	NULL
+1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	2015-04-23 22:10:50	1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 16:00:12.935	NULL	NULL
+1969-12-31 15:59:09	NULL	1994-07-07 10:09:31	2003-05-25 21:27:30	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:08.451	NULL	NULL
+1969-12-31 15:59:09	NULL	1993-09-08 22:51:22	1908-10-29 07:05:50	1969-12-31 15:59:09	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:08.451	NULL	NULL
+1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1958-07-07 21:05:50	1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.129	NULL	NULL
+1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1911-02-07 01:30:00	1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:58.614	NULL	NULL
+1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1989-05-28 20:33:20	1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 16:00:04.679	NULL	NULL
+1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1944-10-18 03:23:20	1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:50.235	NULL	NULL
+1969-12-31 16:00:08	NULL	1949-01-13 00:21:02	1940-06-26 15:47:30	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1966-09-27 07:32:46	1928-05-26 10:07:30	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1995-07-07 22:01:04	1997-07-05 20:58:20	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1948-10-12 08:01:29	2020-05-04 04:20:50	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1998-03-27 00:56:12	1910-12-27 06:10:00	1969-12-31 16:00:08	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1999-07-01 15:14:06	2008-03-13 02:07:30	1969-12-31 16:00:08	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1919-02-22 13:13:20	1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:13.15	NULL	NULL
+1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	2018-11-16 20:30:00	1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:55.9	NULL	NULL
+1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	2018-01-18 14:32:30	1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:52.408	NULL	NULL
+1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1920-10-24 09:28:20	1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:11.065	NULL	NULL
+1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1933-12-12 05:05:00	1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.956	NULL	NULL
+1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1911-05-18 17:28:20	1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:03.136	NULL	NULL
+1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1937-10-25 22:48:20	1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.973	NULL	NULL
+1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1996-04-09 21:36:40	1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.145	NULL	NULL
+1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1937-04-28 15:05:50	1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:54.733	NULL	NULL
+1969-12-31 16:00:11	NULL	1967-12-14 19:06:58	2027-02-19 08:15:50	1969-12-31 16:00:11	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:02.351	NULL	NULL
+1969-12-31 16:00:11	NULL	1959-05-16 04:19:43	2009-01-30 06:50:00	1969-12-31 16:00:11	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:02.351	NULL	NULL
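
The paired rows above describe the same stored values rendered under the two semantics: the removed "-" rows format timestamps in UTC, while the restored "+" rows format them in the test JVM's local zone (conventionally US/Pacific in Hive's q.out baselines; an assumption here), which sits 8 hours behind UTC at the 1970 epoch, so 1970-01-01 00:00:08 becomes 1969-12-31 16:00:08. A minimal Java sketch of that rendering difference, assuming only the two zone IDs:

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class RenderEpoch {
      public static void main(String[] args) {
        long epochMillis = 8_000L;  // the instant printed as "1970-01-01 00:00:08" in UTC
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
        System.out.println(fmt.format(new Date(epochMillis)));   // 1970-01-01 00:00:08
        fmt.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles"));
        System.out.println(fmt.format(new Date(epochMillis)));   // 1969-12-31 16:00:08
      }
    }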

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/windowing_distinct.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/windowing_distinct.q.out b/ql/src/test/results/clientpositive/windowing_distinct.q.out
index 07bc8a1..eab1d88 100644
--- a/ql/src/test/results/clientpositive/windowing_distinct.q.out
+++ b/ql/src/test/results/clientpositive/windowing_distinct.q.out
@@ -96,12 +96,12 @@ FROM windowing_distinct
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@windowing_distinct
 #### A masked pattern was here ####
-54	56.63	0.0	0.0	2.724258237406296E9	57
-54	56.63	0.0	0.0	2.724258237406296E9	57
-54	56.63	0.0	0.0	2.724258237406296E9	57
-235	77.42	0.0	0.0	2.724258237406612E9	69
-235	77.42	0.0	0.0	2.724258237406612E9	69
-235	77.42	0.0	0.0	2.724258237406612E9	69
+54	56.63	0.0	0.0	2.724315837406296E9	57
+54	56.63	0.0	0.0	2.724315837406296E9	57
+54	56.63	0.0	0.0	2.724315837406296E9	57
+235	77.42	0.0	0.0	2.724315837406612E9	69
+235	77.42	0.0	0.0	2.724315837406612E9	69
+235	77.42	0.0	0.0	2.724315837406612E9	69
 PREHOOK: query: SELECT AVG(DISTINCT t) OVER (PARTITION BY index),
        AVG(DISTINCT d) OVER (PARTITION BY index),
        AVG(DISTINCT s) OVER (PARTITION BY index),
@@ -122,12 +122,12 @@ FROM windowing_distinct
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@windowing_distinct
 #### A masked pattern was here ####
-27.0	28.315	NULL	NULL	1.362129118703148E9	28.5000
-27.0	28.315	NULL	NULL	1.362129118703148E9	28.5000
-27.0	28.315	NULL	NULL	1.362129118703148E9	28.5000
-117.5	38.71	NULL	NULL	1.362129118703306E9	34.5000
-117.5	38.71	NULL	NULL	1.362129118703306E9	34.5000
-117.5	38.71	NULL	NULL	1.362129118703306E9	34.5000
+27.0	28.315	NULL	NULL	1.362157918703148E9	28.5000
+27.0	28.315	NULL	NULL	1.362157918703148E9	28.5000
+27.0	28.315	NULL	NULL	1.362157918703148E9	28.5000
+117.5	38.71	NULL	NULL	1.362157918703306E9	34.5000
+117.5	38.71	NULL	NULL	1.362157918703306E9	34.5000
+117.5	38.71	NULL	NULL	1.362157918703306E9	34.5000
 PREHOOK: query: select index, f, count(distinct f) over (partition by index order by f rows between 2 preceding and 1 preceding),
                  count(distinct f) over (partition by index order by f rows between unbounded preceding and 1 preceding),
                  count(distinct f) over (partition by index order by f rows between 1 following and 2 following),
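
The SUM/AVG deltas in this hunk are exact multiples of that 8-hour zone offset. For the AVG over distinct timestamps: 1.362157918703148E9 - 1.362129118703148E9 = 28800 seconds = 8 hours; for the SUM over two distinct timestamps: 2.724315837406296E9 - 2.724258237406296E9 = 57600 seconds = 2 x 8 hours. Each timestamp re-interpreted with local-time semantics moves by one zone offset, so the aggregates move by that offset times the number of distinct values.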

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/JsonSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/JsonSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/JsonSerDe.java
index 2bb4a0f..1119fa2 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/JsonSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/JsonSerDe.java
@@ -21,6 +21,8 @@ package org.apache.hadoop.hive.serde2;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.nio.charset.CharacterCodingException;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -32,13 +34,14 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
+import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/RandomTypeUtil.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/RandomTypeUtil.java b/serde/src/java/org/apache/hadoop/hive/serde2/RandomTypeUtil.java
deleted file mode 100644
index 9360509..0000000
--- a/serde/src/java/org/apache/hadoop/hive/serde2/RandomTypeUtil.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.serde2;
-
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.Timestamp;
-
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Random;
-import java.util.concurrent.TimeUnit;
-
-public class RandomTypeUtil {
-
-  public static String getRandString(Random r) {
-    return getRandString(r, null, r.nextInt(10));
-  }
-
-  public static String getRandString(Random r, String characters, int length) {
-    if (characters == null) {
-      characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
-
-    }
-    StringBuilder sb = new StringBuilder();
-    for (int i = 0; i < length; i++) {
-      if (characters == null) {
-        sb.append((char) (r.nextInt(128)));
-      } else {
-        sb.append(characters.charAt(r.nextInt(characters.length())));
-      }
-    }
-    return sb.toString();
-  }
-
-  public static byte[] getRandBinary(Random r, int len){
-    byte[] bytes = new byte[len];
-    for (int j = 0; j < len; j++){
-      bytes[j] = Byte.valueOf((byte) r.nextInt());
-    }
-    return bytes;
-  }
-
-  private static final String DECIMAL_CHARS = "0123456789";
-
-  public static HiveDecimal getRandHiveDecimal(Random r) {
-    int precision;
-    int scale;
-    while (true) {
-      StringBuilder sb = new StringBuilder();
-      precision = 1 + r.nextInt(18);
-      scale = 0 + r.nextInt(precision + 1);
-
-      int integerDigits = precision - scale;
-
-      if (r.nextBoolean()) {
-        sb.append("-");
-      }
-
-      if (integerDigits == 0) {
-        sb.append("0");
-      } else {
-        sb.append(getRandString(r, DECIMAL_CHARS, integerDigits));
-      }
-      if (scale != 0) {
-        sb.append(".");
-        sb.append(getRandString(r, DECIMAL_CHARS, scale));
-      }
-
-      return HiveDecimal.create(sb.toString());
-    }
-  }
-
-  public static Date getRandDate(Random r) {
-    String dateStr = String.format("%d-%02d-%02d",
-        Integer.valueOf(1800 + r.nextInt(500)),  // year
-        Integer.valueOf(1 + r.nextInt(12)),      // month
-        Integer.valueOf(1 + r.nextInt(28)));     // day
-    Date dateVal = Date.valueOf(dateStr);
-    return dateVal;
-  }
-
-  /**
-   * TIMESTAMP.
-   */
-
-  public static final long NANOSECONDS_PER_SECOND = TimeUnit.SECONDS.toNanos(1);
-  public static final long MILLISECONDS_PER_SECOND = TimeUnit.SECONDS.toMillis(1);
-  public static final long NANOSECONDS_PER_MILLISSECOND = TimeUnit.MILLISECONDS.toNanos(1);
-
-  private static final ThreadLocal<DateFormat> DATE_FORMAT =
-      new ThreadLocal<DateFormat>() {
-        @Override
-        protected DateFormat initialValue() {
-          return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-        }
-      };
-
-  // We've switched to Joda/Java Calendar which has a more limited time range....
-  public static final int MIN_YEAR = 1900;
-  public static final int MAX_YEAR = 3000;
-  private static final long MIN_FOUR_DIGIT_YEAR_MILLIS = parseToMillis("1900-01-01 00:00:00");
-  private static final long MAX_FOUR_DIGIT_YEAR_MILLIS = parseToMillis("3000-01-01 00:00:00");
-
-  private static long parseToMillis(String s) {
-    try {
-      return DATE_FORMAT.get().parse(s).getTime();
-    } catch (ParseException ex) {
-      throw new RuntimeException(ex);
-    }
-  }
-
-  public static Timestamp getRandTimestamp(Random r) {
-    return getRandTimestamp(r, MIN_YEAR, MAX_YEAR);
-  }
-
-  public static Timestamp getRandTimestamp(Random r, int minYear, int maxYear) {
-    String optionalNanos = "";
-    switch (r.nextInt(4)) {
-    case 0:
-      // No nanos.
-      break;
-    case 1:
-      optionalNanos = String.format(".%09d",
-          Integer.valueOf(r.nextInt((int) NANOSECONDS_PER_SECOND)));
-      break;
-    case 2:
-      // Limit to milliseconds only...
-      optionalNanos = String.format(".%09d",
-          Integer.valueOf(r.nextInt((int) MILLISECONDS_PER_SECOND)) * NANOSECONDS_PER_MILLISSECOND);
-      break;
-    case 3:
-      // Limit to below milliseconds only...
-      optionalNanos = String.format(".%09d",
-          Integer.valueOf(r.nextInt((int) NANOSECONDS_PER_MILLISSECOND)));
-      break;
-    }
-    String timestampStr = String.format("%04d-%02d-%02d %02d:%02d:%02d%s",
-        Integer.valueOf(minYear + r.nextInt(maxYear - minYear + 1)),  // year
-        Integer.valueOf(1 + r.nextInt(12)),      // month
-        Integer.valueOf(1 + r.nextInt(28)),      // day
-        Integer.valueOf(0 + r.nextInt(24)),      // hour
-        Integer.valueOf(0 + r.nextInt(60)),      // minute
-        Integer.valueOf(0 + r.nextInt(60)),      // second
-        optionalNanos);
-    Timestamp timestampVal;
-    try {
-      timestampVal = Timestamp.valueOf(timestampStr);
-    } catch (Exception e) {
-      System.err.println("Timestamp string " + timestampStr + " did not parse");
-      throw e;
-    }
-    return timestampVal;
-  }
-
-  public static long randomMillis(long minMillis, long maxMillis, Random rand) {
-    return minMillis + (long) ((maxMillis - minMillis) * rand.nextDouble());
-  }
-
-  public static long randomMillis(Random rand) {
-    return randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand);
-  }
-
-  public static int randomNanos(Random rand, int decimalDigits) {
-    // Only keep the most significant decimalDigits digits.
-    int nanos = rand.nextInt((int) NANOSECONDS_PER_SECOND);
-    return nanos - nanos % (int) Math.pow(10, 9 - decimalDigits);
-  }
-
-  public static int randomNanos(Random rand) {
-    return randomNanos(rand, 9);
-  }
-}
\ No newline at end of file
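
One detail of the deleted utility worth spelling out: randomNanos keeps only the most significant decimalDigits digits of a nanosecond value by subtracting the remainder modulo 10^(9 - decimalDigits). Worked through for decimalDigits = 3 and nanos = 123456789: 123456789 % 1000000 = 456789, so the result is 123456789 - 456789 = 123000000, i.e. the value truncated to millisecond precision.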

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
index 71a9cfc..e1ecdc1 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hive.serde2;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -24,8 +26,6 @@ import java.util.Properties;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
index 8cdc567..34da50d 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
@@ -22,6 +22,8 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.rmi.server.UID;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -40,14 +42,12 @@ import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.io.DecoderFactory;
 import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.UnresolvedUnionException;
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -295,13 +295,13 @@ class AvroDeserializer {
         throw new AvroSerdeException("Unexpected Avro schema for Date TypeInfo: " + recordSchema.getType());
       }
 
-      return Date.ofEpochMilli(DateWritableV2.daysToMillis((Integer)datum));
+      return new Date(DateWritable.daysToMillis((Integer)datum));
     case TIMESTAMP:
       if (recordSchema.getType() != Type.LONG) {
         throw new AvroSerdeException(
           "Unexpected Avro schema for Date TypeInfo: " + recordSchema.getType());
       }
-      return Timestamp.ofEpochMilli((Long)datum);
+      return new Timestamp((Long)datum);
     default:
       return datum;
     }
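
The hunk above restores java.sql types for Avro's temporal payloads: a DATE arrives as an int of days since the epoch and is rebuilt via DateWritable.daysToMillis, and a TIMESTAMP arrives as a long of epoch milliseconds. A hedged sketch of the restored shape; the example payloads are invented, and the plain multiply stands in for daysToMillis, which in the java.sql-based code also compensates for the local zone offset:

    import java.sql.Date;
    import java.sql.Timestamp;

    public class AvroTemporalDecode {
      public static void main(String[] args) {
        int daysSinceEpoch = 17_000;                      // hypothetical Avro DATE payload
        long millis = daysSinceEpoch * 86_400_000L;       // simplified days-to-millis, no zone fix-up
        Date d = new Date(millis);                        // java.sql.Date, local-time semantics
        Timestamp t = new Timestamp(1_325_408_523_000L);  // hypothetical Avro TIMESTAMP payload
        System.out.println(d + " / " + t);
      }
    }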

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
index 99a0b9a..b4c9c22 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hive.serde2.avro;
 
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -26,15 +28,16 @@ import java.util.Set;
 import org.apache.avro.Schema;
 import org.apache.avro.Schema.Field;
 import org.apache.avro.Schema.Type;
+import org.apache.avro.generic.GenericArray;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericData.Fixed;
 import org.apache.avro.generic.GenericEnumSymbol;
-import org.apache.hadoop.hive.common.type.Date;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -207,11 +210,11 @@ class AvroSerializer {
       return vc.getValue();
     case DATE:
       Date date = ((DateObjectInspector)fieldOI).getPrimitiveJavaObject(structFieldData);
-      return DateWritableV2.dateToDays(date);
+      return DateWritable.dateToDays(date);
     case TIMESTAMP:
       Timestamp timestamp =
         ((TimestampObjectInspector) fieldOI).getPrimitiveJavaObject(structFieldData);
-      return timestamp.toEpochMilli();
+      return timestamp.getTime();
     case UNKNOWN:
       throw new AvroSerdeException("Received UNKNOWN primitive category.");
     case VOID:
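
The write side is symmetric: org.apache.hadoop.hive.common.type.Timestamp.toEpochMilli() and java.sql.Timestamp.getTime() both yield milliseconds since the epoch for the same instant, so the long written into the Avro record is unchanged by this revert; only the in-memory type and its rendering differ. For instance, new java.sql.Timestamp(1000L).getTime() round-trips to 1000L.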

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
index f58fb72..a48d4fe 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -52,7 +52,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -407,16 +407,16 @@ public class BinarySortableSerDe extends AbstractSerDe {
       }
 
       case DATE: {
-        DateWritableV2 d = reuse == null ? new DateWritableV2()
-            : (DateWritableV2) reuse;
+        DateWritable d = reuse == null ? new DateWritable()
+            : (DateWritable) reuse;
         d.set(deserializeInt(buffer, invert));
         return d;
       }
 
       case TIMESTAMP:
-        TimestampWritableV2 t = (reuse == null ? new TimestampWritableV2() :
-            (TimestampWritableV2) reuse);
-        byte[] bytes = new byte[TimestampWritableV2.BINARY_SORTABLE_LENGTH];
+        TimestampWritable t = (reuse == null ? new TimestampWritable() :
+            (TimestampWritable) reuse);
+        byte[] bytes = new byte[TimestampWritable.BINARY_SORTABLE_LENGTH];
 
         for (int i = 0; i < bytes.length; i++) {
           bytes[i] = buffer.read(invert);
@@ -797,7 +797,7 @@ public class BinarySortableSerDe extends AbstractSerDe {
       }
       case TIMESTAMP: {
         TimestampObjectInspector toi = (TimestampObjectInspector) poi;
-        TimestampWritableV2 t = toi.getPrimitiveWritableObject(o);
+        TimestampWritable t = toi.getPrimitiveWritableObject(o);
         serializeTimestampWritable(buffer, t, invert);
         return;
       }
@@ -970,7 +970,7 @@ public class BinarySortableSerDe extends AbstractSerDe {
     writeByte(buffer, (byte) v, invert);
   }
 
-  public static void serializeTimestampWritable(ByteStream.Output buffer, TimestampWritableV2 t, boolean invert) {
+  public static void serializeTimestampWritable(ByteStream.Output buffer, TimestampWritable t, boolean invert) {
     byte[] data = t.getBinarySortable();
     for (int i = 0; i < data.length; i++) {
       writeByte(buffer, data[i], invert);
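
serializeTimestampWritable emits TimestampWritable's binary-sortable byte image one byte at a time, inverting when the column sorts descending so that plain byte-wise comparison of the serialized stream still yields the right order. A simplified sketch of that idea, ignoring the byte escaping the real writeByte applies to the output stream:

    import java.io.ByteArrayOutputStream;

    public class InvertibleWrite {
      // Flipping every bit reverses the unsigned byte ordering, which is how a
      // descending sort order can reuse an ascending binary-sortable encoding.
      static void writeBytes(ByteArrayOutputStream out, byte[] data, boolean invert) {
        for (byte b : data) {
          out.write(invert ? (byte) ~b : b);
        }
      }

      public static void main(String[] args) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        writeBytes(out, new byte[] {1, 2, 3}, true);
        System.out.println(java.util.Arrays.toString(out.toByteArray()));  // [-2, -3, -4]
      }
    }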

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
index 62f59af..461043d 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableDeserializeRead.java
@@ -35,7 +35,7 @@ import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
 import org.apache.hadoop.hive.serde2.binarysortable.InputByteBuffer;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -279,7 +279,7 @@ public final class BinarySortableDeserializeRead extends DeserializeRead {
     case TIMESTAMP:
       {
         if (tempTimestampBytes == null) {
-          tempTimestampBytes = new byte[TimestampWritableV2.BINARY_SORTABLE_LENGTH];
+          tempTimestampBytes = new byte[TimestampWritable.BINARY_SORTABLE_LENGTH];
         }
         final boolean invert = columnSortOrderIsDesc[fieldIndex];
         for (int i = 0; i < tempTimestampBytes.length; i++) {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
index b1ee7ec..2f987bf 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
@@ -19,23 +19,23 @@
 package org.apache.hadoop.hive.serde2.binarysortable.fast;
 
 import java.io.IOException;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -60,7 +60,7 @@ public final class BinarySortableSerializeWrite implements SerializeWrite {
   private int index;
   private int level;
 
-  private TimestampWritableV2 tempTimestampWritable;
+  private TimestampWritable tempTimestampWritable;
   private HiveDecimalWritable hiveDecimalWritable;
   private byte[] decimalBytesScratch;
 
@@ -88,7 +88,7 @@ public final class BinarySortableSerializeWrite implements SerializeWrite {
 
   // Not public since we must have the field count or column sort order information.
   private BinarySortableSerializeWrite() {
-    tempTimestampWritable = new TimestampWritableV2();
+    tempTimestampWritable = new TimestampWritable();
   }
 
   /*
@@ -262,7 +262,7 @@ public final class BinarySortableSerializeWrite implements SerializeWrite {
   @Override
   public void writeDate(Date date) throws IOException {
     beginElement();
-    BinarySortableSerDe.serializeInt(output, DateWritableV2.dateToDays(date), columnSortOrderIsDesc[index]);
+    BinarySortableSerDe.serializeInt(output, DateWritable.dateToDays(date), columnSortOrderIsDesc[index]);
   }
 
   // We provide a faster way to write a date without a Date object.

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
index 2c9aaa3..197031d 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
@@ -22,11 +22,11 @@ import java.io.IOException;
 import java.util.Arrays;
 
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
@@ -75,12 +75,12 @@ public abstract class DeserializeRead {
       switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
       case DATE:
         if (currentDateWritable == null) {
-          currentDateWritable = new DateWritableV2();
+          currentDateWritable = new DateWritable();
         }
         break;
       case TIMESTAMP:
         if (currentTimestampWritable == null) {
-          currentTimestampWritable = new TimestampWritableV2();
+          currentTimestampWritable = new TimestampWritable();
         }
         break;
       case INTERVAL_YEAR_MONTH:
@@ -343,12 +343,12 @@ public abstract class DeserializeRead {
   /*
    * DATE.
    */
-  public DateWritableV2 currentDateWritable;
+  public DateWritable currentDateWritable;
 
   /*
    * TIMESTAMP.
    */
-  public TimestampWritableV2 currentTimestampWritable;
+  public TimestampWritable currentTimestampWritable;
 
   /*
    * INTERVAL_YEAR_MONTH.

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
index 4d4717f..3aff610 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
@@ -19,17 +19,17 @@
 package org.apache.hadoop.hive.serde2.fast;
 
 import java.io.IOException;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 
 /*

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritableV2.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritableV2.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritableV2.java
deleted file mode 100644
index ba77608..0000000
--- a/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritableV2.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.serde2.io;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.WritableUtils;
-
-
-/**
- * DateWritableV2
- * Writable equivalent of java.sql.Date.
- *
- * Dates are of the format
- *    YYYY-MM-DD
- *
- */
-public class DateWritableV2 implements WritableComparable<DateWritableV2> {
-
-  private Date date = new Date();
-
-  /* Constructors */
-  public DateWritableV2() {
-  }
-
-  public DateWritableV2(DateWritableV2 d) {
-    set(d);
-  }
-
-  public DateWritableV2(Date d) {
-    set(d);
-  }
-
-  public DateWritableV2(int d) {
-    set(d);
-  }
-
-  /**
-   * Set the DateWritableV2 based on the days since epoch date.
-   * @param d integer value representing days since epoch date
-   */
-  public void set(int d) {
-    date = Date.ofEpochDay(d);
-  }
-
-  /**
-   * Set the DateWritableV2 based on the year/month/day of the date in the local timezone.
-   * @param d Date value
-   */
-  public void set(Date d) {
-    if (d == null) {
-      date = new Date();
-      return;
-    }
-
-    set(d.toEpochDay());
-  }
-
-  public void set(DateWritableV2 d) {
-    set(d.getDays());
-  }
-
-  /**
-   * @return Date value corresponding to the date in the local time zone
-   */
-  public Date get() {
-    return date;
-  }
-
-  public int getDays() {
-    return date.toEpochDay();
-  }
-
-  /**
-   *
-   * @return time in seconds corresponding to this DateWritableV2
-   */
-  public long getTimeInSeconds() {
-    return date.toEpochSecond();
-  }
-
-  public static Date timeToDate(long seconds) {
-    return Date.ofEpochMilli(seconds * 1000);
-  }
-
-  public static long daysToMillis(int days) {
-    return Date.ofEpochDay(days).toEpochMilli();
-  }
-
-  public static int millisToDays(long millis) {
-    return Date.ofEpochMilli(millis).toEpochDay();
-  }
-
-  public static int dateToDays(Date d) {
-    return d.toEpochDay();
-  }
-
-  @Deprecated
-  public static int dateToDays(java.sql.Date d) {
-    return Date.ofEpochMilli(d.getTime()).toEpochDay();
-  }
-
-  @Override
-  public void readFields(DataInput in) throws IOException {
-    date.setTimeInDays(WritableUtils.readVInt(in));
-  }
-
-  @Override
-  public void write(DataOutput out) throws IOException {
-    WritableUtils.writeVInt(out, (int) date.toEpochDay());
-  }
-
-  @Override
-  public int compareTo(DateWritableV2 d) {
-    return date.compareTo(d.date);
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (!(o instanceof DateWritableV2)) {
-      return false;
-    }
-    return compareTo((DateWritableV2) o) == 0;
-  }
-
-  @Override
-  public String toString() {
-    return date.toString();
-  }
-
-  @Override
-  public int hashCode() {
-    return date.hashCode();
-  }
-}
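
The removed DateWritableV2 keeps a date as a plain count of days since the epoch and serializes it as one VInt, so every conversion above is zone-free integer arithmetic. A minimal sketch of the days/millis relation its static helpers expose (86400000 ms per day); the real implementation delegates to Hive's Date type, which also handles calendar details this sketch ignores:

    public class DayMath {
      static final long MILLIS_PER_DAY = 86_400_000L;

      static long daysToMillis(int days) {
        return days * MILLIS_PER_DAY;
      }

      // floorDiv keeps pre-epoch instants (negative millis) on the correct day
      static int millisToDays(long millis) {
        return (int) Math.floorDiv(millis, MILLIS_PER_DAY);
      }

      public static void main(String[] args) {
        System.out.println(millisToDays(daysToMillis(17_000)));  // 17000
        System.out.println(millisToDays(-1L));                   // -1, i.e. 1969-12-31
      }
    }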

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampLocalTZWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampLocalTZWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampLocalTZWritable.java
index 3ffcb7a..e685f4e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampLocalTZWritable.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampLocalTZWritable.java
@@ -31,7 +31,7 @@ import java.time.ZoneId;
 import java.util.Arrays;
 
 /**
- * Writable for TimestampTZ. Copied from TimestampWritableV2.
+ * Writable for TimestampTZ. Copied from TimestampWritable.
  * After we replace {@link java.sql.Timestamp} with {@link java.time.LocalDateTime} for Timestamp,
  * it'll need a new Writable.
  * All timestamp with time zone will be serialized as UTC retaining the instant.
@@ -45,7 +45,7 @@ public class TimestampLocalTZWritable implements WritableComparable<TimestampLoc
   private static final long SEVEN_BYTE_LONG_SIGN_FLIP = 0xff80L << 48; // only need flip the MSB?
 
   /**
-   * The maximum number of bytes required for a TimestampWritableV2
+   * The maximum number of bytes required for a TimestampWritable
    */
   public static final int MAX_BYTES = 13;
 


[24/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/infer_join_preds.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/infer_join_preds.q.out b/ql/src/test/results/clientpositive/infer_join_preds.q.out
index 0dcbbb4..d8163aa 100644
--- a/ql/src/test/results/clientpositive/infer_join_preds.q.out
+++ b/ql/src/test/results/clientpositive/infer_join_preds.q.out
@@ -1177,7 +1177,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35, _col36, _col37, _col38
           Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Filter Operator
-            predicate: (concat(CASE WHEN (_col1 is null) THEN (1) ELSE (_col1) END, ',', CASE WHEN (_col2 is null) THEN (1) ELSE (_col2) END, ',', CASE WHEN (_col3 is null) THEN (1) ELSE (_col3) END, ',', CASE WHEN (_col4 is null) THEN (1) ELSE (_col4) END, ',', CASE WHEN (_col5 is null) THEN ('') ELSE (_col5) END, ',', CASE WHEN (_col18 is null) THEN (1) ELSE (_col18) END, ',', CASE WHEN (_col6 is null) THEN (1) ELSE (_col6) END, ',', CASE WHEN (length(_col7) is null) THEN ('') ELSE (_col7) END, ',', CASE WHEN (_col8 is null) THEN (TIMESTAMP'2017-12-08 00:00:00') ELSE (_col8) END, ',', CASE WHEN (_col9 is null) THEN (1) ELSE (_col9) END, ',', CASE WHEN (_col10 is null) THEN (1) ELSE (_col10) END, ',', CASE WHEN (_col11 is null) THEN (1) ELSE (_col11) END, ',', CASE WHEN (_col12 is null) THEN (1) ELSE (_col12) END, ',', CASE WHEN (length(_col13) is null) THEN ('') ELSE (_col13) END, ',', CASE WHEN (length(_col14) is null) THEN ('') ELSE (_col14) END, ',', CASE WHEN (_col15 is null) THEN (1) ELSE (_col15) END, ',', CASE WHEN (_col16 is null) THEN (1) ELSE (_col16) END, ',', CASE WHEN (_col17 is null) THEN (1) ELSE (_col17) END) <> concat(CASE WHEN (length(_col20) is null) THEN ('') ELSE (_col20) END, ',', CASE WHEN (_col21 is null) THEN (1) ELSE (_col21) END, ',', CASE WHEN (_col22 is null) THEN (1) ELSE (_col22) END, ',', CASE WHEN (_col23 is null) THEN (1) ELSE (_col23) END, ',', CASE WHEN (_col24 is null) THEN (1) ELSE (_col24) END, ',', CASE WHEN (_col25 is null) THEN ('') ELSE (_col25) END, ',', CASE WHEN (_col38 is null) THEN (1) ELSE (_col38) END, ',', CASE WHEN (_col26 is null) THEN (1) ELSE (_col26) END, ',', CASE WHEN (length(_col27) is null) THEN ('') ELSE (_col27) END, ',', CASE WHEN (_col28 is null) THEN (TIMESTAMP'2017-12-08 00:00:00') ELSE (_col28) END, ',', CASE WHEN (_col29 is null) THEN (1) ELSE (_col29) END, ',', CASE WHEN (_col30 is null) THEN (1) ELSE (_col30) END, ',', CASE WHEN (_col31 is null) THEN (1) ELSE (_col31) END, ',', CASE WHEN (_col32 is null) THEN (1) ELSE (_col32) END, ',', CASE WHEN (length(_col33) is null) THEN ('') ELSE (_col33) END, ',', CASE WHEN (length(_col34) is null) THEN ('') ELSE (_col34) END, ',', CASE WHEN (_col35 is null) THEN (1) ELSE (_col35) END, ',', CASE WHEN (_col36 is null) THEN (1) ELSE (_col36) END, ',', CASE WHEN (_col37 is null) THEN (1) ELSE (_col37) END)) (type: boolean)
+            predicate: (concat(CASE WHEN (_col1 is null) THEN (1) ELSE (_col1) END, ',', CASE WHEN (_col2 is null) THEN (1) ELSE (_col2) END, ',', CASE WHEN (_col3 is null) THEN (1) ELSE (_col3) END, ',', CASE WHEN (_col4 is null) THEN (1) ELSE (_col4) END, ',', CASE WHEN (_col5 is null) THEN ('') ELSE (_col5) END, ',', CASE WHEN (_col18 is null) THEN (1) ELSE (_col18) END, ',', CASE WHEN (_col6 is null) THEN (1) ELSE (_col6) END, ',', CASE WHEN (length(_col7) is null) THEN ('') ELSE (_col7) END, ',', CASE WHEN (_col8 is null) THEN (TIMESTAMP'2017-12-08 00:00:00.0') ELSE (_col8) END, ',', CASE WHEN (_col9 is null) THEN (1) ELSE (_col9) END, ',', CASE WHEN (_col10 is null) THEN (1) ELSE (_col10) END, ',', CASE WHEN (_col11 is null) THEN (1) ELSE (_col11) END, ',', CASE WHEN (_col12 is null) THEN (1) ELSE (_col12) END, ',', CASE WHEN (length(_col13) is null) THEN ('') ELSE (_col13) END, ',', CASE WHEN (length(_col14) is null) THEN ('') ELSE (_col14) END, ',', CASE WHEN (_col15 is null) THEN (1) ELSE (_col15) END, ',', CASE WHEN (_col16 is null) THEN (1) ELSE (_col16) END, ',', CASE WHEN (_col17 is null) THEN (1) ELSE (_col17) END) <> concat(CASE WHEN (length(_col20) is null) THEN ('') ELSE (_col20) END, ',', CASE WHEN (_col21 is null) THEN (1) ELSE (_col21) END, ',', CASE WHEN (_col22 is null) THEN (1) ELSE (_col22) END, ',', CASE WHEN (_col23 is null) THEN (1) ELSE (_col23) END, ',', CASE WHEN (_col24 is null) THEN (1) ELSE (_col24) END, ',', CASE WHEN (_col25 is null) THEN ('') ELSE (_col25) END, ',', CASE WHEN (_col38 is null) THEN (1) ELSE (_col38) END, ',', CASE WHEN (_col26 is null) THEN (1) ELSE (_col26) END, ',', CASE WHEN (length(_col27) is null) THEN ('') ELSE (_col27) END, ',', CASE WHEN (_col28 is null) THEN (TIMESTAMP'2017-12-08 00:00:00.0') ELSE (_col28) END, ',', CASE WHEN (_col29 is null) THEN (1) ELSE (_col29) END, ',', CASE WHEN (_col30 is null) THEN (1) ELSE (_col30) END, ',', CASE WHEN (_col31 is null) THEN (1) ELSE (_col31) END, ',', CASE WHEN (_col32 is null) THEN (1) ELSE (_col32) END, ',', CASE WHEN (length(_col33) is null) THEN ('') ELSE (_col33) END, ',', CASE WHEN (length(_col34) is null) THEN ('') ELSE (_col34) END, ',', CASE WHEN (_col35 is null) THEN (1) ELSE (_col35) END, ',', CASE WHEN (_col36 is null) THEN (1) ELSE (_col36) END, ',', CASE WHEN (_col37 is null) THEN (1) ELSE (_col37) END)) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: _col0 (type: bigint)
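
The only change in this predicate is literal rendering: java.sql.Timestamp.toString() always prints at least one fractional digit, so a whole-second literal comes back as 2017-12-08 00:00:00.0 where the removed UTC timestamp type printed 2017-12-08 00:00:00. The same trailing .0 accounts for the literal-only diffs in the q.out files that follow. A one-line check:

    public class TsToString {
      public static void main(String[] args) {
        java.sql.Timestamp ts = java.sql.Timestamp.valueOf("2017-12-08 00:00:00");
        System.out.println(ts);  // 2017-12-08 00:00:00.0 -- zero nanos still prints ".0"
      }
    }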

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/interval_alt.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/interval_alt.q.out b/ql/src/test/results/clientpositive/interval_alt.q.out
index ca645b9..a8f7292 100644
--- a/ql/src/test/results/clientpositive/interval_alt.q.out
+++ b/ql/src/test/results/clientpositive/interval_alt.q.out
@@ -139,7 +139,7 @@ STAGE PLANS:
             alias: t_n18
             Statistics: Num rows: 2 Data size: 2 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: (DATE'2012-01-01' + IntervalDayLiteralProcessor(((- dt) * dt))) (type: timestamp), (DATE'2012-01-01' - IntervalDayLiteralProcessor(((- dt) * dt))) (type: timestamp), TIMESTAMP'2012-01-04 00:00:00' (type: timestamp), (DATE'2012-01-01' + IntervalYearMonthLiteralProcessor(concat(dt, '-1'))) (type: date)
+              expressions: (DATE'2012-01-01' + IntervalDayLiteralProcessor(((- dt) * dt))) (type: timestamp), (DATE'2012-01-01' - IntervalDayLiteralProcessor(((- dt) * dt))) (type: timestamp), TIMESTAMP'2012-01-04 00:00:00.0' (type: timestamp), (DATE'2012-01-01' + IntervalYearMonthLiteralProcessor(concat(dt, '-1'))) (type: date)
               outputColumnNames: _col0, _col1, _col2, _col3
               Statistics: Num rows: 2 Data size: 2 Basic stats: COMPLETE Column stats: NONE
               File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/interval_arithmetic.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/interval_arithmetic.q.out b/ql/src/test/results/clientpositive/interval_arithmetic.q.out
index 819f1fe..7cb7270 100644
--- a/ql/src/test/results/clientpositive/interval_arithmetic.q.out
+++ b/ql/src/test/results/clientpositive/interval_arithmetic.q.out
@@ -174,7 +174,7 @@ limit 2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@interval_arithmetic_1_n0
 #### A masked pattern was here ####
-1969-12-31	-10750 00:00:00.000000000	10750 00:00:00.000000000	0 00:00:00.000000000
+1969-12-31	-10749 23:00:00.000000000	10749 23:00:00.000000000	0 00:00:00.000000000
 NULL	NULL	NULL	NULL
 PREHOOK: query: explain
 select
@@ -396,7 +396,7 @@ limit 2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@interval_arithmetic_1_n0
 #### A masked pattern was here ####
-1969-12-31	1969-09-22 12:37:26.876543211	1970-04-09 11:22:33.123456789	1970-04-09 11:22:33.123456789	1969-09-22 12:37:26.876543211	1969-09-22 12:37:26.876543211	1970-04-09 11:22:33.123456789
+1969-12-31	1969-09-22 13:37:26.876543211	1970-04-09 11:22:33.123456789	1970-04-09 11:22:33.123456789	1969-09-22 13:37:26.876543211	1969-09-22 13:37:26.876543211	1970-04-09 11:22:33.123456789
 NULL	NULL	NULL	NULL	NULL	NULL	NULL
 PREHOOK: query: explain
 select
@@ -558,7 +558,7 @@ limit 2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@interval_arithmetic_1_n0
 #### A masked pattern was here ####
-1969-12-31 15:59:46.674	1969-09-23 04:37:13.550543211	1970-04-10 03:22:19.797456789	1970-04-10 03:22:19.797456789	1969-09-23 04:37:13.550543211	1969-09-23 04:37:13.550543211	1970-04-10 03:22:19.797456789
+1969-12-31 15:59:46.674	1969-09-23 05:37:13.550543211	1970-04-10 03:22:19.797456789	1970-04-10 03:22:19.797456789	1969-09-23 05:37:13.550543211	1969-09-23 05:37:13.550543211	1970-04-10 03:22:19.797456789
 NULL	NULL	NULL	NULL	NULL	NULL	NULL
 PREHOOK: query: explain
 select
@@ -630,7 +630,7 @@ STAGE PLANS:
           alias: interval_arithmetic_1_n0
           Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: TIMESTAMP'2016-11-11 03:04:00' (type: timestamp)
+            expressions: TIMESTAMP'2016-11-11 03:04:00.0' (type: timestamp)
             outputColumnNames: _col0
             Statistics: Num rows: 12288 Data size: 491520 Basic stats: COMPLETE Column stats: COMPLETE
             Limit
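
The one-hour shifts in these interval results (12:37:26.876543211 becoming 13:37:26.876543211, and -10750 00:00:00 collapsing to -10749 23:00:00) are consistent with arithmetic done in local wall-clock time: when the span crosses a US/Pacific daylight-saving transition, one hour drops out of the difference (10750 days x 24 h - 1 h = 10749 days 23 h), whereas the reverted-away UTC implementation counted uniform 24-hour days.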

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out b/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out
index 028c3ca..be1b4c6 100644
--- a/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out
+++ b/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out
@@ -665,22 +665,22 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: over10k_orc_bucketed
-                  Statistics: Num rows: 1234 Data size: 706090 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1237 Data size: 707670 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                     outputColumnNames: ROW__ID
-                    Statistics: Num rows: 1234 Data size: 706090 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1237 Data size: 707670 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       keys: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                       mode: hash
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                        Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
+                        Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: bigint)
             Execution mode: llap
             LLAP IO: may be used (ACID table)
@@ -692,13 +692,13 @@ STAGE PLANS:
                 keys: KEY._col0 (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 617 Data size: 51828 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 618 Data size: 51912 Basic stats: COMPLETE Column stats: COMPLETE
                 Filter Operator
                   predicate: (_col1 > 1L) (type: boolean)
-                  Statistics: Num rows: 205 Data size: 17220 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 205 Data size: 17220 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 206 Data size: 17304 Basic stats: COMPLETE Column stats: COMPLETE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/current_date_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/current_date_timestamp.q.out b/ql/src/test/results/clientpositive/llap/current_date_timestamp.q.out
index 083aab3..0052f58 100644
--- a/ql/src/test/results/clientpositive/llap/current_date_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/llap/current_date_timestamp.q.out
@@ -55,7 +55,7 @@ STAGE PLANS:
           alias: alltypesorc
           GatherStats: false
           Select Operator
-            expressions: TIMESTAMP'2012-01-01 09:02:03' (type: timestamp)
+            expressions: TIMESTAMP'2012-01-01 01:02:03.0' (type: timestamp)
             outputColumnNames: _col0
             ListSink
 
@@ -228,11 +228,11 @@ POSTHOOK: query: select unix_timestamp(current_timestamp()),
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-1325408523	9	2	3
-1325408523	9	2	3
-1325408523	9	2	3
-1325408523	9	2	3
-1325408523	9	2	3
+1325408523	1	2	3
+1325408523	1	2	3
+1325408523	1	2	3
+1325408523	1	2	3
+1325408523	1	2	3
 PREHOOK: query: select to_date(current_timestamp()),
                            year(current_timestamp()),
                            month(current_timestamp()),
@@ -315,7 +315,7 @@ POSTHOOK: query: select current_timestamp() - current_timestamp(),
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-0 00:00:00.000000000	0 09:02:03.000000000	-0 09:02:03.000000000	0 00:00:00.000000000
+0 00:00:00.000000000	0 01:02:03.000000000	-0 01:02:03.000000000	0 00:00:00.000000000
 PREHOOK: query: select ctimestamp1 - current_date(),
         ctimestamp1- ctimestamp2,
         current_date() - current_date(),
@@ -338,11 +338,11 @@ where ctimestamp1 is not null
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
--15340 08:00:12.326000000	-0 00:00:22.201000000	0 00:00:00.000000000	15340 07:59:50.125000000
--15340 08:00:03.213000000	-0 00:00:05.759000000	0 00:00:00.000000000	15340 07:59:57.454000000
--15340 08:00:14.813000000	-0 00:00:22.774000000	0 00:00:00.000000000	15340 07:59:52.039000000
--15340 08:00:08.566000000	-0 00:00:22.918000000	0 00:00:00.000000000	15340 07:59:45.648000000
--15340 07:59:43.993000000	-0 00:00:00.141000000	0 00:00:00.000000000	15340 07:59:43.852000000
+-15340 08:00:12.326000000	-0 00:00:21.201000000	0 00:00:00.000000000	15340 07:59:51.125000000
+-15340 08:00:03.213000000	-0 00:00:04.759000000	0 00:00:00.000000000	15340 07:59:58.454000000
+-15340 08:00:14.813000000	-0 00:00:21.774000000	0 00:00:00.000000000	15340 07:59:53.039000000
+-15340 08:00:08.566000000	-0 00:00:21.918000000	0 00:00:00.000000000	15340 07:59:46.648000000
+-15340 07:59:44.993000000	-0 00:00:00.141000000	0 00:00:00.000000000	15340 07:59:44.852000000
 PREHOOK: query: select current_date, current_timestamp from src limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -351,11 +351,11 @@ POSTHOOK: query: select current_date, current_timestamp from src limit 5
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-2012-01-01	2012-01-01 09:02:03
-2012-01-01	2012-01-01 09:02:03
-2012-01-01	2012-01-01 09:02:03
-2012-01-01	2012-01-01 09:02:03
-2012-01-01	2012-01-01 09:02:03
+2012-01-01	2012-01-01 01:02:03
+2012-01-01	2012-01-01 01:02:03
+2012-01-01	2012-01-01 01:02:03
+2012-01-01	2012-01-01 01:02:03
+2012-01-01	2012-01-01 01:02:03
 PREHOOK: query: select `[kv]+.+` from srcpart order by key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
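
A quick consistency check on the hour column above: 1325408523 - 1325376000 (the epoch second of 2012-01-01 00:00:00 UTC, i.e. 15340 days x 86400 s) = 32523 s = 9 h 2 m 3 s. The instant is therefore 2012-01-01 09:02:03 UTC, which US/Pacific (UTC-8) renders as 2012-01-01 01:02:03: hour() returns 9 under the removed UTC semantics and 1 under the restored local semantics, while unix_timestamp() is identical in both.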

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/default_constraint.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/default_constraint.q.out b/ql/src/test/results/clientpositive/llap/default_constraint.q.out
index f93eb1f..775cba8 100644
--- a/ql/src/test/results/clientpositive/llap/default_constraint.q.out
+++ b/ql/src/test/results/clientpositive/llap/default_constraint.q.out
@@ -440,7 +440,7 @@ STAGE PLANS:
                       Statistics: Num rows: 1 Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                       function name: inline
                       Select Operator
-                        expressions: CAST( col1 AS DATE) (type: date), TIMESTAMP'2016-02-22 12:45:07' (type: timestamp), TIMESTAMPLOCALTZ'2016-01-03 12:26:34.0 US/Pacific' (type: timestamp with local time zone), CURRENT_DATE() (type: date), CAST( col2 AS TIMESTAMP) (type: timestamp)
+                        expressions: CAST( col1 AS DATE) (type: date), TIMESTAMP'2016-02-22 12:45:07.0' (type: timestamp), TIMESTAMPLOCALTZ'2016-01-03 12:26:34.0 US/Pacific' (type: timestamp with local time zone), CURRENT_DATE() (type: date), CAST( col2 AS TIMESTAMP) (type: timestamp)
                         outputColumnNames: _col0, _col1, _col2, _col3, _col4
                         Statistics: Num rows: 1 Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                         File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out b/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out
index b9530fa..1c0841c 100644
--- a/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out
+++ b/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out
@@ -170,7 +170,7 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 	bucketing_version   	2                   
 	numFiles            	1                   
-	totalSize           	295638              
+	totalSize           	295583              
 	transactional       	true                
 	transactional_properties	default             
 #### A masked pattern was here ####
@@ -205,9 +205,9 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: acid_ivot
-                  Statistics: Num rows: 5865 Data size: 2956380 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 5864 Data size: 2955830 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    Statistics: Num rows: 5865 Data size: 2956380 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 5864 Data size: 2955830 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       mode: hash
@@ -376,7 +376,7 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 	bucketing_version   	2                   
 	numFiles            	1                   
-	totalSize           	1652                
+	totalSize           	1663                
 	transactional       	true                
 	transactional_properties	default             
 #### A masked pattern was here ####
@@ -411,9 +411,9 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: acid_ivot
-                  Statistics: Num rows: 32 Data size: 16520 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 32 Data size: 16630 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    Statistics: Num rows: 32 Data size: 16520 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 32 Data size: 16630 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       mode: hash
@@ -509,7 +509,7 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 	bucketing_version   	2                   
 	numFiles            	2                   
-	totalSize           	3304                
+	totalSize           	3326                
 	transactional       	true                
 	transactional_properties	default             
 #### A masked pattern was here ####
@@ -544,9 +544,9 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: acid_ivot
-                  Statistics: Num rows: 65 Data size: 33040 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 65 Data size: 33260 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    Statistics: Num rows: 65 Data size: 33040 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 65 Data size: 33260 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       mode: hash
@@ -638,7 +638,7 @@ Table Type:         	MANAGED_TABLE
 Table Parameters:	 	 
 	bucketing_version   	2                   
 	numFiles            	3                   
-	totalSize           	298943              
+	totalSize           	298909              
 	transactional       	true                
 	transactional_properties	default             
 #### A masked pattern was here ####
@@ -673,9 +673,9 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: acid_ivot
-                  Statistics: Num rows: 5931 Data size: 2989430 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 5930 Data size: 2989090 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    Statistics: Num rows: 5931 Data size: 2989430 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 5930 Data size: 2989090 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       mode: hash

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out b/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out
index fb90b60..6900cdb 100644
--- a/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out
+++ b/ql/src/test/results/clientpositive/llap/llap_uncompressed.q.out
@@ -162,7 +162,7 @@ POSTHOOK: query: select sum(hash(*)) from llap_temp_table
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@llap_temp_table
 #### A masked pattern was here ####
-251958877792
+212787774304
 PREHOOK: query: explain
 select * from orc_llap_n0 where cint > 10 and cint < 5000000
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/orc_analyze.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_analyze.q.out b/ql/src/test/results/clientpositive/llap/orc_analyze.q.out
index 25426bf..14bf186 100644
--- a/ql/src/test/results/clientpositive/llap/orc_analyze.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_analyze.q.out
@@ -102,7 +102,7 @@ Table Parameters:
 	numFiles            	1                   
 	numRows             	100                 
 	rawDataSize         	52600               
-	totalSize           	3222                
+	totalSize           	3236                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -150,7 +150,7 @@ Table Parameters:
 	numFiles            	1                   
 	numRows             	100                 
 	rawDataSize         	52600               
-	totalSize           	3222                
+	totalSize           	3236                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -237,7 +237,7 @@ Table Parameters:
 	numFiles            	1                   
 	numRows             	100                 
 	rawDataSize         	52600               
-	totalSize           	3222                
+	totalSize           	3236                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -345,7 +345,7 @@ Partition Parameters:
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	21950               
-	totalSize           	2123                
+	totalSize           	2134                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -386,7 +386,7 @@ Partition Parameters:
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	22050               
-	totalSize           	2134                
+	totalSize           	2147                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -439,7 +439,7 @@ Partition Parameters:
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	21950               
-	totalSize           	2123                
+	totalSize           	2134                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -480,7 +480,7 @@ Partition Parameters:
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	22050               
-	totalSize           	2134                
+	totalSize           	2147                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -576,7 +576,7 @@ Partition Parameters:
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	21950               
-	totalSize           	2123                
+	totalSize           	2134                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -617,7 +617,7 @@ Partition Parameters:
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	22050               
-	totalSize           	2134                
+	totalSize           	2147                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -731,7 +731,7 @@ Partition Parameters:
 	numFiles            	4                   
 	numRows             	50                  
 	rawDataSize         	21955               
-	totalSize           	5341                
+	totalSize           	5394                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -772,7 +772,7 @@ Partition Parameters:
 	numFiles            	4                   
 	numRows             	50                  
 	rawDataSize         	22043               
-	totalSize           	5339                
+	totalSize           	5388                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -825,7 +825,7 @@ Partition Parameters:
 	numFiles            	4                   
 	numRows             	50                  
 	rawDataSize         	21955               
-	totalSize           	5341                
+	totalSize           	5394                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -866,7 +866,7 @@ Partition Parameters:
 	numFiles            	4                   
 	numRows             	50                  
 	rawDataSize         	22043               
-	totalSize           	5339                
+	totalSize           	5388                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -968,7 +968,7 @@ Partition Parameters:
 	numFiles            	4                   
 	numRows             	50                  
 	rawDataSize         	21955               
-	totalSize           	5341                
+	totalSize           	5394                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -1009,7 +1009,7 @@ Partition Parameters:
 	numFiles            	4                   
 	numRows             	50                  
 	rawDataSize         	22043               
-	totalSize           	5339                
+	totalSize           	5388                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -1117,7 +1117,7 @@ Partition Parameters:
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	21950               
-	totalSize           	2123                
+	totalSize           	2134                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 
@@ -1170,7 +1170,7 @@ Partition Parameters:
 	numFiles            	1                   
 	numRows             	50                  
 	rawDataSize         	21950               
-	totalSize           	2123                
+	totalSize           	2134                
 #### A masked pattern was here ####
 	 	 
 # Storage Information	 	 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/orc_llap.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_llap.q.out b/ql/src/test/results/clientpositive/llap/orc_llap.q.out
index 7f8667e..9235fbc 100644
--- a/ql/src/test/results/clientpositive/llap/orc_llap.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_llap.q.out
@@ -379,7 +379,7 @@ POSTHOOK: query: select sum(hash(*)) from (select * from orc_llap where cint > 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_llap
 #### A masked pattern was here ####
-258089178813
+-197609091139
 PREHOOK: query: explain
 select sum(hash(*)) from (select cstring2 from orc_llap where cint > 5 and cint < 10) t
 PREHOOK: type: QUERY
@@ -837,7 +837,7 @@ POSTHOOK: query: select sum(hash(*)) from (select * from orc_llap where cint > 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_llap
 #### A masked pattern was here ####
-516178357626
+-395218182278
 PREHOOK: query: explain
 select sum(hash(*)) from (select cstring2 from orc_llap where cint > 5 and cint < 10) t
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/orc_merge11.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge11.q.out b/ql/src/test/results/clientpositive/llap/orc_merge11.q.out
index 5327299..8e7840c 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge11.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge11.q.out
@@ -96,7 +96,7 @@ File Statistics:
   Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0
 
 Stripes:
-  Stripe: offset: 3 data: 5761 rows: 50000 tail: 99 index: 433
+  Stripe: offset: 3 data: 5761 rows: 50000 tail: 112 index: 433
     Stream: column 0 section ROW_INDEX start: 3 length 17
     Stream: column 1 section ROW_INDEX start: 20 length 73
     Stream: column 2 section ROW_INDEX start: 93 length 79
@@ -155,7 +155,7 @@ Stripes:
       Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304
       Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64
 
-File length: 6672 bytes
+File length: 6685 bytes
 Padding length: 0 bytes
 Padding ratio: 0%
 ________________________________________________________________________________________________________________________
@@ -187,7 +187,7 @@ File Statistics:
   Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0
 
 Stripes:
-  Stripe: offset: 3 data: 5761 rows: 50000 tail: 99 index: 433
+  Stripe: offset: 3 data: 5761 rows: 50000 tail: 112 index: 433
     Stream: column 0 section ROW_INDEX start: 3 length 17
     Stream: column 1 section ROW_INDEX start: 20 length 73
     Stream: column 2 section ROW_INDEX start: 93 length 79
@@ -246,7 +246,7 @@ Stripes:
       Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304
       Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64
 
-File length: 6672 bytes
+File length: 6685 bytes
 Padding length: 0 bytes
 Padding ratio: 0%
 ________________________________________________________________________________________________________________________
@@ -306,7 +306,7 @@ File Statistics:
   Column 5: count: 100000 hasNull: false bytesOnDisk: 128 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0
 
 Stripes:
-  Stripe: offset: 3 data: 5761 rows: 50000 tail: 99 index: 433
+  Stripe: offset: 3 data: 5761 rows: 50000 tail: 112 index: 433
     Stream: column 0 section ROW_INDEX start: 3 length 17
     Stream: column 1 section ROW_INDEX start: 20 length 73
     Stream: column 2 section ROW_INDEX start: 93 length 79
@@ -364,22 +364,22 @@ Stripes:
       Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,354,22,0,156,32
       Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304
       Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64
-  Stripe: offset: 6296 data: 5761 rows: 50000 tail: 99 index: 433
-    Stream: column 0 section ROW_INDEX start: 6296 length 17
-    Stream: column 1 section ROW_INDEX start: 6313 length 73
-    Stream: column 2 section ROW_INDEX start: 6386 length 79
-    Stream: column 3 section ROW_INDEX start: 6465 length 85
-    Stream: column 4 section ROW_INDEX start: 6550 length 92
-    Stream: column 5 section ROW_INDEX start: 6642 length 87
-    Stream: column 1 section DATA start: 6729 length 30
-    Stream: column 2 section DATA start: 6759 length 24
-    Stream: column 2 section LENGTH start: 6783 length 8
-    Stream: column 2 section DICTIONARY_DATA start: 6791 length 23
-    Stream: column 3 section DATA start: 6814 length 5114
-    Stream: column 4 section DATA start: 11928 length 480
-    Stream: column 4 section SECONDARY start: 12408 length 18
-    Stream: column 5 section DATA start: 12426 length 46
-    Stream: column 5 section SECONDARY start: 12472 length 18
+  Stripe: offset: 6309 data: 5761 rows: 50000 tail: 112 index: 433
+    Stream: column 0 section ROW_INDEX start: 6309 length 17
+    Stream: column 1 section ROW_INDEX start: 6326 length 73
+    Stream: column 2 section ROW_INDEX start: 6399 length 79
+    Stream: column 3 section ROW_INDEX start: 6478 length 85
+    Stream: column 4 section ROW_INDEX start: 6563 length 92
+    Stream: column 5 section ROW_INDEX start: 6655 length 87
+    Stream: column 1 section DATA start: 6742 length 30
+    Stream: column 2 section DATA start: 6772 length 24
+    Stream: column 2 section LENGTH start: 6796 length 8
+    Stream: column 2 section DICTIONARY_DATA start: 6804 length 23
+    Stream: column 3 section DATA start: 6827 length 5114
+    Stream: column 4 section DATA start: 11941 length 480
+    Stream: column 4 section SECONDARY start: 12421 length 18
+    Stream: column 5 section DATA start: 12439 length 46
+    Stream: column 5 section SECONDARY start: 12485 length 18
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DICTIONARY_V2[6]
@@ -423,7 +423,7 @@ Stripes:
       Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304
       Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64
 
-File length: 12978 bytes
+File length: 13004 bytes
 Padding length: 0 bytes
 Padding ratio: 0%
 ________________________________________________________________________________________________________________________
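
Note on the orc_merge11 stripe numbers above: the deltas are internally consistent. The stripe tail grows from 99 to 112 bytes (+13), so the second stripe and every stream start inside it shift by exactly 13 (6296 -> 6309, 12426 -> 12439, and so on), and the two-stripe file grows by 26 bytes (12978 -> 13004). A small arithmetic sketch of the cascade; StripeOffsetDemo is a hypothetical name, and this is plain arithmetic over the numbers printed above, not ORC reader code.

    /**
     * Offset cascade: if each stripe's tail grows by a fixed delta,
     * stripe k's start (and every stream start inside it) shifts by
     * k * delta, and the file length grows by numStripes * delta.
     */
    public class StripeOffsetDemo {
      public static void main(String[] args) {
        long firstStripeStart = 3;
        long index = 433, data = 5761;
        long oldTail = 99, newTail = 112;
        long delta = newTail - oldTail; // 13

        long oldSecondStart = firstStripeStart + index + data + oldTail; // 6296
        long newSecondStart = firstStripeStart + index + data + newTail; // 6309
        System.out.println(oldSecondStart + " -> " + newSecondStart);
        System.out.println("per-stripe shift: " + delta
            + ", two-stripe file grows by " + 2 * delta + " bytes");
      }
    }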

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/orc_merge5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge5.q.out b/ql/src/test/results/clientpositive/llap/orc_merge5.q.out
index dfa5e12..d49c72a 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge5.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge5.q.out
@@ -134,7 +134,7 @@ POSTHOOK: query: analyze table orc_merge5b_n0 compute statistics noscan
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5b_n0
 POSTHOOK: Output: default@orc_merge5b_n0
-Found 1 items
+Found 2 items
 #### A masked pattern was here ####
 PREHOOK: query: select * from orc_merge5b_n0
 PREHOOK: type: QUERY
@@ -335,7 +335,7 @@ POSTHOOK: query: analyze table orc_merge5b_n0 compute statistics noscan
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5b_n0
 POSTHOOK: Output: default@orc_merge5b_n0
-Found 1 items
+Found 2 items
 #### A masked pattern was here ####
 PREHOOK: query: select * from orc_merge5b_n0
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/orc_merge6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge6.q.out b/ql/src/test/results/clientpositive/llap/orc_merge6.q.out
index c86c35b..1359111 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge6.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge6.q.out
@@ -170,9 +170,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-Found 1 items
+Found 2 items
 #### A masked pattern was here ####
-Found 1 items
+Found 2 items
 #### A masked pattern was here ####
 PREHOOK: query: show partitions orc_merge5a_n1
 PREHOOK: type: SHOWPARTITIONS
@@ -466,9 +466,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1
 POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-Found 1 items
+Found 2 items
 #### A masked pattern was here ####
-Found 1 items
+Found 2 items
 #### A masked pattern was here ####
 PREHOOK: query: show partitions orc_merge5a_n1
 PREHOOK: type: SHOWPARTITIONS

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/orc_merge7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge7.q.out b/ql/src/test/results/clientpositive/llap/orc_merge7.q.out
index 0b30145..273a5be 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge7.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge7.q.out
@@ -203,7 +203,7 @@ POSTHOOK: Output: default@orc_merge5a_n0
 POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 Found 1 items
 #### A masked pattern was here ####
-Found 1 items
+Found 2 items
 #### A masked pattern was here ####
 PREHOOK: query: show partitions orc_merge5a_n0
 PREHOOK: type: SHOWPARTITIONS
@@ -572,7 +572,7 @@ POSTHOOK: Output: default@orc_merge5a_n0
 POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
 Found 1 items
 #### A masked pattern was here ####
-Found 1 items
+Found 2 items
 #### A masked pattern was here ####
 PREHOOK: query: show partitions orc_merge5a_n0
 PREHOOK: type: SHOWPARTITIONS

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/orc_ppd_date.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_ppd_date.q.out b/ql/src/test/results/clientpositive/llap/orc_ppd_date.q.out
index f86fe77..84cd53d 100644
--- a/ql/src/test/results/clientpositive/llap/orc_ppd_date.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_ppd_date.q.out
@@ -26,7 +26,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da='1970-02-20'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da='1970-02-20'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -35,7 +35,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da='1970-02-20'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da= date '1970-02-20'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -44,7 +44,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da= date '1970-02
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da=cast('1970-02-20' as date)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -53,7 +53,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da=cast('1970-02-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da=cast('1970-02-20' as date)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -62,7 +62,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da=cast('1970-02-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da=cast('1970-02-20' as varchar(20))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -71,7 +71,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da=cast('1970-02-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da=cast('1970-02-20' as varchar(20))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -80,7 +80,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da=cast('1970-02-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da!='1970-02-20'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -89,7 +89,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da!='1970-02-20'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da!='1970-02-20'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -98,7 +98,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da!='1970-02-20'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<'1970-02-27'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -107,7 +107,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<'1970-02-27'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<'1970-02-27'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -116,7 +116,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<'1970-02-27'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<'1970-02-29'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -125,7 +125,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<'1970-02-29'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<'1970-02-29'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -134,7 +134,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<'1970-02-29'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<'1970-02-15'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -161,7 +161,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<='1970-02-20'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<='1970-02-20'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -170,7 +170,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<='1970-02-20'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<='1970-02-27'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -179,7 +179,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<='1970-02-27'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<='1970-02-27'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -188,7 +188,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da<='1970-02-27'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da in (cast('1970-02-21' as date), cast('1970-02-27' as date))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -197,7 +197,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da in (cast('1970
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da in (cast('1970-02-21' as date), cast('1970-02-27' as date))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -206,7 +206,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da in (cast('1970
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da in (cast('1970-02-20' as date), cast('1970-02-27' as date))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -215,7 +215,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da in (cast('1970
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da in (cast('1970-02-20' as date), cast('1970-02-27' as date))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -224,7 +224,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da in (cast('1970
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da in (cast('1970-02-21' as date), cast('1970-02-22' as date))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -251,7 +251,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da between '1970-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da between '1970-02-19' and '1970-02-22'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -260,7 +260,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da between '1970-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da between '1970-02-19' and '1970-02-28'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -269,7 +269,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da between '1970-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da between '1970-02-19' and '1970-02-28'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
@@ -278,7 +278,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da between '1970-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n3
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n3 where da between '1970-02-18' and '1970-02-19'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n3
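
Note on the sum(hash(*)) deltas above: the aggregate sums one hash per row, so a uniform reinterpretation of each date's underlying value shifts every affected row hash and hence the total. The toy below uses java.sql.Timestamp.hashCode() (derived via java.util.Date from the epoch-millis value) to show the effect; HashSumDemo and the 8-hour shift are illustrative assumptions, not Hive's actual ObjectInspector hashing.

    import java.sql.Timestamp;

    /**
     * Toy model (not Hive's hash): Timestamp.hashCode() follows the
     * java.util.Date implementation, which is derived from the millis
     * value, so shifting every row by a fixed zone offset changes each
     * row hash and therefore the aggregated sum.
     */
    public class HashSumDemo {
      static long sumOfHashes(long[] millis) {
        long sum = 0;
        for (long m : millis) {
          sum += new Timestamp(m).hashCode();
        }
        return sum;
      }

      public static void main(String[] args) {
        long[] rows = {0L, 86_400_000L, 172_800_000L};
        long shift = 8 * 3_600_000L; // pretend an 8-hour reinterpretation
        long[] shifted = new long[rows.length];
        for (int i = 0; i < rows.length; i++) {
          shifted[i] = rows[i] + shift;
        }
        System.out.println(sumOfHashes(rows));    // baseline total
        System.out.println(sumOfHashes(shifted)); // different total
      }
    }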

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/orc_ppd_decimal.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_ppd_decimal.q.out b/ql/src/test/results/clientpositive/llap/orc_ppd_decimal.q.out
index 4b535d4..15f52ec 100644
--- a/ql/src/test/results/clientpositive/llap/orc_ppd_decimal.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_ppd_decimal.q.out
@@ -26,7 +26,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d=0.22
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d=0.22
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -35,7 +35,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d=0.22
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d='0.22'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -44,7 +44,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d='0.22'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d='0.22'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -53,7 +53,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d='0.22'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d=cast('0.22' as float)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -62,7 +62,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d=cast('0.22' as
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d=cast('0.22' as float)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -71,7 +71,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d=cast('0.22' as
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d!=0.22
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -80,7 +80,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d!=0.22
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d!=0.22
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -89,7 +89,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d!=0.22
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d!='0.22'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -98,7 +98,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d!='0.22'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d!='0.22'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -107,7 +107,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d!='0.22'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d!=cast('0.22' as float)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -116,7 +116,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d!=cast('0.22' as
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d!=cast('0.22' as float)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -125,7 +125,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d!=cast('0.22' as
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<11.22
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -134,7 +134,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<11.22
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<11.22
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -143,7 +143,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<11.22
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<'11.22'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -152,7 +152,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<'11.22'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<'11.22'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -161,7 +161,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<'11.22'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<cast('11.22' as float)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -170,7 +170,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<cast('11.22' as
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<cast('11.22' as float)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -179,7 +179,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<cast('11.22' as
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -188,7 +188,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -197,7 +197,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=11.22
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -206,7 +206,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=11.22
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=11.22
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -215,7 +215,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=11.22
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<='11.22'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -224,7 +224,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<='11.22'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<='11.22'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -233,7 +233,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<='11.22'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=cast('11.22' as float)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -242,7 +242,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=cast('11.22' a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=cast('11.22' as float)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -251,7 +251,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=cast('11.22' a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=cast('11.22' as decimal)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -260,7 +260,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=cast('11.22' a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=cast('11.22' as decimal)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -269,7 +269,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=cast('11.22' a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=11.22BD
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -278,7 +278,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=11.22BD
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=11.22BD
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -287,7 +287,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=11.22BD
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=12
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -296,7 +296,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=12
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=12
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -305,7 +305,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d<=12
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.22', '1.0')
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -314,7 +314,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.22', '1.
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.22', '1.0')
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -323,7 +323,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.22', '1.
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.22', '11.22')
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -332,7 +332,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.22', '11
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.22', '11.22')
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -341,7 +341,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.22', '11
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.9', '1.0')
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -368,7 +368,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.9', 0.22
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.9', 0.22)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -377,7 +377,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.9', 0.22
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.9', 0.22, cast('11.22' as float))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -386,7 +386,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.9', 0.22
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.9', 0.22, cast('11.22' as float))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -395,7 +395,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d in ('0.9', 0.22
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -404,7 +404,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -413,7 +413,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and 1000
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -422,7 +422,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and 1000
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -431,7 +431,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and '2.0'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -440,7 +440,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and '
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and '2.0'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -449,7 +449,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and '
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and cast(3 as float)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -458,7 +458,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and cast(3 as float)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -467,7 +467,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 0 and c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 1 and cast(30 as char(10))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -476,7 +476,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 1 and c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 1 and cast(30 as char(10))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n5
@@ -485,4 +485,4 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n5 where d between 1 and c
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n5
 #### A masked pattern was here ####
-336445133500
+334427804500

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/orc_ppd_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_ppd_timestamp.q.out b/ql/src/test/results/clientpositive/llap/orc_ppd_timestamp.q.out
index bfaf50e..29fad31 100644
--- a/ql/src/test/results/clientpositive/llap/orc_ppd_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_ppd_timestamp.q.out
@@ -26,7 +26,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where cast(ts as string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-445649415500
+445653015500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where cast(ts as string)='2011-01-01 01:01:01'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -35,7 +35,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where cast(ts as string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-445649415500
+445653015500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts=cast('2011-01-01 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -44,7 +44,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts=cast('2011-01-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-445649415500
+445653015500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts=cast('2011-01-01 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -53,7 +53,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts=cast('2011-01-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-445649415500
+445653015500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts=cast('2011-01-01 01:01:01' as varchar(20))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -62,7 +62,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts=cast('2011-01-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-445649415500
+445653015500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts=cast('2011-01-01 01:01:01' as varchar(20))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -71,7 +71,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts=cast('2011-01-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-445649415500
+445653015500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts!=cast('2011-01-01 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -80,7 +80,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts!=cast('2011-01
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-1033234345500
+1033237945500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts!=cast('2011-01-01 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -89,7 +89,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts!=cast('2011-01
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-1033234345500
+1033237945500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<cast('2011-01-20 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -98,7 +98,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<cast('2011-01-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-445649415500
+445653015500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<cast('2011-01-20 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -107,7 +107,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<cast('2011-01-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-445649415500
+445653015500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<cast('2011-01-22 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -116,7 +116,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<cast('2011-01-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-1478883761000
+1478890961000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<cast('2011-01-22 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -125,7 +125,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<cast('2011-01-
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-1478883761000
+1478890961000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<cast('2010-10-01 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -152,7 +152,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<=cast('2011-01
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-445649415500
+445653015500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<=cast('2011-01-01 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -161,7 +161,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<=cast('2011-01
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-445649415500
+445653015500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<=cast('2011-01-20 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -170,7 +170,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<=cast('2011-01
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-1478883761000
+1478890961000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<=cast('2011-01-20 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -179,7 +179,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts<=cast('2011-01
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-1478883761000
+1478890961000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts in (cast('2011-01-02 01:01:01' as timestamp), cast('2011-01-20 01:01:01' as timestamp))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -188,7 +188,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts in (cast('2011
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-1033234345500
+1033237945500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts in (cast('2011-01-02 01:01:01' as timestamp), cast('2011-01-20 01:01:01' as timestamp))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -197,7 +197,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts in (cast('2011
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-1033234345500
+1033237945500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts in (cast('2011-01-01 01:01:01' as timestamp), cast('2011-01-20 01:01:01' as timestamp))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -206,7 +206,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts in (cast('2011
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-1478883761000
+1478890961000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts in (cast('2011-01-01 01:01:01' as timestamp), cast('2011-01-20 01:01:01' as timestamp))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -215,7 +215,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts in (cast('2011
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-1478883761000
+1478890961000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts in (cast('2011-01-02 01:01:01' as timestamp), cast('2011-01-08 01:01:01' as timestamp))
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -242,7 +242,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts between cast('
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-445649415500
+445653015500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts between cast('2010-10-01 01:01:01' as timestamp) and cast('2011-01-08 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -251,7 +251,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts between cast('
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-445649415500
+445653015500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts between cast('2010-10-01 01:01:01' as timestamp) and cast('2011-01-25 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -260,7 +260,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts between cast('
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-1478883761000
+1478890961000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts between cast('2010-10-01 01:01:01' as timestamp) and cast('2011-01-25 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2
@@ -269,7 +269,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts between cast('
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n2
 #### A masked pattern was here ####
-1478883761000
+1478890961000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n2 where ts between cast('2010-10-01 01:01:01' as timestamp) and cast('2010-11-01 01:01:01' as timestamp)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n2

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/orc_ppd_varchar.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/orc_ppd_varchar.q.out b/ql/src/test/results/clientpositive/llap/orc_ppd_varchar.q.out
index 897d33f..12d6f67 100644
--- a/ql/src/test/results/clientpositive/llap/orc_ppd_varchar.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_ppd_varchar.q.out
@@ -26,7 +26,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v="bee"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v="bee"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -35,7 +35,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v="bee"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v!="bee"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -44,7 +44,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v!="bee"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v!="bee"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -53,7 +53,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v!="bee"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
-336445133500
+334427804500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v<"world"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -62,7 +62,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v<"world"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v<"world"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -71,7 +71,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v<"world"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v<="world"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -80,7 +80,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v<="world"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v<="world"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -89,7 +89,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v<="world"
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v="bee   "
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -116,7 +116,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v in ("bee", "ora
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v in ("bee", "orange")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -125,7 +125,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v in ("bee", "ora
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v in ("bee", "world")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -134,7 +134,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v in ("bee", "wor
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v in ("bee", "world")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -143,7 +143,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v in ("bee", "wor
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v in ("orange")
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -170,7 +170,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v between "bee" a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v between "bee" and "orange"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -179,7 +179,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v between "bee" a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
--250934600000
+-252951929000
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v between "bee" and "zombie"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -188,7 +188,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v between "bee" a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v between "bee" and "zombie"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1
@@ -197,7 +197,7 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v between "bee" a
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@newtypesorc_n1
 #### A masked pattern was here ####
-85510533500
+81475875500
 PREHOOK: query: select sum(hash(*)) from newtypesorc_n1 where v between "orange" and "pine"
 PREHOOK: type: QUERY
 PREHOOK: Input: default@newtypesorc_n1


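Note on the sum(hash(*)) deltas in the two ORC PPD outputs above: they come from the revert itself, not from the predicates. With HIVE-12192 backed out, timestamp and date text is again interpreted in the JVM's default time zone instead of UTC, so the epoch value underlying each ts/date cell changes and the row hashes move with it. A minimal sketch of the underlying shift, assuming a JVM running in a non-UTC zone (the class name is illustrative, not from the patch; Hive's hash() has its own implementation, the point is only that the instant being hashed moves):

import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.time.ZoneOffset;

public class TsMillisDrift {
  public static void main(String[] args) {
    String txt = "2011-01-01 01:01:01";
    // Local-zone interpretation: what java.sql.Timestamp does after the revert.
    long localMillis = Timestamp.valueOf(txt).getTime();
    // UTC interpretation: what the reverted HIVE-12192 code path used.
    long utcMillis = LocalDateTime.parse(txt.replace(' ', 'T'))
        .toInstant(ZoneOffset.UTC).toEpochMilli();
    // Non-zero in any non-UTC zone; hashes derived from the instant shift with it.
    System.out.println(localMillis - utcMillis);
  }
}
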
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out b/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out
index fa2cdf3..1cd498f 100644
--- a/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out
@@ -1505,8 +1505,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprAndExpr(children: FilterTimestampScalarEqualTimestampColumn(val 2002-03-01 01:02:03, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampScalarLessEqualTimestampColumn(val 2002-03-01 01:02:03, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampScalarGreaterEqualTimestampColumn(val 2002-03-01 01:02:03, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampScalarNotEqualTimestampColumn(val 2002-04-01 01:02:03, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampScalarLessTimestampColumn(val 2002-02-01 01:02:03, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampScalarGreaterTimestampColumn(val 2002-04-01 01:02:03, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColEqualTimestampScalar(col 7:timestamp, val 2002-03-01 01:02:03)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColGreaterEqualTimestampScalar(col 7:timestamp, val 2002-03-01 01:02:03)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColLessEqualTimestampScalar(col 7:timestamp, val 2002-03-01 01:02:03)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColNotEqualTimestampScalar(col 7:timestamp, val 2002-04-01 01:02:03)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColGreaterTimestampScalar(col 7:timestamp, val 2002-02-01 01:02:03)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColLessTimestampScalar(col 7:timestamp, val 2002-04-01 01:02:03)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 0-0) -> 7:timestamp), FilterTimestampColNotEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-0) -> 7:timestamp), FilterTimestampColLessEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-0) -> 7:timestamp), FilterTimestampColLessTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-0) -> 7:timestamp), FilterTimestampColGreaterEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColSubtractIntervalYearMonthScalar(col 0:timestamp, val 1-0) -> 7:timestamp), FilterTimestampColGreaterTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColSubtractIntervalYearMonthScalar(col 0:timestamp, val 1-0) -> 7:timestamp))
-                    predicate: (((ts + INTERVAL'1-2') < TIMESTAMP'2002-04-01 01:02:03') and ((ts + INTERVAL'1-2') <= TIMESTAMP'2002-03-01 01:02:03') and ((ts + INTERVAL'1-2') <> TIMESTAMP'2002-04-01 01:02:03') and ((ts + INTERVAL'1-2') = TIMESTAMP'2002-03-01 01:02:03') and ((ts + INTERVAL'1-2') > TIMESTAMP'2002-02-01 01:02:03') and ((ts + INTERVAL'1-2') >= TIMESTAMP'2002-03-01 01:02:03') and (TIMESTAMP'2002-02-01 01:02:03' < (ts + INTERVAL'1-2')) and (TIMESTAMP'2002-03-01 01:02:03' <= (ts + INTERVAL'1-2')) and (TIMESTAMP'2002-03-01 01:02:03' = (ts + INTERVAL'1-2')) and (TIMESTAMP'2002-03-01 01:02:03' >= (ts + INTERVAL'1-2')) and (TIMESTAMP'2002-04-01 01:02:03' <> (ts + INTERVAL'1-2')) and (TIMESTAMP'2002-04-01 01:02:03' > (ts + INTERVAL'1-2')) and (ts < (ts + INTERVAL'1-0')) and (ts <= (ts + INTERVAL'1-0')) and (ts <> (ts + INTERVAL'1-0')) and (ts = (ts + INTERVAL'0-0')) and (ts > (ts - INTERVAL'1-0')) and (ts >= (ts - INTERVAL'1-0'))) (type: boolean)
+                        predicateExpression: FilterExprAndExpr(children: FilterTimestampScalarEqualTimestampColumn(val 2002-03-01 01:02:03.0, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampScalarLessEqualTimestampColumn(val 2002-03-01 01:02:03.0, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampScalarGreaterEqualTimestampColumn(val 2002-03-01 01:02:03.0, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampScalarNotEqualTimestampColumn(val 2002-04-01 01:02:03.0, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampScalarLessTimestampColumn(val 2002-02-01 01:02:03.0, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampScalarGreaterTimestampColumn(val 2002-04-01 01:02:03.0, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColEqualTimestampScalar(col 7:timestamp, val 2002-03-01 01:02:03.0)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColGreaterEqualTimestampScalar(col 7:timestamp, val 2002-03-01 01:02:03.0)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColLessEqualTimestampScalar(col 7:timestamp, val 2002-03-01 01:02:03.0)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColNotEqualTimestampScalar(col 7:timestamp, val 2002-04-01 01:02:03.0)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColGreaterTimestampScalar(col 7:timestamp, val 2002-02-01 01:02:03.0)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColLessTimestampScalar(col 7:timestamp, val 2002-04-01 01:02:03.0)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 7:timestamp), FilterTimestampColEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 0-0) -> 7:timestamp), FilterTimestampColNotEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-0) -> 7:timestamp), FilterTimestampColLessEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-0) -> 7:timestamp), FilterTimestampColLessTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-0) -> 7:timestamp), FilterTimestampColGreaterEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColSubtractIntervalYearMonthScalar(col 0:timestamp, val 1-0) -> 7:timestamp), FilterTimestampColGreaterTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColSubtractIntervalYearMonthScalar(col 0:timestamp, val 1-0) -> 7:timestamp))
+                    predicate: (((ts + INTERVAL'1-2') < TIMESTAMP'2002-04-01 01:02:03.0') and ((ts + INTERVAL'1-2') <= TIMESTAMP'2002-03-01 01:02:03.0') and ((ts + INTERVAL'1-2') <> TIMESTAMP'2002-04-01 01:02:03.0') and ((ts + INTERVAL'1-2') = TIMESTAMP'2002-03-01 01:02:03.0') and ((ts + INTERVAL'1-2') > TIMESTAMP'2002-02-01 01:02:03.0') and ((ts + INTERVAL'1-2') >= TIMESTAMP'2002-03-01 01:02:03.0') and (TIMESTAMP'2002-02-01 01:02:03.0' < (ts + INTERVAL'1-2')) and (TIMESTAMP'2002-03-01 01:02:03.0' <= (ts + INTERVAL'1-2')) and (TIMESTAMP'2002-03-01 01:02:03.0' = (ts + INTERVAL'1-2')) and (TIMESTAMP'2002-03-01 01:02:03.0' >= (ts + INTERVAL'1-2')) and (TIMESTAMP'2002-04-01 01:02:03.0' <> (ts + INTERVAL'1-2')) and (TIMESTAMP'2002-04-01 01:02:03.0' > (ts + INTERVAL'1-2')) and (ts < (ts + INTERVAL'1-0')) and (ts <= (ts + INTERVAL'1-0')) and (ts <> (ts + INTERVAL'1-0')) and (ts = (ts + INTERVAL'0-0')) and (ts > (ts - INTERVAL'1-0')) and (ts >= (ts - INTERVAL'1-0'))) (type: boolean)
                     Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ts (type: timestamp)
@@ -1699,8 +1699,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprAndExpr(children: FilterTimestampScalarEqualTimestampColumn(val 2001-01-01 01:02:03, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampScalarNotEqualTimestampColumn(val 2001-01-01 01:02:03, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampScalarLessEqualTimestampColumn(val 2001-01-01 01:02:03, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampScalarLessTimestampColumn(val 2001-01-01 01:02:03, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampScalarGreaterEqualTimestampColumn(val 2001-01-01 01:02:03, col 7:timestamp)(children: DateColSubtractIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampScalarGreaterTimestampColumn(val 2001-01-01 01:02:03, col 7:timestamp)(children: DateColSubtractIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampColEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampColNotEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampColGreaterEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampColGreaterTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampColLessEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03)(children: DateColSubtractIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampColLessTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03)(children: DateColSubtractIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampColEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampColNotEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampColLessEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampColLessTimestampColumn(col 0:timestamp, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampColGreaterEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: DateColSubtractIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampColGreaterTimestampColumn(col 0:timestamp, col 7:timestamp)(children: DateColSubtractIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp))
-                    predicate: (((dt + INTERVAL'0 01:02:03.000000000') = TIMESTAMP'2001-01-01 01:02:03') and ((dt + INTERVAL'0 01:02:03.000000000') >= TIMESTAMP'2001-01-01 01:02:03') and ((dt + INTERVAL'0 01:02:04.000000000') <> TIMESTAMP'2001-01-01 01:02:03') and ((dt + INTERVAL'0 01:02:04.000000000') > TIMESTAMP'2001-01-01 01:02:03') and ((dt - INTERVAL'0 01:02:03.000000000') <= TIMESTAMP'2001-01-01 01:02:03') and ((dt - INTERVAL'0 01:02:04.000000000') < TIMESTAMP'2001-01-01 01:02:03') and (TIMESTAMP'2001-01-01 01:02:03' < (dt + INTERVAL'0 01:02:04.000000000')) and (TIMESTAMP'2001-01-01 01:02:03' <= (dt + INTERVAL'0 01:02:03.000000000')) and (TIMESTAMP'2001-01-01 01:02:03' <> (dt + INTERVAL'0 01:02:04.000000000')) and (TIMESTAMP'2001-01-01 01:02:03' = (dt + INTERVAL'0 01:02:03.000000000')) and (TIMESTAMP'2001-01-01 01:02:03' > (dt - INTERVAL'0 01:02:04.000000000')) and (TIMESTAMP'2001-01-01 01:02:03' >= (dt - INTERVAL'0 01:02:03.000000000')) and (ts < (dt + INTERVAL'0 01:02:04.000000000')) and (ts <= (dt + INTERVAL'0 01:02:03.000000000')) and (ts <> (dt + INTERVAL'0 01:02:04.000000000')) and (ts = (dt + INTERVAL'0 01:02:03.000000000')) and (ts > (dt - INTERVAL'0 01:02:04.000000000')) and (ts >= (dt - INTERVAL'0 01:02:03.000000000'))) (type: boolean)
+                        predicateExpression: FilterExprAndExpr(children: FilterTimestampScalarEqualTimestampColumn(val 2001-01-01 01:02:03.0, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampScalarNotEqualTimestampColumn(val 2001-01-01 01:02:03.0, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampScalarLessEqualTimestampColumn(val 2001-01-01 01:02:03.0, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampScalarLessTimestampColumn(val 2001-01-01 01:02:03.0, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampScalarGreaterEqualTimestampColumn(val 2001-01-01 01:02:03.0, col 7:timestamp)(children: DateColSubtractIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampScalarGreaterTimestampColumn(val 2001-01-01 01:02:03.0, col 7:timestamp)(children: DateColSubtractIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampColEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03.0)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampColNotEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03.0)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampColGreaterEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03.0)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampColGreaterTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03.0)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampColLessEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03.0)(children: DateColSubtractIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampColLessTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03.0)(children: DateColSubtractIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampColEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampColNotEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampColLessEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampColLessTimestampColumn(col 0:timestamp, col 7:timestamp)(children: DateColAddIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp), FilterTimestampColGreaterEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: DateColSubtractIntervalDayTimeScalar(col 1:date, val 0 01:02:03.000000000) -> 7:timestamp), FilterTimestampColGreaterTimestampColumn(col 0:timestamp, col 7:timestamp)(children: DateColSubtractIntervalDayTimeScalar(col 1:date, val 0 01:02:04.000000000) -> 7:timestamp))
+                    predicate: (((dt + INTERVAL'0 01:02:03.000000000') = TIMESTAMP'2001-01-01 01:02:03.0') and ((dt + INTERVAL'0 01:02:03.000000000') >= TIMESTAMP'2001-01-01 01:02:03.0') and ((dt + INTERVAL'0 01:02:04.000000000') <> TIMESTAMP'2001-01-01 01:02:03.0') and ((dt + INTERVAL'0 01:02:04.000000000') > TIMESTAMP'2001-01-01 01:02:03.0') and ((dt - INTERVAL'0 01:02:03.000000000') <= TIMESTAMP'2001-01-01 01:02:03.0') and ((dt - INTERVAL'0 01:02:04.000000000') < TIMESTAMP'2001-01-01 01:02:03.0') and (TIMESTAMP'2001-01-01 01:02:03.0' < (dt + INTERVAL'0 01:02:04.000000000')) and (TIMESTAMP'2001-01-01 01:02:03.0' <= (dt + INTERVAL'0 01:02:03.000000000')) and (TIMESTAMP'2001-01-01 01:02:03.0' <> (dt + INTERVAL'0 01:02:04.000000000')) and (TIMESTAMP'2001-01-01 01:02:03.0' = (dt + INTERVAL'0 01:02:03.000000000')) and (TIMESTAMP'2001-01-01 01:02:03.0' > (dt - INTERVAL'0 01:02:04.000000000')) and (TIMESTAMP'2001-01-01 01:02:03.0' >= (dt - INTERVAL'0 01:02:03.000000000')) and (ts < (dt + INTERVAL'0 01:02:04.000000000')) and (ts <= (dt + INTERVAL'0 01:02:03.000000000')) and (ts <> (dt + INTERVAL'0 01:02:04.000000000')) and (ts = (dt + INTERVAL'0 01:02:03.000000000')) and (ts > (dt - INTERVAL'0 01:02:04.000000000')) and (ts >= (dt - INTERVAL'0 01:02:03.000000000'))) (type: boolean)
                     Statistics: Num rows: 1 Data size: 96 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ts (type: timestamp)
@@ -1893,8 +1893,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprAndExpr(children: FilterTimestampScalarEqualTimestampColumn(val 2001-01-01 01:02:03, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 0 00:00:00.000000000) -> 7:timestamp), FilterTimestampScalarNotEqualTimestampColumn(val 2001-01-01 01:02:03, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampScalarLessEqualTimestampColumn(val 2001-01-01 01:02:03, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampScalarLessTimestampColumn(val 2001-01-01 01:02:03, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampScalarGreaterEqualTimestampColumn(val 2001-01-01 01:02:03, col 7:timestamp)(children: TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampScalarGreaterTimestampColumn(val 2001-01-01 01:02:03, col 7:timestamp)(children: TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 0 00:00:00.000000000) -> 7:timestamp), FilterTimestampColNotEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColGreaterEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColGreaterTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColLessEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03)(children: TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColLessTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03)(children: TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 0 00:00:00.000000000) -> 7:timestamp), FilterTimestampColNotEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColLessEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColLessTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColGreaterEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColGreaterTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp))
-                    predicate: (((ts + INTERVAL'0 00:00:00.000000000') = TIMESTAMP'2001-01-01 01:02:03') and ((ts + INTERVAL'1 00:00:00.000000000') <> TIMESTAMP'2001-01-01 01:02:03') and ((ts + INTERVAL'1 00:00:00.000000000') > TIMESTAMP'2001-01-01 01:02:03') and ((ts + INTERVAL'1 00:00:00.000000000') >= TIMESTAMP'2001-01-01 01:02:03') and ((ts - INTERVAL'1 00:00:00.000000000') < TIMESTAMP'2001-01-01 01:02:03') and ((ts - INTERVAL'1 00:00:00.000000000') <= TIMESTAMP'2001-01-01 01:02:03') and (TIMESTAMP'2001-01-01 01:02:03' < (ts + INTERVAL'1 00:00:00.000000000')) and (TIMESTAMP'2001-01-01 01:02:03' <= (ts + INTERVAL'1 00:00:00.000000000')) and (TIMESTAMP'2001-01-01 01:02:03' <> (ts + INTERVAL'1 00:00:00.000000000')) and (TIMESTAMP'2001-01-01 01:02:03' = (ts + INTERVAL'0 00:00:00.000000000')) and (TIMESTAMP'2001-01-01 01:02:03' > (ts - INTERVAL'1 00:00:00.000000000')) and (TIMESTAMP'2001-01-01 01:02:03' >= (ts - INTERVAL'1 00:00:00.000000000')) and (ts < (ts + INTERVAL'1 00:00:00.000000000')) and (ts <= (ts + INTERVAL'1 00:00:00.000000000')) and (ts <> (ts + INTERVAL'1 00:00:00.000000000')) and (ts = (ts + INTERVAL'0 00:00:00.000000000')) and (ts > (ts - INTERVAL'1 00:00:00.000000000')) and (ts >= (ts - INTERVAL'1 00:00:00.000000000'))) (type: boolean)
+                        predicateExpression: FilterExprAndExpr(children: FilterTimestampScalarEqualTimestampColumn(val 2001-01-01 01:02:03.0, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 0 00:00:00.000000000) -> 7:timestamp), FilterTimestampScalarNotEqualTimestampColumn(val 2001-01-01 01:02:03.0, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampScalarLessEqualTimestampColumn(val 2001-01-01 01:02:03.0, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampScalarLessTimestampColumn(val 2001-01-01 01:02:03.0, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampScalarGreaterEqualTimestampColumn(val 2001-01-01 01:02:03.0, col 7:timestamp)(children: TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampScalarGreaterTimestampColumn(val 2001-01-01 01:02:03.0, col 7:timestamp)(children: TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03.0)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 0 00:00:00.000000000) -> 7:timestamp), FilterTimestampColNotEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03.0)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColGreaterEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03.0)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColGreaterTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03.0)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColLessEqualTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03.0)(children: TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColLessTimestampScalar(col 7:timestamp, val 2001-01-01 01:02:03.0)(children: TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 0 00:00:00.000000000) -> 7:timestamp), FilterTimestampColNotEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColLessEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColLessTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColGreaterEqualTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp), FilterTimestampColGreaterTimestampColumn(col 0:timestamp, col 7:timestamp)(children: TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 00:00:00.000000000) -> 7:timestamp))
+                    predicate: (((ts + INTERVAL'0 00:00:00.000000000') = TIMESTAMP'2001-01-01 01:02:03.0') and ((ts + INTERVAL'1 00:00:00.000000000') <> TIMESTAMP'2001-01-01 01:02:03.0') and ((ts + INTERVAL'1 00:00:00.000000000') > TIMESTAMP'2001-01-01 01:02:03.0') and ((ts + INTERVAL'1 00:00:00.000000000') >= TIMESTAMP'2001-01-01 01:02:03.0') and ((ts - INTERVAL'1 00:00:00.000000000') < TIMESTAMP'2001-01-01 01:02:03.0') and ((ts - INTERVAL'1 00:00:00.000000000') <= TIMESTAMP'2001-01-01 01:02:03.0') and (TIMESTAMP'2001-01-01 01:02:03.0' < (ts + INTERVAL'1 00:00:00.000000000')) and (TIMESTAMP'2001-01-01 01:02:03.0' <= (ts + INTERVAL'1 00:00:00.000000000')) and (TIMESTAMP'2001-01-01 01:02:03.0' <> (ts + INTERVAL'1 00:00:00.000000000')) and (TIMESTAMP'2001-01-01 01:02:03.0' = (ts + INTERVAL'0 00:00:00.000000000')) and (TIMESTAMP'2001-01-01 01:02:03.0' > (ts - INTERVAL'1 00:00:00.000000000')) and (TIMESTAMP'2001-01-01 01:02:03.0' >= (ts - INTERVAL'1 00:00:00.000000000')) and (ts < (ts + INTERVAL'1 00:00:00.000000000')) and (ts <= (ts + INTERVAL'1 00:00:00.000000000')) and (ts <> (ts + INTERVAL'1 00:00:00.000000000')) and (ts = (ts + INTERVAL'0 00:00:00.000000000')) and (ts > (ts - INTERVAL'1 00:00:00.000000000')) and (ts >= (ts - INTERVAL'1 00:00:00.000000000'))) (type: boolean)
                     Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ts (type: timestamp)

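Note on the vector_interval_2 plan diffs above: they are purely textual. After the revert, timestamp scalars in EXPLAIN output are rendered through java.sql.Timestamp.toString(), which always emits a fractional part (".0" when the nanos are zero), so the literals regain their ".0" suffix. A one-class illustration (the class name is illustrative, not from the patch):

import java.sql.Timestamp;

public class TsToStringDemo {
  public static void main(String[] args) {
    // Prints "2002-03-01 01:02:03.0": java.sql.Timestamp.toString()
    // always includes the fractional seconds, even when they are zero.
    System.out.println(Timestamp.valueOf("2002-03-01 01:02:03"));
  }
}
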
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out b/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out
index 2fadf29..af5815f 100644
--- a/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out
@@ -175,7 +175,7 @@ POSTHOOK: Input: default@interval_arithmetic_1
 dateval	_c1	_c2	_c3	_c4	_c5	_c6
 0004-09-22	0002-07-22	0006-11-22	0006-11-22	0002-07-22	0002-07-22	0006-11-22
 0528-10-27	0526-08-27	0530-12-27	0530-12-27	0526-08-27	0526-08-27	0530-12-27
-1319-02-02	1316-12-03	1321-04-02	1321-04-02	1316-12-03	1316-12-03	1321-04-02
+1319-02-02	1316-12-02	1321-04-02	1321-04-02	1316-12-02	1316-12-02	1321-04-02
 1404-07-23	1402-05-23	1406-09-23	1406-09-23	1402-05-23	1402-05-23	1406-09-23
 1815-05-06	1813-03-06	1817-07-06	1817-07-06	1813-03-06	1813-03-06	1817-07-06
 1883-04-17	1881-02-17	1885-06-17	1885-06-17	1881-02-17	1881-02-17	1885-06-17
@@ -272,7 +272,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [0, 3, 4, 5]
-                        selectExpressions: DateColSubtractDateScalar(col 0:date, val 1999-06-07) -> 3:interval_day_time, DateScalarSubtractDateColumn(val 1999-06-07, col 0:date) -> 4:interval_day_time, DateColSubtractDateColumn(col 0:date, col 0:date) -> 5:interval_day_time
+                        selectExpressions: DateColSubtractDateScalar(col 0:date, val 1999-06-07 00:00:00.0) -> 3:interval_day_time, DateScalarSubtractDateColumn(val 1999-06-07 00:00:00.0, col 0:date) -> 4:interval_day_time, DateColSubtractDateColumn(col 0:date, col 0:date) -> 5:interval_day_time
                     Statistics: Num rows: 50 Data size: 2744 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: date)
@@ -349,56 +349,56 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@interval_arithmetic_1
 #### A masked pattern was here ####
 dateval	_c1	_c2	_c3
-0004-09-22	-728551 00:00:00.000000000	728551 00:00:00.000000000	0 00:00:00.000000000
-0528-10-27	-537129 00:00:00.000000000	537129 00:00:00.000000000	0 00:00:00.000000000
-1319-02-02	-248490 00:00:00.000000000	248490 00:00:00.000000000	0 00:00:00.000000000
-1404-07-23	-217273 00:00:00.000000000	217273 00:00:00.000000000	0 00:00:00.000000000
-1815-05-06	-67237 00:00:00.000000000	67237 00:00:00.000000000	0 00:00:00.000000000
-1883-04-17	-42419 00:00:00.000000000	42419 00:00:00.000000000	0 00:00:00.000000000
+0004-09-22	-728552 23:00:00.000000000	728552 23:00:00.000000000	0 00:00:00.000000000
+0528-10-27	-537126 23:00:00.000000000	537126 23:00:00.000000000	0 00:00:00.000000000
+1319-02-02	-248481 23:00:00.000000000	248481 23:00:00.000000000	0 00:00:00.000000000
+1404-07-23	-217263 23:00:00.000000000	217263 23:00:00.000000000	0 00:00:00.000000000
+1815-05-06	-67236 23:00:00.000000000	67236 23:00:00.000000000	0 00:00:00.000000000
+1883-04-17	-42418 23:00:00.000000000	42418 23:00:00.000000000	0 00:00:00.000000000
 1966-08-16	-11983 00:00:00.000000000	11983 00:00:00.000000000	0 00:00:00.000000000
-1973-04-17	-9547 00:00:00.000000000	9547 00:00:00.000000000	0 00:00:00.000000000
+1973-04-17	-9546 23:00:00.000000000	9546 23:00:00.000000000	0 00:00:00.000000000
 1974-10-04	-9012 00:00:00.000000000	9012 00:00:00.000000000	0 00:00:00.000000000
-1976-03-03	-8496 00:00:00.000000000	8496 00:00:00.000000000	0 00:00:00.000000000
+1976-03-03	-8495 23:00:00.000000000	8495 23:00:00.000000000	0 00:00:00.000000000
 1976-05-06	-8432 00:00:00.000000000	8432 00:00:00.000000000	0 00:00:00.000000000
 1978-08-05	-7611 00:00:00.000000000	7611 00:00:00.000000000	0 00:00:00.000000000
-1981-04-25	-6617 00:00:00.000000000	6617 00:00:00.000000000	0 00:00:00.000000000
-1981-11-15	-6413 00:00:00.000000000	6413 00:00:00.000000000	0 00:00:00.000000000
+1981-04-25	-6616 23:00:00.000000000	6616 23:00:00.000000000	0 00:00:00.000000000
+1981-11-15	-6412 23:00:00.000000000	6412 23:00:00.000000000	0 00:00:00.000000000
 1985-07-20	-5070 00:00:00.000000000	5070 00:00:00.000000000	0 00:00:00.000000000
-1985-11-18	-4949 00:00:00.000000000	4949 00:00:00.000000000	0 00:00:00.000000000
-1987-02-21	-4489 00:00:00.000000000	4489 00:00:00.000000000	0 00:00:00.000000000
+1985-11-18	-4948 23:00:00.000000000	4948 23:00:00.000000000	0 00:00:00.000000000
+1987-02-21	-4488 23:00:00.000000000	4488 23:00:00.000000000	0 00:00:00.000000000
 1987-05-28	-4393 00:00:00.000000000	4393 00:00:00.000000000	0 00:00:00.000000000
 1998-10-16	-234 00:00:00.000000000	234 00:00:00.000000000	0 00:00:00.000000000
 1999-10-03	118 00:00:00.000000000	-118 00:00:00.000000000	0 00:00:00.000000000
-2000-12-18	560 00:00:00.000000000	-560 00:00:00.000000000	0 00:00:00.000000000
+2000-12-18	560 01:00:00.000000000	-560 01:00:00.000000000	0 00:00:00.000000000
 2002-05-10	1068 00:00:00.000000000	-1068 00:00:00.000000000	0 00:00:00.000000000
 2003-09-23	1569 00:00:00.000000000	-1569 00:00:00.000000000	0 00:00:00.000000000
-2004-03-07	1735 00:00:00.000000000	-1735 00:00:00.000000000	0 00:00:00.000000000
-2007-02-09	2804 00:00:00.000000000	-2804 00:00:00.000000000	0 00:00:00.000000000
-2009-01-21	3516 00:00:00.000000000	-3516 00:00:00.000000000	0 00:00:00.000000000
+2004-03-07	1735 01:00:00.000000000	-1735 01:00:00.000000000	0 00:00:00.000000000
+2007-02-09	2804 01:00:00.000000000	-2804 01:00:00.000000000	0 00:00:00.000000000
+2009-01-21	3516 01:00:00.000000000	-3516 01:00:00.000000000	0 00:00:00.000000000
 2010-04-08	3958 00:00:00.000000000	-3958 00:00:00.000000000	0 00:00:00.000000000
 2013-04-07	5053 00:00:00.000000000	-5053 00:00:00.000000000	0 00:00:00.000000000
 2013-04-10	5056 00:00:00.000000000	-5056 00:00:00.000000000	0 00:00:00.000000000
 2021-09-24	8145 00:00:00.000000000	-8145 00:00:00.000000000	0 00:00:00.000000000
-2024-11-11	9289 00:00:00.000000000	-9289 00:00:00.000000000	0 00:00:00.000000000
+2024-11-11	9289 01:00:00.000000000	-9289 01:00:00.000000000	0 00:00:00.000000000
 4143-07-08	783111 00:00:00.000000000	-783111 00:00:00.000000000	0 00:00:00.000000000
-4966-12-04	1083855 00:00:00.000000000	-1083855 00:00:00.000000000	0 00:00:00.000000000
-5339-02-01	1219784 00:00:00.000000000	-1219784 00:00:00.000000000	0 00:00:00.000000000
+4966-12-04	1083855 01:00:00.000000000	-1083855 01:00:00.000000000	0 00:00:00.000000000
+5339-02-01	1219784 01:00:00.000000000	-1219784 01:00:00.000000000	0 00:00:00.000000000
 5344-10-04	1221856 00:00:00.000000000	-1221856 00:00:00.000000000	0 00:00:00.000000000
 5397-07-13	1241131 00:00:00.000000000	-1241131 00:00:00.000000000	0 00:00:00.000000000
 5966-07-09	1448949 00:00:00.000000000	-1448949 00:00:00.000000000	0 00:00:00.000000000
 6229-06-28	1544997 00:00:00.000000000	-1544997 00:00:00.000000000	0 00:00:00.000000000
 6482-04-27	1637342 00:00:00.000000000	-1637342 00:00:00.000000000	0 00:00:00.000000000
-6631-11-13	1691962 00:00:00.000000000	-1691962 00:00:00.000000000	0 00:00:00.000000000
+6631-11-13	1691962 01:00:00.000000000	-1691962 01:00:00.000000000	0 00:00:00.000000000
 6705-09-28	1718944 00:00:00.000000000	-1718944 00:00:00.000000000	0 00:00:00.000000000
-6731-02-12	1728212 00:00:00.000000000	-1728212 00:00:00.000000000	0 00:00:00.000000000
-7160-12-02	1885195 00:00:00.000000000	-1885195 00:00:00.000000000	0 00:00:00.000000000
+6731-02-12	1728212 01:00:00.000000000	-1728212 01:00:00.000000000	0 00:00:00.000000000
+7160-12-02	1885195 01:00:00.000000000	-1885195 01:00:00.000000000	0 00:00:00.000000000
 7409-09-07	1976054 00:00:00.000000000	-1976054 00:00:00.000000000	0 00:00:00.000000000
 7503-06-23	2010310 00:00:00.000000000	-2010310 00:00:00.000000000	0 00:00:00.000000000
 8422-07-22	2345998 00:00:00.000000000	-2345998 00:00:00.000000000	0 00:00:00.000000000
-8521-01-16	2381970 00:00:00.000000000	-2381970 00:00:00.000000000	0 00:00:00.000000000
+8521-01-16	2381970 01:00:00.000000000	-2381970 01:00:00.000000000	0 00:00:00.000000000
 9075-06-13	2584462 00:00:00.000000000	-2584462 00:00:00.000000000	0 00:00:00.000000000
-9209-11-11	2633556 00:00:00.000000000	-2633556 00:00:00.000000000	0 00:00:00.000000000
-9403-01-09	2704106 00:00:00.000000000	-2704106 00:00:00.000000000	0 00:00:00.000000000
+9209-11-11	2633556 01:00:00.000000000	-2633556 01:00:00.000000000	0 00:00:00.000000000
+9403-01-09	2704106 01:00:00.000000000	-2704106 01:00:00.000000000	0 00:00:00.000000000
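
Note on the one-hour residues in the date-subtraction results above (560 01:00:00.000000000, -728552 23:00:00.000000000, and so on): this is the local-time artifact the reverted patch had eliminated. Date-to-date subtraction is again computed from local-midnight epoch millis, so any span whose endpoints sit on different sides of a DST transition (or, for the very old dates, a historical offset change) differs from a whole number of days by the offset difference. A sketch under an assumed zone (America/Los_Angeles here; the zone the test actually ran in is not recorded in this output, and the class name is illustrative):

import java.sql.Date;
import java.util.TimeZone;

public class DstDrift {
  public static void main(String[] args) {
    // Assumed zone for illustration only.
    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
    // 1999-06-07 falls in DST (UTC-7); 2000-12-18 does not (UTC-8).
    long diff = Date.valueOf("2000-12-18").getTime()
              - Date.valueOf("1999-06-07").getTime();
    // Prints "560 days + 3600000 ms": the extra hour matches the
    // 560 01:00:00.000000000 row in the output above.
    System.out.println(diff / 86_400_000L + " days + " + diff % 86_400_000L + " ms");
  }
}
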
 PREHOOK: query: explain vectorization expression
 select
   tsval,
@@ -539,53 +539,53 @@ POSTHOOK: Input: default@interval_arithmetic_1
 tsval	_c1	_c2	_c3	_c4	_c5	_c6
 0004-09-22 18:26:29.519542222	0002-07-22 18:26:29.519542222	0006-11-22 18:26:29.519542222	0006-11-22 18:26:29.519542222	0002-07-22 18:26:29.519542222	0002-07-22 18:26:29.519542222	0006-11-22 18:26:29.519542222
 0528-10-27 08:15:18.941718273	0526-08-27 08:15:18.941718273	0530-12-27 08:15:18.941718273	0530-12-27 08:15:18.941718273	0526-08-27 08:15:18.941718273	0526-08-27 08:15:18.941718273	0530-12-27 08:15:18.941718273
-1319-02-02 16:31:57.778	1316-12-03 16:31:57.778	1321-04-02 16:31:57.778	1321-04-02 16:31:57.778	1316-12-03 16:31:57.778	1316-12-03 16:31:57.778	1321-04-02 16:31:57.778
+1319-02-02 16:31:57.778	1316-12-02 16:31:57.778	1321-04-02 16:31:57.778	1321-04-02 16:31:57.778	1316-12-02 16:31:57.778	1316-12-02 16:31:57.778	1321-04-02 16:31:57.778
 1404-07-23 15:32:16.059185026	1402-05-23 15:32:16.059185026	1406-09-23 15:32:16.059185026	1406-09-23 15:32:16.059185026	1402-05-23 15:32:16.059185026	1402-05-23 15:32:16.059185026	1406-09-23 15:32:16.059185026
 1815-05-06 00:12:37.543584705	1813-03-06 00:12:37.543584705	1817-07-06 00:12:37.543584705	1817-07-06 00:12:37.543584705	1813-03-06 00:12:37.543584705	1813-03-06 00:12:37.543584705	1817-07-06 00:12:37.543584705
 1883-04-17 04:14:34.647766229	1881-02-17 04:14:34.647766229	1885-06-17 04:14:34.647766229	1885-06-17 04:14:34.647766229	1881-02-17 04:14:34.647766229	1881-02-17 04:14:34.647766229	1885-06-17 04:14:34.647766229
 1966-08-16 13:36:50.183618031	1964-06-16 13:36:50.183618031	1968-10-16 13:36:50.183618031	1968-10-16 13:36:50.183618031	1964-06-16 13:36:50.183618031	1964-06-16 13:36:50.183618031	1968-10-16 13:36:50.183618031
-1973-04-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1975-06-17 06:30:38.596784156	1975-06-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1975-06-17 06:30:38.596784156
-1974-10-04 17:21:03.989	1972-08-04 17:21:03.989	1976-12-04 17:21:03.989	1976-12-04 17:21:03.989	1972-08-04 17:21:03.989	1972-08-04 17:21:03.989	1976-12-04 17:21:03.989
-1976-03-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1978-05-03 04:54:33.000895162	1978-05-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1978-05-03 04:54:33.000895162
+1973-04-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1975-06-17 07:30:38.596784156	1975-06-17 07:30:38.596784156	1971-02-17 06:30:38.596784156	1971-02-17 06:30:38.596784156	1975-06-17 07:30:38.596784156
+1974-10-04 17:21:03.989	1972-08-04 17:21:03.989	1976-12-04 16:21:03.989	1976-12-04 16:21:03.989	1972-08-04 17:21:03.989	1972-08-04 17:21:03.989	1976-12-04 16:21:03.989
+1976-03-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1978-05-03 05:54:33.000895162	1978-05-03 05:54:33.000895162	1974-01-03 04:54:33.000895162	1974-01-03 04:54:33.000895162	1978-05-03 05:54:33.000895162
 1976-05-06 00:42:30.910786948	1974-03-06 00:42:30.910786948	1978-07-06 00:42:30.910786948	1978-07-06 00:42:30.910786948	1974-03-06 00:42:30.910786948	1974-03-06 00:42:30.910786948	1978-07-06 00:42:30.910786948
 1978-08-05 14:41:05.501	1976-06-05 14:41:05.501	1980-10-05 14:41:05.501	1980-10-05 14:41:05.501	1976-06-05 14:41:05.501	1976-06-05 14:41:05.501	1980-10-05 14:41:05.501
-1981-04-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1983-06-25 09:01:12.077192689	1983-06-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1983-06-25 09:01:12.077192689
-1981-11-15 23:03:10.999338387	1979-09-15 23:03:10.999338387	1984-01-15 23:03:10.999338387	1984-01-15 23:03:10.999338387	1979-09-15 23:03:10.999338387	1979-09-15 23:03:10.999338387	1984-01-15 23:03:10.999338387
+1981-04-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1983-06-25 10:01:12.077192689	1983-06-25 10:01:12.077192689	1979-02-25 09:01:12.077192689	1979-02-25 09:01:12.077192689	1983-06-25 10:01:12.077192689
+1981-11-15 23:03:10.999338387	1979-09-16 00:03:10.999338387	1984-01-15 23:03:10.999338387	1984-01-15 23:03:10.999338387	1979-09-16 00:03:10.999338387	1979-09-16 00:03:10.999338387	1984-01-15 23:03:10.999338387
 1985-07-20 09:30:11	1983-05-20 09:30:11	1987-09-20 09:30:11	1987-09-20 09:30:11	1983-05-20 09:30:11	1983-05-20 09:30:11	1987-09-20 09:30:11
-1985-11-18 16:37:54	1983-09-18 16:37:54	1988-01-18 16:37:54	1988-01-18 16:37:54	1983-09-18 16:37:54	1983-09-18 16:37:54	1988-01-18 16:37:54
-1987-02-21 19:48:29	1984-12-21 19:48:29	1989-04-21 19:48:29	1989-04-21 19:48:29	1984-12-21 19:48:29	1984-12-21 19:48:29	1989-04-21 19:48:29
-1987-05-28 13:52:07.900916635	1985-03-28 13:52:07.900916635	1989-07-28 13:52:07.900916635	1989-07-28 13:52:07.900916635	1985-03-28 13:52:07.900916635	1985-03-28 13:52:07.900916635	1989-07-28 13:52:07.900916635
-1998-10-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	2000-12-16 20:05:29.397591987	2000-12-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	2000-12-16 20:05:29.397591987
-1999-10-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	2001-12-03 16:59:10.396903939	2001-12-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	2001-12-03 16:59:10.396903939
-2000-12-18 08:42:30.000595596	1998-10-18 08:42:30.000595596	2003-02-18 08:42:30.000595596	2003-02-18 08:42:30.000595596	1998-10-18 08:42:30.000595596	1998-10-18 08:42:30.000595596	2003-02-18 08:42:30.000595596
-2002-05-10 05:29:48.990818073	2000-03-10 05:29:48.990818073	2004-07-10 05:29:48.990818073	2004-07-10 05:29:48.990818073	2000-03-10 05:29:48.990818073	2000-03-10 05:29:48.990818073	2004-07-10 05:29:48.990818073
-2003-09-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2005-11-23 22:33:17.00003252	2005-11-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2005-11-23 22:33:17.00003252
-2004-03-07 20:14:13	2002-01-07 20:14:13	2006-05-07 20:14:13	2006-05-07 20:14:13	2002-01-07 20:14:13	2002-01-07 20:14:13	2006-05-07 20:14:13
-2007-02-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2009-04-09 05:17:29.368756876	2009-04-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2009-04-09 05:17:29.368756876
-2009-01-21 10:49:07.108	2006-11-21 10:49:07.108	2011-03-21 10:49:07.108	2011-03-21 10:49:07.108	2006-11-21 10:49:07.108	2006-11-21 10:49:07.108	2011-03-21 10:49:07.108
-2010-04-08 02:43:35.861742727	2008-02-08 02:43:35.861742727	2012-06-08 02:43:35.861742727	2012-06-08 02:43:35.861742727	2008-02-08 02:43:35.861742727	2008-02-08 02:43:35.861742727	2012-06-08 02:43:35.861742727
-2013-04-07 02:44:43.00086821	2011-02-07 02:44:43.00086821	2015-06-07 02:44:43.00086821	2015-06-07 02:44:43.00086821	2011-02-07 02:44:43.00086821	2011-02-07 02:44:43.00086821	2015-06-07 02:44:43.00086821
-2013-04-10 00:43:46.854731546	2011-02-10 00:43:46.854731546	2015-06-10 00:43:46.854731546	2015-06-10 00:43:46.854731546	2011-02-10 00:43:46.854731546	2011-02-10 00:43:46.854731546	2015-06-10 00:43:46.854731546
-2021-09-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2023-11-24 03:18:32.413655165	2023-11-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2023-11-24 03:18:32.413655165
-2024-11-11 16:42:41.101	2022-09-11 16:42:41.101	2027-01-11 16:42:41.101	2027-01-11 16:42:41.101	2022-09-11 16:42:41.101	2022-09-11 16:42:41.101	2027-01-11 16:42:41.101
+1985-11-18 16:37:54	1983-09-18 17:37:54	1988-01-18 16:37:54	1988-01-18 16:37:54	1983-09-18 17:37:54	1983-09-18 17:37:54	1988-01-18 16:37:54
+1987-02-21 19:48:29	1984-12-21 19:48:29	1989-04-21 20:48:29	1989-04-21 20:48:29	1984-12-21 19:48:29	1984-12-21 19:48:29	1989-04-21 20:48:29
+1987-05-28 13:52:07.900916635	1985-03-28 12:52:07.900916635	1989-07-28 13:52:07.900916635	1989-07-28 13:52:07.900916635	1985-03-28 12:52:07.900916635	1985-03-28 12:52:07.900916635	1989-07-28 13:52:07.900916635
+1998-10-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	2000-12-16 19:05:29.397591987	2000-12-16 19:05:29.397591987	1996-08-16 20:05:29.397591987	1996-08-16 20:05:29.397591987	2000-12-16 19:05:29.397591987
+1999-10-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	2001-12-03 15:59:10.396903939	2001-12-03 15:59:10.396903939	1997-08-03 16:59:10.396903939	1997-08-03 16:59:10.396903939	2001-12-03 15:59:10.396903939
+2000-12-18 08:42:30.000595596	1998-10-18 09:42:30.000595596	2003-02-18 08:42:30.000595596	2003-02-18 08:42:30.000595596	1998-10-18 09:42:30.000595596	1998-10-18 09:42:30.000595596	2003-02-18 08:42:30.000595596
+2002-05-10 05:29:48.990818073	2000-03-10 04:29:48.990818073	2004-07-10 05:29:48.990818073	2004-07-10 05:29:48.990818073	2000-03-10 04:29:48.990818073	2000-03-10 04:29:48.990818073	2004-07-10 05:29:48.990818073
+2003-09-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2005-11-23 21:33:17.00003252	2005-11-23 21:33:17.00003252	2001-07-23 22:33:17.00003252	2001-07-23 22:33:17.00003252	2005-11-23 21:33:17.00003252
+2004-03-07 20:14:13	2002-01-07 20:14:13	2006-05-07 21:14:13	2006-05-07 21:14:13	2002-01-07 20:14:13	2002-01-07 20:14:13	2006-05-07 21:14:13
+2007-02-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2009-04-09 06:17:29.368756876	2009-04-09 06:17:29.368756876	2004-12-09 05:17:29.368756876	2004-12-09 05:17:29.368756876	2009-04-09 06:17:29.368756876
+2009-01-21 10:49:07.108	2006-11-21 10:49:07.108	2011-03-21 11:49:07.108	2011-03-21 11:49:07.108	2006-11-21 10:49:07.108	2006-11-21 10:49:07.108	2011-03-21 11:49:07.108
+2010-04-08 02:43:35.861742727	2008-02-08 01:43:35.861742727	2012-06-08 02:43:35.861742727	2012-06-08 02:43:35.861742727	2008-02-08 01:43:35.861742727	2008-02-08 01:43:35.861742727	2012-06-08 02:43:35.861742727
+2013-04-07 02:44:43.00086821	2011-02-07 01:44:43.00086821	2015-06-07 02:44:43.00086821	2015-06-07 02:44:43.00086821	2011-02-07 01:44:43.00086821	2011-02-07 01:44:43.00086821	2015-06-07 02:44:43.00086821
+2013-04-10 00:43:46.854731546	2011-02-09 23:43:46.854731546	2015-06-10 00:43:46.854731546	2015-06-10 00:43:46.854731546	2011-02-09 23:43:46.854731546	2011-02-09 23:43:46.854731546	2015-06-10 00:43:46.854731546
+2021-09-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2023-11-24 02:18:32.413655165	2023-11-24 02:18:32.413655165	2019-07-24 03:18:32.413655165	2019-07-24 03:18:32.413655165	2023-11-24 02:18:32.413655165
+2024-11-11 16:42:41.101	2022-09-11 17:42:41.101	2027-01-11 16:42:41.101	2027-01-11 16:42:41.101	2022-09-11 17:42:41.101	2022-09-11 17:42:41.101	2027-01-11 16:42:41.101
 4143-07-08 10:53:27.252802259	4141-05-08 10:53:27.252802259	4145-09-08 10:53:27.252802259	4145-09-08 10:53:27.252802259	4141-05-08 10:53:27.252802259	4141-05-08 10:53:27.252802259	4145-09-08 10:53:27.252802259
-4966-12-04 09:30:55.202	4964-10-04 09:30:55.202	4969-02-04 09:30:55.202	4969-02-04 09:30:55.202	4964-10-04 09:30:55.202	4964-10-04 09:30:55.202	4969-02-04 09:30:55.202
-5339-02-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5341-04-01 14:10:01.085678691	5341-04-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5341-04-01 14:10:01.085678691
-5344-10-04 18:40:08.165	5342-08-04 18:40:08.165	5346-12-04 18:40:08.165	5346-12-04 18:40:08.165	5342-08-04 18:40:08.165	5342-08-04 18:40:08.165	5346-12-04 18:40:08.165
+4966-12-04 09:30:55.202	4964-10-04 10:30:55.202	4969-02-04 09:30:55.202	4969-02-04 09:30:55.202	4964-10-04 10:30:55.202	4964-10-04 10:30:55.202	4969-02-04 09:30:55.202
+5339-02-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5341-04-01 15:10:01.085678691	5341-04-01 15:10:01.085678691	5336-12-01 14:10:01.085678691	5336-12-01 14:10:01.085678691	5341-04-01 15:10:01.085678691
+5344-10-04 18:40:08.165	5342-08-04 18:40:08.165	5346-12-04 17:40:08.165	5346-12-04 17:40:08.165	5342-08-04 18:40:08.165	5342-08-04 18:40:08.165	5346-12-04 17:40:08.165
 5397-07-13 07:12:32.000896438	5395-05-13 07:12:32.000896438	5399-09-13 07:12:32.000896438	5399-09-13 07:12:32.000896438	5395-05-13 07:12:32.000896438	5395-05-13 07:12:32.000896438	5399-09-13 07:12:32.000896438
 5966-07-09 03:30:50.597	5964-05-09 03:30:50.597	5968-09-09 03:30:50.597	5968-09-09 03:30:50.597	5964-05-09 03:30:50.597	5964-05-09 03:30:50.597	5968-09-09 03:30:50.597
 6229-06-28 02:54:28.970117179	6227-04-28 02:54:28.970117179	6231-08-28 02:54:28.970117179	6231-08-28 02:54:28.970117179	6227-04-28 02:54:28.970117179	6227-04-28 02:54:28.970117179	6231-08-28 02:54:28.970117179
-6482-04-27 12:07:38.073915413	6480-02-27 12:07:38.073915413	6484-06-27 12:07:38.073915413	6484-06-27 12:07:38.073915413	6480-02-27 12:07:38.073915413	6480-02-27 12:07:38.073915413	6484-06-27 12:07:38.073915413
-6631-11-13 16:31:29.702202248	6629-09-13 16:31:29.702202248	6634-01-13 16:31:29.702202248	6634-01-13 16:31:29.702202248	6629-09-13 16:31:29.702202248	6629-09-13 16:31:29.702202248	6634-01-13 16:31:29.702202248
-6705-09-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6707-11-28 18:27:28.000845672	6707-11-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6707-11-28 18:27:28.000845672
-6731-02-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6733-04-12 08:12:48.287783702	6733-04-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6733-04-12 08:12:48.287783702
-7160-12-02 06:00:24.81200852	7158-10-02 06:00:24.81200852	7163-02-02 06:00:24.81200852	7163-02-02 06:00:24.81200852	7158-10-02 06:00:24.81200852	7158-10-02 06:00:24.81200852	7163-02-02 06:00:24.81200852
-7409-09-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7411-11-07 23:33:32.459349602	7411-11-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7411-11-07 23:33:32.459349602
+6482-04-27 12:07:38.073915413	6480-02-27 11:07:38.073915413	6484-06-27 12:07:38.073915413	6484-06-27 12:07:38.073915413	6480-02-27 11:07:38.073915413	6480-02-27 11:07:38.073915413	6484-06-27 12:07:38.073915413
+6631-11-13 16:31:29.702202248	6629-09-13 17:31:29.702202248	6634-01-13 16:31:29.702202248	6634-01-13 16:31:29.702202248	6629-09-13 17:31:29.702202248	6629-09-13 17:31:29.702202248	6634-01-13 16:31:29.702202248
+6705-09-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6707-11-28 17:27:28.000845672	6707-11-28 17:27:28.000845672	6703-07-28 18:27:28.000845672	6703-07-28 18:27:28.000845672	6707-11-28 17:27:28.000845672
+6731-02-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6733-04-12 09:12:48.287783702	6733-04-12 09:12:48.287783702	6728-12-12 08:12:48.287783702	6728-12-12 08:12:48.287783702	6733-04-12 09:12:48.287783702
+7160-12-02 06:00:24.81200852	7158-10-02 07:00:24.81200852	7163-02-02 06:00:24.81200852	7163-02-02 06:00:24.81200852	7158-10-02 07:00:24.81200852	7158-10-02 07:00:24.81200852	7163-02-02 06:00:24.81200852
+7409-09-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7411-11-07 22:33:32.459349602	7411-11-07 22:33:32.459349602	7407-07-07 23:33:32.459349602	7407-07-07 23:33:32.459349602	7411-11-07 22:33:32.459349602
 7503-06-23 23:14:17.486	7501-04-23 23:14:17.486	7505-08-23 23:14:17.486	7505-08-23 23:14:17.486	7501-04-23 23:14:17.486	7501-04-23 23:14:17.486	7505-08-23 23:14:17.486
 8422-07-22 03:21:45.745036084	8420-05-22 03:21:45.745036084	8424-09-22 03:21:45.745036084	8424-09-22 03:21:45.745036084	8420-05-22 03:21:45.745036084	8420-05-22 03:21:45.745036084	8424-09-22 03:21:45.745036084
-8521-01-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8523-03-16 20:42:05.668832388	8523-03-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8523-03-16 20:42:05.668832388
+8521-01-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8523-03-16 21:42:05.668832388	8523-03-16 21:42:05.668832388	8518-11-16 20:42:05.668832388	8518-11-16 20:42:05.668832388	8523-03-16 21:42:05.668832388
 9075-06-13 16:20:09.218517797	9073-04-13 16:20:09.218517797	9077-08-13 16:20:09.218517797	9077-08-13 16:20:09.218517797	9073-04-13 16:20:09.218517797	9073-04-13 16:20:09.218517797	9077-08-13 16:20:09.218517797
-9209-11-11 04:08:58.223768453	9207-09-11 04:08:58.223768453	9212-01-11 04:08:58.223768453	9212-01-11 04:08:58.223768453	9207-09-11 04:08:58.223768453	9207-09-11 04:08:58.223768453	9212-01-11 04:08:58.223768453
+9209-11-11 04:08:58.223768453	9207-09-11 05:08:58.223768453	9212-01-11 04:08:58.223768453	9212-01-11 04:08:58.223768453	9207-09-11 05:08:58.223768453	9207-09-11 05:08:58.223768453	9212-01-11 04:08:58.223768453
 9403-01-09 18:12:33.547	9400-11-09 18:12:33.547	9405-03-09 18:12:33.547	9405-03-09 18:12:33.547	9400-11-09 18:12:33.547	9400-11-09 18:12:33.547	9405-03-09 18:12:33.547
 PREHOOK: query: explain vectorization expression
 select
@@ -832,50 +832,50 @@ dateval	_c1	_c2	_c3	_c4	_c5	_c6
 1404-07-23	1404-04-14 12:37:26.876543211	1404-10-30 11:22:33.123456789	1404-10-30 11:22:33.123456789	1404-04-14 12:37:26.876543211	1404-04-14 12:37:26.876543211	1404-10-30 11:22:33.123456789
 1815-05-06	1815-01-26 12:37:26.876543211	1815-08-13 11:22:33.123456789	1815-08-13 11:22:33.123456789	1815-01-26 12:37:26.876543211	1815-01-26 12:37:26.876543211	1815-08-13 11:22:33.123456789
 1883-04-17	1883-01-07 12:37:26.876543211	1883-07-25 11:22:33.123456789	1883-07-25 11:22:33.123456789	1883-01-07 12:37:26.876543211	1883-01-07 12:37:26.876543211	1883-07-25 11:22:33.123456789
-1966-08-16	1966-05-08 12:37:26.876543211	1966-11-23 11:22:33.123456789	1966-11-23 11:22:33.123456789	1966-05-08 12:37:26.876543211	1966-05-08 12:37:26.876543211	1966-11-23 11:22:33.123456789
-1973-04-17	1973-01-07 12:37:26.876543211	1973-07-25 11:22:33.123456789	1973-07-25 11:22:33.123456789	1973-01-07 12:37:26.876543211	1973-01-07 12:37:26.876543211	1973-07-25 11:22:33.123456789
-1974-10-04	1974-06-26 12:37:26.876543211	1975-01-11 11:22:33.123456789	1975-01-11 11:22:33.123456789	1974-06-26 12:37:26.876543211	1974-06-26 12:37:26.876543211	1975-01-11 11:22:33.123456789
-1976-03-03	1975-11-24 12:37:26.876543211	1976-06-10 11:22:33.123456789	1976-06-10 11:22:33.123456789	1975-11-24 12:37:26.876543211	1975-11-24 12:37:26.876543211	1976-06-10 11:22:33.123456789
-1976-05-06	1976-01-27 12:37:26.876543211	1976-08-13 11:22:33.123456789	1976-08-13 11:22:33.123456789	1976-01-27 12:37:26.876543211	1976-01-27 12:37:26.876543211	1976-08-13 11:22:33.123456789
-1978-08-05	1978-04-27 12:37:26.876543211	1978-11-12 11:22:33.123456789	1978-11-12 11:22:33.123456789	1978-04-27 12:37:26.876543211	1978-04-27 12:37:26.876543211	1978-11-12 11:22:33.123456789
-1981-04-25	1981-01-15 12:37:26.876543211	1981-08-02 11:22:33.123456789	1981-08-02 11:22:33.123456789	1981-01-15 12:37:26.876543211	1981-01-15 12:37:26.876543211	1981-08-02 11:22:33.123456789
-1981-11-15	1981-08-07 12:37:26.876543211	1982-02-22 11:22:33.123456789	1982-02-22 11:22:33.123456789	1981-08-07 12:37:26.876543211	1981-08-07 12:37:26.876543211	1982-02-22 11:22:33.123456789
-1985-07-20	1985-04-11 12:37:26.876543211	1985-10-27 11:22:33.123456789	1985-10-27 11:22:33.123456789	1985-04-11 12:37:26.876543211	1985-04-11 12:37:26.876543211	1985-10-27 11:22:33.123456789
-1985-11-18	1985-08-10 12:37:26.876543211	1986-02-25 11:22:33.123456789	1986-02-25 11:22:33.123456789	1985-08-10 12:37:26.876543211	1985-08-10 12:37:26.876543211	1986-02-25 11:22:33.123456789
-1987-02-21	1986-11-13 12:37:26.876543211	1987-05-31 11:22:33.123456789	1987-05-31 11:22:33.123456789	1986-11-13 12:37:26.876543211	1986-11-13 12:37:26.876543211	1987-05-31 11:22:33.123456789
-1987-05-28	1987-02-17 12:37:26.876543211	1987-09-04 11:22:33.123456789	1987-09-04 11:22:33.123456789	1987-02-17 12:37:26.876543211	1987-02-17 12:37:26.876543211	1987-09-04 11:22:33.123456789
-1998-10-16	1998-07-08 12:37:26.876543211	1999-01-23 11:22:33.123456789	1999-01-23 11:22:33.123456789	1998-07-08 12:37:26.876543211	1998-07-08 12:37:26.876543211	1999-01-23 11:22:33.123456789
-1999-10-03	1999-06-25 12:37:26.876543211	2000-01-10 11:22:33.123456789	2000-01-10 11:22:33.123456789	1999-06-25 12:37:26.876543211	1999-06-25 12:37:26.876543211	2000-01-10 11:22:33.123456789
-2000-12-18	2000-09-09 12:37:26.876543211	2001-03-27 11:22:33.123456789	2001-03-27 11:22:33.123456789	2000-09-09 12:37:26.876543211	2000-09-09 12:37:26.876543211	2001-03-27 11:22:33.123456789
-2002-05-10	2002-01-30 12:37:26.876543211	2002-08-17 11:22:33.123456789	2002-08-17 11:22:33.123456789	2002-01-30 12:37:26.876543211	2002-01-30 12:37:26.876543211	2002-08-17 11:22:33.123456789
-2003-09-23	2003-06-15 12:37:26.876543211	2003-12-31 11:22:33.123456789	2003-12-31 11:22:33.123456789	2003-06-15 12:37:26.876543211	2003-06-15 12:37:26.876543211	2003-12-31 11:22:33.123456789
-2004-03-07	2003-11-28 12:37:26.876543211	2004-06-14 11:22:33.123456789	2004-06-14 11:22:33.123456789	2003-11-28 12:37:26.876543211	2003-11-28 12:37:26.876543211	2004-06-14 11:22:33.123456789
-2007-02-09	2006-11-01 12:37:26.876543211	2007-05-19 11:22:33.123456789	2007-05-19 11:22:33.123456789	2006-11-01 12:37:26.876543211	2006-11-01 12:37:26.876543211	2007-05-19 11:22:33.123456789
-2009-01-21	2008-10-13 12:37:26.876543211	2009-04-30 11:22:33.123456789	2009-04-30 11:22:33.123456789	2008-10-13 12:37:26.876543211	2008-10-13 12:37:26.876543211	2009-04-30 11:22:33.123456789
-2010-04-08	2009-12-29 12:37:26.876543211	2010-07-16 11:22:33.123456789	2010-07-16 11:22:33.123456789	2009-12-29 12:37:26.876543211	2009-12-29 12:37:26.876543211	2010-07-16 11:22:33.123456789
-2013-04-07	2012-12-28 12:37:26.876543211	2013-07-15 11:22:33.123456789	2013-07-15 11:22:33.123456789	2012-12-28 12:37:26.876543211	2012-12-28 12:37:26.876543211	2013-07-15 11:22:33.123456789
-2013-04-10	2012-12-31 12:37:26.876543211	2013-07-18 11:22:33.123456789	2013-07-18 11:22:33.123456789	2012-12-31 12:37:26.876543211	2012-12-31 12:37:26.876543211	2013-07-18 11:22:33.123456789
-2021-09-24	2021-06-16 12:37:26.876543211	2022-01-01 11:22:33.123456789	2022-01-01 11:22:33.123456789	2021-06-16 12:37:26.876543211	2021-06-16 12:37:26.876543211	2022-01-01 11:22:33.123456789
-2024-11-11	2024-08-03 12:37:26.876543211	2025-02-18 11:22:33.123456789	2025-02-18 11:22:33.123456789	2024-08-03 12:37:26.876543211	2024-08-03 12:37:26.876543211	2025-02-18 11:22:33.123456789
+1966-08-16	1966-05-08 12:37:26.876543211	1966-11-23 10:22:33.123456789	1966-11-23 10:22:33.123456789	1966-05-08 12:37:26.876543211	1966-05-08 12:37:26.876543211	1966-11-23 10:22:33.123456789
+1973-04-17	1973-01-07 12:37:26.876543211	1973-07-25 12:22:33.123456789	1973-07-25 12:22:33.123456789	1973-01-07 12:37:26.876543211	1973-01-07 12:37:26.876543211	1973-07-25 12:22:33.123456789
+1974-10-04	1974-06-26 12:37:26.876543211	1975-01-11 10:22:33.123456789	1975-01-11 10:22:33.123456789	1974-06-26 12:37:26.876543211	1974-06-26 12:37:26.876543211	1975-01-11 10:22:33.123456789
+1976-03-03	1975-11-24 12:37:26.876543211	1976-06-10 12:22:33.123456789	1976-06-10 12:22:33.123456789	1975-11-24 12:37:26.876543211	1975-11-24 12:37:26.876543211	1976-06-10 12:22:33.123456789
+1976-05-06	1976-01-27 11:37:26.876543211	1976-08-13 11:22:33.123456789	1976-08-13 11:22:33.123456789	1976-01-27 11:37:26.876543211	1976-01-27 11:37:26.876543211	1976-08-13 11:22:33.123456789
+1978-08-05	1978-04-27 11:37:26.876543211	1978-11-12 10:22:33.123456789	1978-11-12 10:22:33.123456789	1978-04-27 11:37:26.876543211	1978-04-27 11:37:26.876543211	1978-11-12 10:22:33.123456789
+1981-04-25	1981-01-15 12:37:26.876543211	1981-08-02 12:22:33.123456789	1981-08-02 12:22:33.123456789	1981-01-15 12:37:26.876543211	1981-01-15 12:37:26.876543211	1981-08-02 12:22:33.123456789
+1981-11-15	1981-08-07 13:37:26.876543211	1982-02-22 11:22:33.123456789	1982-02-22 11:22:33.123456789	1981-08-07 13:37:26.876543211	1981-08-07 13:37:26.876543211	1982-02-22 11:22:33.123456789
+1985-07-20	1985-04-11 11:37:26.876543211	1985-10-27 10:22:33.123456789	1985-10-27 10:22:33.123456789	1985-04-11 11:37:26.876543211	1985-04-11 11:37:26.876543211	1985-10-27 10:22:33.123456789
+1985-11-18	1985-08-10 13:37:26.876543211	1986-02-25 11:22:33.123456789	1986-02-25 11:22:33.123456789	1985-08-10 13:37:26.876543211	1985-08-10 13:37:26.876543211	1986-02-25 11:22:33.123456789
+1987-02-21	1986-11-13 12:37:26.876543211	1987-05-31 12:22:33.123456789	1987-05-31 12:22:33.123456789	1986-11-13 12:37:26.876543211	1986-11-13 12:37:26.876543211	1987-05-31 12:22:33.123456789
+1987-05-28	1987-02-17 11:37:26.876543211	1987-09-04 11:22:33.123456789	1987-09-04 11:22:33.123456789	1987-02-17 11:37:26.876543211	1987-02-17 11:37:26.876543211	1987-09-04 11:22:33.123456789
+1998-10-16	1998-07-08 12:37:26.876543211	1999-01-23 10:22:33.123456789	1999-01-23 10:22:33.123456789	1998-07-08 12:37:26.876543211	1998-07-08 12:37:26.876543211	1999-01-23 10:22:33.123456789
+1999-10-03	1999-06-25 12:37:26.876543211	2000-01-10 10:22:33.123456789	2000-01-10 10:22:33.123456789	1999-06-25 12:37:26.876543211	1999-06-25 12:37:26.876543211	2000-01-10 10:22:33.123456789
+2000-12-18	2000-09-09 13:37:26.876543211	2001-03-27 11:22:33.123456789	2001-03-27 11:22:33.123456789	2000-09-09 13:37:26.876543211	2000-09-09 13:37:26.876543211	2001-03-27 11:22:33.123456789
+2002-05-10	2002-01-30 11:37:26.876543211	2002-08-17 11:22:33.123456789	2002-08-17 11:22:33.123456789	2002-01-30 11:37:26.876543211	2002-01-30 11:37:26.876543211	2002-08-17 11:22:33.123456789
+2003-09-23	2003-06-15 12:37:26.876543211	2003-12-31 10:22:33.123456789	2003-12-31 10:22:33.123456789	2003-06-15 12:37:26.876543211	2003-06-15 12:37:26.876543211	2003-12-31 10:22:33.123456789
+2004-03-07	2003-11-28 12:37:26.876543211	2004-06-14 12:22:33.123456789	2004-06-14 12:22:33.123456789	2003-11-28 12:37:26.876543211	2003-11-28 12:37:26.876543211	2004-06-14 12:22:33.123456789
+2007-02-09	2006-11-01 12:37:26.876543211	2007-05-19 12:22:33.123456789	2007-05-19 12:22:33.123456789	2006-11-01 12:37:26.876543211	2006-11-01 12:37:26.876543211	2007-05-19 12:22:33.123456789
+2009-01-21	2008-10-13 13:37:26.876543211	2009-04-30 12:22:33.123456789	2009-04-30 12:22:33.123456789	2008-10-13 13:37:26.876543211	2008-10-13 13:37:26.876543211	2009-04-30 12:22:33.123456789
+2010-04-08	2009-12-29 11:37:26.876543211	2010-07-16 11:22:33.123456789	2010-07-16 11:22:33.123456789	2009-12-29 11:37:26.876543211	2009-12-29 11:37:26.876543211	2010-07-16 11:22:33.123456789
+2013-04-07	2012-12-28 11:37:26.876543211	2013-07-15 11:22:33.123456789	2013-07-15 11:22:33.123456789	2012-12-28 11:37:26.876543211	2012-12-28 11:37:26.876543211	2013-07-15 11:22:33.123456789
+2013-04-10	2012-12-31 11:37:26.876543211	2013-07-18 11:22:33.123456789	2013-07-18 11:22:33.123456789	2012-12-31 11:37:26.876543211	2012-12-31 11:37:26.876543211	2013-07-18 11:22:33.123456789
+2021-09-24	2021-06-16 12:37:26.876543211	2022-01-01 10:22:33.123456789	2022-01-01 10:22:33.123456789	2021-06-16 12:37:26.876543211	2021-06-16 12:37:26.876543211	2022-01-01 10:22:33.123456789
+2024-11-11	2024-08-03 13:37:26.876543211	2025-02-18 11:22:33.123456789	2025-02-18 11:22:33.123456789	2024-08-03 13:37:26.876543211	2024-08-03 13:37:26.876543211	2025-02-18 11:22:33.123456789
 4143-07-08	4143-03-30 12:37:26.876543211	4143-10-15 11:22:33.123456789	4143-10-15 11:22:33.123456789	4143-03-30 12:37:26.876543211	4143-03-30 12:37:26.876543211	4143-10-15 11:22:33.123456789
-4966-12-04	4966-08-26 12:37:26.876543211	4967-03-13 11:22:33.123456789	4967-03-13 11:22:33.123456789	4966-08-26 12:37:26.876543211	4966-08-26 12:37:26.876543211	4967-03-13 11:22:33.123456789
-5339-02-01	5338-10-24 12:37:26.876543211	5339-05-11 11:22:33.123456789	5339-05-11 11:22:33.123456789	5338-10-24 12:37:26.876543211	5338-10-24 12:37:26.876543211	5339-05-11 11:22:33.123456789
-5344-10-04	5344-06-26 12:37:26.876543211	5345-01-11 11:22:33.123456789	5345-01-11 11:22:33.123456789	5344-06-26 12:37:26.876543211	5344-06-26 12:37:26.876543211	5345-01-11 11:22:33.123456789
+4966-12-04	4966-08-26 13:37:26.876543211	4967-03-13 12:22:33.123456789	4967-03-13 12:22:33.123456789	4966-08-26 13:37:26.876543211	4966-08-26 13:37:26.876543211	4967-03-13 12:22:33.123456789
+5339-02-01	5338-10-24 13:37:26.876543211	5339-05-11 12:22:33.123456789	5339-05-11 12:22:33.123456789	5338-10-24 13:37:26.876543211	5338-10-24 13:37:26.876543211	5339-05-11 12:22:33.123456789
+5344-10-04	5344-06-26 12:37:26.876543211	5345-01-11 10:22:33.123456789	5345-01-11 10:22:33.123456789	5344-06-26 12:37:26.876543211	5344-06-26 12:37:26.876543211	5345-01-11 10:22:33.123456789
 5397-07-13	5397-04-04 12:37:26.876543211	5397-10-20 11:22:33.123456789	5397-10-20 11:22:33.123456789	5397-04-04 12:37:26.876543211	5397-04-04 12:37:26.876543211	5397-10-20 11:22:33.123456789
 5966-07-09	5966-03-31 12:37:26.876543211	5966-10-16 11:22:33.123456789	5966-10-16 11:22:33.123456789	5966-03-31 12:37:26.876543211	5966-03-31 12:37:26.876543211	5966-10-16 11:22:33.123456789
 6229-06-28	6229-03-20 12:37:26.876543211	6229-10-05 11:22:33.123456789	6229-10-05 11:22:33.123456789	6229-03-20 12:37:26.876543211	6229-03-20 12:37:26.876543211	6229-10-05 11:22:33.123456789
-6482-04-27	6482-01-17 12:37:26.876543211	6482-08-04 11:22:33.123456789	6482-08-04 11:22:33.123456789	6482-01-17 12:37:26.876543211	6482-01-17 12:37:26.876543211	6482-08-04 11:22:33.123456789
-6631-11-13	6631-08-05 12:37:26.876543211	6632-02-20 11:22:33.123456789	6632-02-20 11:22:33.123456789	6631-08-05 12:37:26.876543211	6631-08-05 12:37:26.876543211	6632-02-20 11:22:33.123456789
-6705-09-28	6705-06-20 12:37:26.876543211	6706-01-05 11:22:33.123456789	6706-01-05 11:22:33.123456789	6705-06-20 12:37:26.876543211	6705-06-20 12:37:26.876543211	6706-01-05 11:22:33.123456789
-6731-02-12	6730-11-04 12:37:26.876543211	6731-05-22 11:22:33.123456789	6731-05-22 11:22:33.123456789	6730-11-04 12:37:26.876543211	6730-11-04 12:37:26.876543211	6731-05-22 11:22:33.123456789
-7160-12-02	7160-08-24 12:37:26.876543211	7161-03-11 11:22:33.123456789	7161-03-11 11:22:33.123456789	7160-08-24 12:37:26.876543211	7160-08-24 12:37:26.876543211	7161-03-11 11:22:33.123456789
-7409-09-07	7409-05-30 12:37:26.876543211	7409-12-15 11:22:33.123456789	7409-12-15 11:22:33.123456789	7409-05-30 12:37:26.876543211	7409-05-30 12:37:26.876543211	7409-12-15 11:22:33.123456789
+6482-04-27	6482-01-17 11:37:26.876543211	6482-08-04 11:22:33.123456789	6482-08-04 11:22:33.123456789	6482-01-17 11:37:26.876543211	6482-01-17 11:37:26.876543211	6482-08-04 11:22:33.123456789
+6631-11-13	6631-08-05 13:37:26.876543211	6632-02-20 11:22:33.123456789	6632-02-20 11:22:33.123456789	6631-08-05 13:37:26.876543211	6631-08-05 13:37:26.876543211	6632-02-20 11:22:33.123456789
+6705-09-28	6705-06-20 12:37:26.876543211	6706-01-05 10:22:33.123456789	6706-01-05 10:22:33.123456789	6705-06-20 12:37:26.876543211	6705-06-20 12:37:26.876543211	6706-01-05 10:22:33.123456789
+6731-02-12	6730-11-04 12:37:26.876543211	6731-05-22 12:22:33.123456789	6731-05-22 12:22:33.123456789	6730-11-04 12:37:26.876543211	6730-11-04 12:37:26.876543211	6731-05-22 12:22:33.123456789
+7160-12-02	7160-08-24 13:37:26.876543211	7161-03-11 11:22:33.123456789	7161-03-11 11:22:33.123456789	7160-08-24 13:37:26.876543211	7160-08-24 13:37:26.876543211	7161-03-11 11:22:33.123456789
+7409-09-07	7409-05-30 12:37:26.876543211	7409-12-15 10:22:33.123456789	7409-12-15 10:22:33.123456789	7409-05-30 12:37:26.876543211	7409-05-30 12:37:26.876543211	7409-12-15 10:22:33.123456789
 7503-06-23	7503-03-15 12:37:26.876543211	7503-09-30 11:22:33.123456789	7503-09-30 11:22:33.123456789	7503-03-15 12:37:26.876543211	7503-03-15 12:37:26.876543211	7503-09-30 11:22:33.123456789
 8422-07-22	8422-04-13 12:37:26.876543211	8422-10-29 11:22:33.123456789	8422-10-29 11:22:33.123456789	8422-04-13 12:37:26.876543211	8422-04-13 12:37:26.876543211	8422-10-29 11:22:33.123456789
-8521-01-16	8520-10-08 12:37:26.876543211	8521-04-25 11:22:33.123456789	8521-04-25 11:22:33.123456789	8520-10-08 12:37:26.876543211	8520-10-08 12:37:26.876543211	8521-04-25 11:22:33.123456789
-9075-06-13	9075-03-05 12:37:26.876543211	9075-09-20 11:22:33.123456789	9075-09-20 11:22:33.123456789	9075-03-05 12:37:26.876543211	9075-03-05 12:37:26.876543211	9075-09-20 11:22:33.123456789
-9209-11-11	9209-08-03 12:37:26.876543211	9210-02-18 11:22:33.123456789	9210-02-18 11:22:33.123456789	9209-08-03 12:37:26.876543211	9209-08-03 12:37:26.876543211	9210-02-18 11:22:33.123456789
-9403-01-09	9402-10-01 12:37:26.876543211	9403-04-18 11:22:33.123456789	9403-04-18 11:22:33.123456789	9402-10-01 12:37:26.876543211	9402-10-01 12:37:26.876543211	9403-04-18 11:22:33.123456789
+8521-01-16	8520-10-08 13:37:26.876543211	8521-04-25 12:22:33.123456789	8521-04-25 12:22:33.123456789	8520-10-08 13:37:26.876543211	8520-10-08 13:37:26.876543211	8521-04-25 12:22:33.123456789
+9075-06-13	9075-03-05 11:37:26.876543211	9075-09-20 11:22:33.123456789	9075-09-20 11:22:33.123456789	9075-03-05 11:37:26.876543211	9075-03-05 11:37:26.876543211	9075-09-20 11:22:33.123456789
+9209-11-11	9209-08-03 13:37:26.876543211	9210-02-18 11:22:33.123456789	9210-02-18 11:22:33.123456789	9209-08-03 13:37:26.876543211	9209-08-03 13:37:26.876543211	9210-02-18 11:22:33.123456789
+9403-01-09	9402-10-01 13:37:26.876543211	9403-04-18 12:22:33.123456789	9403-04-18 12:22:33.123456789	9402-10-01 13:37:26.876543211	9402-10-01 13:37:26.876543211	9403-04-18 12:22:33.123456789
 PREHOOK: query: explain vectorization expression
 select
   dateval,
@@ -1200,50 +1200,50 @@ tsval	_c1	_c2	_c3	_c4	_c5	_c6
 1404-07-23 15:32:16.059185026	1404-04-15 04:09:42.935728237	1404-10-31 02:54:49.182641815	1404-10-31 02:54:49.182641815	1404-04-15 04:09:42.935728237	1404-04-15 04:09:42.935728237	1404-10-31 02:54:49.182641815
 1815-05-06 00:12:37.543584705	1815-01-26 12:50:04.420127916	1815-08-13 11:35:10.667041494	1815-08-13 11:35:10.667041494	1815-01-26 12:50:04.420127916	1815-01-26 12:50:04.420127916	1815-08-13 11:35:10.667041494
 1883-04-17 04:14:34.647766229	1883-01-07 16:52:01.52430944	1883-07-25 15:37:07.771223018	1883-07-25 15:37:07.771223018	1883-01-07 16:52:01.52430944	1883-01-07 16:52:01.52430944	1883-07-25 15:37:07.771223018
-1966-08-16 13:36:50.183618031	1966-05-09 02:14:17.060161242	1966-11-24 00:59:23.30707482	1966-11-24 00:59:23.30707482	1966-05-09 02:14:17.060161242	1966-05-09 02:14:17.060161242	1966-11-24 00:59:23.30707482
-1973-04-17 06:30:38.596784156	1973-01-07 19:08:05.473327367	1973-07-25 17:53:11.720240945	1973-07-25 17:53:11.720240945	1973-01-07 19:08:05.473327367	1973-01-07 19:08:05.473327367	1973-07-25 17:53:11.720240945
-1974-10-04 17:21:03.989	1974-06-27 05:58:30.865543211	1975-01-12 04:43:37.112456789	1975-01-12 04:43:37.112456789	1974-06-27 05:58:30.865543211	1974-06-27 05:58:30.865543211	1975-01-12 04:43:37.112456789
-1976-03-03 04:54:33.000895162	1975-11-24 17:31:59.877438373	1976-06-10 16:17:06.124351951	1976-06-10 16:17:06.124351951	1975-11-24 17:31:59.877438373	1975-11-24 17:31:59.877438373	1976-06-10 16:17:06.124351951
-1976-05-06 00:42:30.910786948	1976-01-27 13:19:57.787330159	1976-08-13 12:05:04.034243737	1976-08-13 12:05:04.034243737	1976-01-27 13:19:57.787330159	1976-01-27 13:19:57.787330159	1976-08-13 12:05:04.034243737
-1978-08-05 14:41:05.501	1978-04-28 03:18:32.377543211	1978-11-13 02:03:38.624456789	1978-11-13 02:03:38.624456789	1978-04-28 03:18:32.377543211	1978-04-28 03:18:32.377543211	1978-11-13 02:03:38.624456789
-1981-04-25 09:01:12.077192689	1981-01-15 21:38:38.9537359	1981-08-02 20:23:45.200649478	1981-08-02 20:23:45.200649478	1981-01-15 21:38:38.9537359	1981-01-15 21:38:38.9537359	1981-08-02 20:23:45.200649478
-1981-11-15 23:03:10.999338387	1981-08-08 11:40:37.875881598	1982-02-23 10:25:44.122795176	1982-02-23 10:25:44.122795176	1981-08-08 11:40:37.875881598	1981-08-08 11:40:37.875881598	1982-02-23 10:25:44.122795176
-1985-07-20 09:30:11	1985-04-11 22:07:37.876543211	1985-10-27 20:52:44.123456789	1985-10-27 20:52:44.123456789	1985-04-11 22:07:37.876543211	1985-04-11 22:07:37.876543211	1985-10-27 20:52:44.123456789
-1985-11-18 16:37:54	1985-08-11 05:15:20.876543211	1986-02-26 04:00:27.123456789	1986-02-26 04:00:27.123456789	1985-08-11 05:15:20.876543211	1985-08-11 05:15:20.876543211	1986-02-26 04:00:27.123456789
-1987-02-21 19:48:29	1986-11-14 08:25:55.876543211	1987-06-01 07:11:02.123456789	1987-06-01 07:11:02.123456789	1986-11-14 08:25:55.876543211	1986-11-14 08:25:55.876543211	1987-06-01 07:11:02.123456789
-1987-05-28 13:52:07.900916635	1987-02-18 02:29:34.777459846	1987-09-05 01:14:41.024373424	1987-09-05 01:14:41.024373424	1987-02-18 02:29:34.777459846	1987-02-18 02:29:34.777459846	1987-09-05 01:14:41.024373424
-1998-10-16 20:05:29.397591987	1998-07-09 08:42:56.274135198	1999-01-24 07:28:02.521048776	1999-01-24 07:28:02.521048776	1998-07-09 08:42:56.274135198	1998-07-09 08:42:56.274135198	1999-01-24 07:28:02.521048776
-1999-10-03 16:59:10.396903939	1999-06-26 05:36:37.27344715	2000-01-11 04:21:43.520360728	2000-01-11 04:21:43.520360728	1999-06-26 05:36:37.27344715	1999-06-26 05:36:37.27344715	2000-01-11 04:21:43.520360728
-2000-12-18 08:42:30.000595596	2000-09-09 21:19:56.877138807	2001-03-27 20:05:03.124052385	2001-03-27 20:05:03.124052385	2000-09-09 21:19:56.877138807	2000-09-09 21:19:56.877138807	2001-03-27 20:05:03.124052385
-2002-05-10 05:29:48.990818073	2002-01-30 18:07:15.867361284	2002-08-17 16:52:22.114274862	2002-08-17 16:52:22.114274862	2002-01-30 18:07:15.867361284	2002-01-30 18:07:15.867361284	2002-08-17 16:52:22.114274862
-2003-09-23 22:33:17.00003252	2003-06-16 11:10:43.876575731	2004-01-01 09:55:50.123489309	2004-01-01 09:55:50.123489309	2003-06-16 11:10:43.876575731	2003-06-16 11:10:43.876575731	2004-01-01 09:55:50.123489309
-2004-03-07 20:14:13	2003-11-29 08:51:39.876543211	2004-06-15 07:36:46.123456789	2004-06-15 07:36:46.123456789	2003-11-29 08:51:39.876543211	2003-11-29 08:51:39.876543211	2004-06-15 07:36:46.123456789
-2007-02-09 05:17:29.368756876	2006-11-01 17:54:56.245300087	2007-05-19 16:40:02.492213665	2007-05-19 16:40:02.492213665	2006-11-01 17:54:56.245300087	2006-11-01 17:54:56.245300087	2007-05-19 16:40:02.492213665
-2009-01-21 10:49:07.108	2008-10-13 23:26:33.984543211	2009-04-30 22:11:40.231456789	2009-04-30 22:11:40.231456789	2008-10-13 23:26:33.984543211	2008-10-13 23:26:33.984543211	2009-04-30 22:11:40.231456789
-2010-04-08 02:43:35.861742727	2009-12-29 15:21:02.738285938	2010-07-16 14:06:08.985199516	2010-07-16 14:06:08.985199516	2009-12-29 15:21:02.738285938	2009-12-29 15:21:02.738285938	2010-07-16 14:06:08.985199516
-2013-04-07 02:44:43.00086821	2012-12-28 15:22:09.877411421	2013-07-15 14:07:16.124324999	2013-07-15 14:07:16.124324999	2012-12-28 15:22:09.877411421	2012-12-28 15:22:09.877411421	2013-07-15 14:07:16.124324999
-2013-04-10 00:43:46.854731546	2012-12-31 13:21:13.731274757	2013-07-18 12:06:19.978188335	2013-07-18 12:06:19.978188335	2012-12-31 13:21:13.731274757	2012-12-31 13:21:13.731274757	2013-07-18 12:06:19.978188335
-2021-09-24 03:18:32.413655165	2021-06-16 15:55:59.290198376	2022-01-01 14:41:05.537111954	2022-01-01 14:41:05.537111954	2021-06-16 15:55:59.290198376	2021-06-16 15:55:59.290198376	2022-01-01 14:41:05.537111954
-2024-11-11 16:42:41.101	2024-08-04 05:20:07.977543211	2025-02-19 04:05:14.224456789	2025-02-19 04:05:14.224456789	2024-08-04 05:20:07.977543211	2024-08-04 05:20:07.977543211	2025-02-19 04:05:14.224456789
+1966-08-16 13:36:50.183618031	1966-05-09 02:14:17.060161242	1966-11-23 23:59:23.30707482	1966-11-23 23:59:23.30707482	1966-05-09 02:14:17.060161242	1966-05-09 02:14:17.060161242	1966-11-23 23:59:23.30707482
+1973-04-17 06:30:38.596784156	1973-01-07 19:08:05.473327367	1973-07-25 18:53:11.720240945	1973-07-25 18:53:11.720240945	1973-01-07 19:08:05.473327367	1973-01-07 19:08:05.473327367	1973-07-25 18:53:11.720240945
+1974-10-04 17:21:03.989	1974-06-27 05:58:30.865543211	1975-01-12 03:43:37.112456789	1975-01-12 03:43:37.112456789	1974-06-27 05:58:30.865543211	1974-06-27 05:58:30.865543211	1975-01-12 03:43:37.112456789
+1976-03-03 04:54:33.000895162	1975-11-24 17:31:59.877438373	1976-06-10 17:17:06.124351951	1976-06-10 17:17:06.124351951	1975-11-24 17:31:59.877438373	1975-11-24 17:31:59.877438373	1976-06-10 17:17:06.124351951
+1976-05-06 00:42:30.910786948	1976-01-27 12:19:57.787330159	1976-08-13 12:05:04.034243737	1976-08-13 12:05:04.034243737	1976-01-27 12:19:57.787330159	1976-01-27 12:19:57.787330159	1976-08-13 12:05:04.034243737
+1978-08-05 14:41:05.501	1978-04-28 02:18:32.377543211	1978-11-13 01:03:38.624456789	1978-11-13 01:03:38.624456789	1978-04-28 02:18:32.377543211	1978-04-28 02:18:32.377543211	1978-11-13 01:03:38.624456789
+1981-04-25 09:01:12.077192689	1981-01-15 21:38:38.9537359	1981-08-02 21:23:45.200649478	1981-08-02 21:23:45.200649478	1981-01-15 21:38:38.9537359	1981-01-15 21:38:38.9537359	1981-08-02 21:23:45.200649478
+1981-11-15 23:03:10.999338387	1981-08-08 12:40:37.875881598	1982-02-23 10:25:44.122795176	1982-02-23 10:25:44.122795176	1981-08-08 12:40:37.875881598	1981-08-08 12:40:37.875881598	1982-02-23 10:25:44.122795176
+1985-07-20 09:30:11	1985-04-11 21:07:37.876543211	1985-10-27 19:52:44.123456789	1985-10-27 19:52:44.123456789	1985-04-11 21:07:37.876543211	1985-04-11 21:07:37.876543211	1985-10-27 19:52:44.123456789
+1985-11-18 16:37:54	1985-08-11 06:15:20.876543211	1986-02-26 04:00:27.123456789	1986-02-26 04:00:27.123456789	1985-08-11 06:15:20.876543211	1985-08-11 06:15:20.876543211	1986-02-26 04:00:27.123456789
+1987-02-21 19:48:29	1986-11-14 08:25:55.876543211	1987-06-01 08:11:02.123456789	1987-06-01 08:11:02.123456789	1986-11-14 08:25:55.876543211	1986-11-14 08:25:55.876543211	1987-06-01 08:11:02.123456789
+1987-05-28 13:52:07.900916635	1987-02-18 01:29:34.777459846	1987-09-05 01:14:41.024373424	1987-09-05 01:14:41.024373424	1987-02-18 01:29:34.777459846	1987-02-18 01:29:34.777459846	1987-09-05 01:14:41.024373424
+1998-10-16 20:05:29.397591987	1998-07-09 08:42:56.274135198	1999-01-24 06:28:02.521048776	1999-01-24 06:28:02.521048776	1998-07-09 08:42:56.274135198	1998-07-09 08:42:56.274135198	1999-01-24 06:28:02.521048776
+1999-10-03 16:59:10.396903939	1999-06-26 05:36:37.27344715	2000-01-11 03:21:43.520360728	2000-01-11 03:21:43.520360728	1999-06-26 05:36:37.27344715	1999-06-26 05:36:37.27344715	2000-01-11 03:21:43.520360728
+2000-12-18 08:42:30.000595596	2000-09-09 22:19:56.877138807	2001-03-27 20:05:03.124052385	2001-03-27 20:05:03.124052385	2000-09-09 22:19:56.877138807	2000-09-09 22:19:56.877138807	2001-03-27 20:05:03.124052385
+2002-05-10 05:29:48.990818073	2002-01-30 17:07:15.867361284	2002-08-17 16:52:22.114274862	2002-08-17 16:52:22.114274862	2002-01-30 17:07:15.867361284	2002-01-30 17:07:15.867361284	2002-08-17 16:52:22.114274862
+2003-09-23 22:33:17.00003252	2003-06-16 11:10:43.876575731	2004-01-01 08:55:50.123489309	2004-01-01 08:55:50.123489309	2003-06-16 11:10:43.876575731	2003-06-16 11:10:43.876575731	2004-01-01 08:55:50.123489309
+2004-03-07 20:14:13	2003-11-29 08:51:39.876543211	2004-06-15 08:36:46.123456789	2004-06-15 08:36:46.123456789	2003-11-29 08:51:39.876543211	2003-11-29 08:51:39.876543211	2004-06-15 08:36:46.123456789
+2007-02-09 05:17:29.368756876	2006-11-01 17:54:56.245300087	2007-05-19 17:40:02.492213665	2007-05-19 17:40:02.492213665	2006-11-01 17:54:56.245300087	2006-11-01 17:54:56.245300087	2007-05-19 17:40:02.492213665
+2009-01-21 10:49:07.108	2008-10-14 00:26:33.984543211	2009-04-30 23:11:40.231456789	2009-04-30 23:11:40.231456789	2008-10-14 00:26:33.984543211	2008-10-14 00:26:33.984543211	2009-04-30 23:11:40.231456789
+2010-04-08 02:43:35.861742727	2009-12-29 14:21:02.738285938	2010-07-16 14:06:08.985199516	2010-07-16 14:06:08.985199516	2009-12-29 14:21:02.738285938	2009-12-29 14:21:02.738285938	2010-07-16 14:06:08.985199516
+2013-04-07 02:44:43.00086821	2012-12-28 14:22:09.877411421	2013-07-15 14:07:16.124324999	2013-07-15 14:07:16.124324999	2012-12-28 14:22:09.877411421	2012-12-28 14:22:09.877411421	2013-07-15 14:07:16.124324999
+2013-04-10 00:43:46.854731546	2012-12-31 12:21:13.731274757	2013-07-18 12:06:19.978188335	2013-07-18 12:06:19.978188335	2012-12-31 12:21:13.731274757	2012-12-31 12:21:13.731274757	2013-07-18 12:06:19.978188335
+2021-09-24 03:18:32.413655165	2021-06-16 15:55:59.290198376	2022-01-01 13:41:05.537111954	2022-01-01 13:41:05.537111954	2021-06-16 15:55:59.290198376	2021-06-16 15:55:59.290198376	2022-01-01 13:41:05.537111954
+2024-11-11 16:42:41.101	2024-08-04 06:20:07.977543211	2025-02-19 04:05:14.224456789	2025-02-19 04:05:14.224456789	2024-08-04 06:20:07.977543211	2024-08-04 06:20:07.977543211	2025-02-19 04:05:14.224456789
 4143-07-08 10:53:27.252802259	4143-03-30 23:30:54.12934547	4143-10-15 22:16:00.376259048	4143-10-15 22:16:00.376259048	4143-03-30 23:30:54.12934547	4143-03-30 23:30:54.12934547	4143-10-15 22:16:00.376259048
-4966-12-04 09:30:55.202	4966-08-26 22:08:22.078543211	4967-03-13 20:53:28.325456789	4967-03-13 20:53:28.325456789	4966-08-26 22:08:22.078543211	4966-08-26 22:08:22.078543211	4967-03-13 20:53:28.325456789
-5339-02-01 14:10:01.085678691	5338-10-25 02:47:27.962221902	5339-05-12 01:32:34.20913548	5339-05-12 01:32:34.20913548	5338-10-25 02:47:27.962221902	5338-10-25 02:47:27.962221902	5339-05-12 01:32:34.20913548
-5344-10-04 18:40:08.165	5344-06-27 07:17:35.041543211	5345-01-12 06:02:41.288456789	5345-01-12 06:02:41.288456789	5344-06-27 07:17:35.041543211	5344-06-27 07:17:35.041543211	5345-01-12 06:02:41.288456789
+4966-12-04 09:30:55.202	4966-08-26 23:08:22.078543211	4967-03-13 21:53:28.325456789	4967-03-13 21:53:28.325456789	4966-08-26 23:08:22.078543211	4966-08-26 23:08:22.078543211	4967-03-13 21:53:28.325456789
+5339-02-01 14:10:01.085678691	5338-10-25 03:47:27.962221902	5339-05-12 02:32:34.20913548	5339-05-12 02:32:34.20913548	5338-10-25 03:47:27.962221902	5338-10-25 03:47:27.962221902	5339-05-12 02:32:34.20913548
+5344-10-04 18:40:08.165	5344-06-27 07:17:35.041543211	5345-01-12 05:02:41.288456789	5345-01-12 05:02:41.288456789	5344-06-27 07:17:35.041543211	5344-06-27 07:17:35.041543211	5345-01-12 05:02:41.288456789
 5397-07-13 07:12:32.000896438	5397-04-04 19:49:58.877439649	5397-10-20 18:35:05.124353227	5397-10-20 18:35:05.124353227	5397-04-04 19:49:58.877439649	5397-04-04 19:49:58.877439649	5397-10-20 18:35:05.124353227
 5966-07-09 03:30:50.597	5966-03-31 16:08:17.473543211	5966-10-16 14:53:23.720456789	5966-10-16 14:53:23.720456789	5966-03-31 16:08:17.473543211	5966-03-31 16:08:17.473543211	5966-10-16 14:53:23.720456789
 6229-06-28 02:54:28.970117179	6229-03-20 15:31:55.84666039	6229-10-05 14:17:02.093573968	6229-10-05 14:17:02.093573968	6229-03-20 15:31:55.84666039	6229-03-20 15:31:55.84666039	6229-10-05 14:17:02.093573968
-6482-04-27 12:07:38.073915413	6482-01-18 00:45:04.950458624	6482-08-04 23:30:11.197372202	6482-08-04 23:30:11.197372202	6482-01-18 00:45:04.950458624	6482-01-18 00:45:04.950458624	6482-08-04 23:30:11.197372202
-6631-11-13 16:31:29.702202248	6631-08-06 05:08:56.578745459	6632-02-21 03:54:02.825659037	6632-02-21 03:54:02.825659037	6631-08-06 05:08:56.578745459	6631-08-06 05:08:56.578745459	6632-02-21 03:54:02.825659037
-6705-09-28 18:27:28.000845672	6705-06-21 07:04:54.877388883	6706-01-06 05:50:01.124302461	6706-01-06 05:50:01.124302461	6705-06-21 07:04:54.877388883	6705-06-21 07:04:54.877388883	6706-01-06 05:50:01.124302461
-6731-02-12 08:12:48.287783702	6730-11-04 20:50:15.164326913	6731-05-22 19:35:21.411240491	6731-05-22 19:35:21.411240491	6730-11-04 20:50:15.164326913	6730-11-04 20:50:15.164326913	6731-05-22 19:35:21.411240491
-7160-12-02 06:00:24.81200852	7160-08-24 18:37:51.688551731	7161-03-11 17:22:57.935465309	7161-03-11 17:22:57.935465309	7160-08-24 18:37:51.688551731	7160-08-24 18:37:51.688551731	7161-03-11 17:22:57.935465309
-7409-09-07 23:33:32.459349602	7409-05-31 12:10:59.335892813	7409-12-16 10:56:05.582806391	7409-12-16 10:56:05.582806391	7409-05-31 12:10:59.335892813	7409-05-31 12:10:59.335892813	7409-12-16 10:56:05.582806391
+6482-04-27 12:07:38.073915413	6482-01-17 23:45:04.950458624	6482-08-04 23:30:11.197372202	6482-08-04 23:30:11.197372202	6482-01-17 23:45:04.950458624	6482-01-17 23:45:04.950458624	6482-08-04 23:30:11.197372202
+6631-11-13 16:31:29.702202248	6631-08-06 06:08:56.578745459	6632-02-21 03:54:02.825659037	6632-02-21 03:54:02.825659037	6631-08-06 06:08:56.578745459	6631-08-06 06:08:56.578745459	6632-02-21 03:54:02.825659037
+6705-09-28 18:27:28.000845672	6705-06-21 07:04:54.877388883	6706-01-06 04:50:01.124302461	6706-01-06 04:50:01.124302461	6705-06-21 07:04:54.877388883	6705-06-21 07:04:54.877388883	6706-01-06 04:50:01.124302461
+6731-02-12 08:12:48.287783702	6730-11-04 20:50:15.164326913	6731-05-22 20:35:21.411240491	6731-05-22 20:35:21.411240491	6730-11-04 20:50:15.164326913	6730-11-04 20:50:15.164326913	6731-05-22 20:35:21.411240491
+7160-12-02 06:00:24.81200852	7160-08-24 19:37:51.688551731	7161-03-11 17:22:57.935465309	7161-03-11 17:22:57.935465309	7160-08-24 19:37:51.688551731	7160-08-24 19:37:51.688551731	7161-03-11 17:22:57.935465309
+7409-09-07 23:33:32.459349602	7409-05-31 12:10:59.335892813	7409-12-16 09:56:05.582806391	7409-12-16 09:56:05.582806391	7409-05-31 12:10:59.335892813	7409-05-31 12:10:59.335892813	7409-12-16 09:56:05.582806391
 7503-06-23 23:14:17.486	7503-03-16 11:51:44.362543211	7503-10-01 10:36:50.609456789	7503-10-01 10:36:50.609456789	7503-03-16 11:51:44.362543211	7503-03-16 11:51:44.362543211	7503-10-01 10:36:50.609456789
 8422-07-22 03:21:45.745036084	8422-04-13 15:59:12.621579295	8422-10-29 14:44:18.868492873	8422-10-29 14:44:18.868492873	8422-04-13 15:59:12.621579295	8422-04-13 15:59:12.621579295	8422-10-29 14:44:18.868492873
-8521-01-16 20:42:05.668832388	8520-10-09 09:19:32.545375599	8521-04-26 08:04:38.792289177	8521-04-26 08:04:38.792289177	8520-10-09 09:19:32.545375599	8520-10-09 09:19:32.545375599	8521-04-26 08:04:38.792289177
-9075-06-13 16:20:09.218517797	9075-03-06 04:57:36.095061008	9075-09-21 03:42:42.341974586	9075-09-21 03:42:42.341974586	9075-03-06 04:57:36.095061008	9075-03-06 04:57:36.095061008	9075-09-21 03:42:42.341974586
-9209-11-11 04:08:58.223768453	9209-08-03 16:46:25.100311664	9210-02-18 15:31:31.347225242	9210-02-18 15:31:31.347225242	9209-08-03 16:46:25.100311664	9209-08-03 16:46:25.100311664	9210-02-18 15:31:31.347225242
-9403-01-09 18:12:33.547	9402-10-02 06:50:00.423543211	9403-04-19 05:35:06.670456789	9403-04-19 05:35:06.670456789	9402-10-02 06:50:00.423543211	9402-10-02 06:50:00.423543211	9403-04-19 05:35:06.670456789
+8521-01-16 20:42:05.668832388	8520-10-09 10:19:32.545375599	8521-04-26 09:04:38.792289177	8521-04-26 09:04:38.792289177	8520-10-09 10:19:32.545375599	8520-10-09 10:19:32.545375599	8521-04-26 09:04:38.792289177
+9075-06-13 16:20:09.218517797	9075-03-06 03:57:36.095061008	9075-09-21 03:42:42.341974586	9075-09-21 03:42:42.341974586	9075-03-06 03:57:36.095061008	9075-03-06 03:57:36.095061008	9075-09-21 03:42:42.341974586
+9209-11-11 04:08:58.223768453	9209-08-03 17:46:25.100311664	9210-02-18 15:31:31.347225242	9210-02-18 15:31:31.347225242	9209-08-03 17:46:25.100311664	9209-08-03 17:46:25.100311664	9210-02-18 15:31:31.347225242
+9403-01-09 18:12:33.547	9402-10-02 07:50:00.423543211	9403-04-19 06:35:06.670456789	9403-04-19 06:35:06.670456789	9402-10-02 07:50:00.423543211	9402-10-02 07:50:00.423543211	9403-04-19 06:35:06.670456789
 PREHOOK: query: explain vectorization expression
 select
   interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,


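Context for the golden-file deltas above: most changed rows differ by exactly one hour, which is the observable effect of this revert. With HIVE-12192, interval arithmetic was parsed, computed and rendered entirely in UTC, so wall-clock values never moved; the restored behaviour goes through epoch-based java.sql.Timestamp values interpreted in the JVM default time zone, so results whose source and target fall on opposite sides of a DST transition shift by the offset change. Below is a minimal, self-contained sketch of that mechanism, not the actual Hive code path; it assumes the golden files were generated with the default zone America/Los_Angeles (the PST/PDT-sized deltas suggest this), takes the 26-month (2-2) interval and the first changed row from the hunk above, and uses an invented class name.

import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;

public class TimestampRevertSketch {
  public static void main(String[] args) {
    // Assumed zone under which the q.out files were generated.
    ZoneId local = ZoneId.of("America/Los_Angeles");

    // First changed row above, shifted back by the 26-month interval.
    LocalDateTime wall = LocalDateTime.of(1985, 11, 18, 16, 37, 54);

    // HIVE-12192 semantics (the '-' lines): parse, shift and render in UTC,
    // so the wall clock is preserved.
    LocalDateTime utcResult =
        wall.atZone(ZoneOffset.UTC).minusMonths(26).toLocalDateTime();
    System.out.println(utcResult);   // 1983-09-18T16:37:54

    // Restored epoch-based semantics (the '+' lines), modelled as: pin the
    // instant in the local zone, shift months on the UTC timeline, render
    // back in local time. PST in, PDT out: the result moves one hour later.
    LocalDateTime localResult = wall.atZone(local)
        .withZoneSameInstant(ZoneOffset.UTC)
        .minusMonths(26)
        .withZoneSameInstant(local)
        .toLocalDateTime();
    System.out.println(localResult); // 1983-09-18T17:37:54
  }
}

Rows whose source and result carry the same UTC offset are untouched by the revert, which is why only part of each result block is rewritten in these hunks.
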
[15/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out
index cc7699b..654dab9 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out
@@ -263,13 +263,13 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: to_unix_timestamp(ctimestamp1) (type: bigint), year(ctimestamp1) (type: int), month(ctimestamp1) (type: int), day(ctimestamp1) (type: int), weekofyear(ctimestamp1) (type: int), hour(ctimestamp1) (type: int), minute(ctimestamp1) (type: int), second(ctimestamp1) (type: int), cboolean1 (type: boolean), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), if(cboolean1, ctimestamp1, TIMESTAMP'1319-02-02 16:31:57.778') (type: timestamp), if(cboolean1, TIMESTAMP'2000-12-18 08:42:30.0005', ctimestamp1) (type: timestamp), if(cboolean1, ctimestamp1, ctimestamp2) (type: timestamp), if(cboolean1, ctimestamp1, null) (type: timestamp), if(cboolean1, null, ctimestamp2) (type: timestamp)
-                    outputColumnNames: _col0, _col1, _col2, _col3, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
+                    expressions: to_unix_timestamp(ctimestamp1) (type: bigint), year(ctimestamp1) (type: int), month(ctimestamp1) (type: int), day(ctimestamp1) (type: int), dayofmonth(ctimestamp1) (type: int), weekofyear(ctimestamp1) (type: int), hour(ctimestamp1) (type: int), minute(ctimestamp1) (type: int), second(ctimestamp1) (type: int), cboolean1 (type: boolean), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), if(cboolean1, ctimestamp1, TIMESTAMP'1319-02-02 16:31:57.778') (type: timestamp), if(cboolean1, TIMESTAMP'2000-12-18 08:42:30.0005', ctimestamp1) (type: timestamp), if(cboolean1, ctimestamp1, ctimestamp2) (type: timestamp), if(cboolean1, ctimestamp1, null) (type: timestamp), if(cboolean1, null, ctimestamp2) (type: timestamp)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
-                        projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 0, 1, 3, 13, 14, 15, 16, 17]
-                        selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 9:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 10:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 11:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 12:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-01-25 08:31:57.778) -> 13:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 00:42:30.0005, col 1:timestamp) -> 14:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 15:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 16:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 17:timestamp
+                        projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13, 0, 1, 3, 14, 15, 16, 17, 18]
+                        selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 9:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 10:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 11:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 12:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 13:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-02-02 16:31:57.778) -> 14:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 08:42:30.0005, col 1:timestamp) -> 15:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 16:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 17:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 18:timestamp
                     Statistics: Num rows: 52 Data size: 16836 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
                       key expressions: _col0 (type: bigint)
@@ -279,7 +279,7 @@ STAGE PLANS:
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 52 Data size: 16836 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp)
+                      value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp)
             Execution mode: vectorized, llap
             LLAP IO: all inputs
             Map Vectorization:
@@ -301,12 +301,12 @@ STAGE PLANS:
                 vectorized: true
             Reduce Operator Tree:
               Select Operator
-                expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: boolean), VALUE._col8 (type: timestamp), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp), VALUE._col12 (type: timestamp), VALUE._col13 (type: timestamp), VALUE._col14 (type: timestamp)
+                expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: int), VALUE._col8 (type: boolean), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp), VALUE._col12 (type: timestamp), VALUE._col13 (type: timestamp), VALUE._col14 (type: timestamp), VALUE._col15 (type: timestamp)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
                 Select Vectorization:
                     className: VectorSelectOperator
                     native: true
-                    projectedOutputColumnNums: [0, 1, 2, 3, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
+                    projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
                 Statistics: Num rows: 52 Data size: 16836 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
@@ -371,14 +371,14 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
--45479202281	528	10	25	25	43	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
-1632453512	2021	9	24	24	38	3	18	32	NULL	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	NULL	1974-10-04 17:21:03.989
-1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	NULL	1999-10-03 16:59:10.396903939
-1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	NULL	2010-04-08 02:43:35.861742727
-1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	NULL	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	NULL	NULL	NULL
-163809583224	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	NULL	1966-08-16 13:36:50.183
-163809583224	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	NULL	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	NULL	NULL	NULL
-490699811	1985	7	20	20	29	9	30	11	true	1985-07-20 09:30:11	1319-02-02 16:31:57.778	1985-07-20 09:30:11	2000-12-18 08:42:30.0005	1985-07-20 09:30:11	1985-07-20 09:30:11	NULL
+-45479000681	528	10	27	27	43	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
+1632478712	2021	9	24	24	38	3	18	32	NULL	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	NULL	1974-10-04 17:21:03.989
+1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	NULL	1999-10-03 16:59:10.396903939
+1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	NULL	2010-04-08 02:43:35.861742727
+1632478712	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	NULL	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	NULL	NULL	NULL
+163809612024	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	1966-08-16 13:36:50.183	NULL	1966-08-16 13:36:50.183
+163809612024	7160	12	2	2	48	6	0	24	NULL	7160-12-02 06:00:24.81200852	NULL	1319-02-02 16:31:57.778	7160-12-02 06:00:24.81200852	NULL	NULL	NULL
+490725011	1985	7	20	20	29	9	30	11	true	1985-07-20 09:30:11	1319-02-02 16:31:57.778	1985-07-20 09:30:11	2000-12-18 08:42:30.0005	1985-07-20 09:30:11	1985-07-20 09:30:11	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	1319-02-02 16:31:57.778	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:44.028	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:44.028	NULL	1969-12-31 15:59:44.028
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	false	NULL	1969-12-31 15:59:44.809	1319-02-02 16:31:57.778	NULL	1969-12-31 15:59:44.809	NULL	1969-12-31 15:59:44.809
@@ -473,7 +473,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), day(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
+                    expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), dayofmonth(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -565,14 +565,14 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
--2736272726	1883	4	17	17	16	4	14	34
--62018199211	4	9	24	22	39	18	26	29
-1365554626	2013	4	10	10	15	0	43	46
-206730996125	8521	1	16	16	3	20	42	5
-271176065	1978	8	5	5	31	14	41	5
-501179874	1985	11	18	18	47	16	37	54
-501179874	1985	11	18	18	47	16	37	54
-94573819855	4966	12	4	4	49	9	30	55
+-2736243926	1883	4	17	17	16	4	14	34
+-62018170411	4	9	22	22	39	18	26	29
+1365579826	2013	4	10	10	15	0	43	46
+206731024925	8521	1	16	16	3	20	42	5
+271201265	1978	8	5	5	31	14	41	5
+501208674	1985	11	18	18	47	16	37	54
+501208674	1985	11	18	18	47	16	37	54
+94573848655	4966	12	4	4	49	9	30	55
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
@@ -667,7 +667,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: (to_unix_timestamp(ctimestamp1) = to_unix_timestamp(stimestamp1)) (type: boolean), (year(ctimestamp1) = year(CAST( stimestamp1 AS DATE))) (type: boolean), (month(ctimestamp1) = month(CAST( stimestamp1 AS DATE))) (type: boolean), (day(ctimestamp1) = day(CAST( stimestamp1 AS DATE))) (type: boolean), (day(ctimestamp1) = day(stimestamp1)) (type: boolean), (weekofyear(ctimestamp1) = weekofyear(CAST( stimestamp1 AS DATE))) (type: boolean), (hour(ctimestamp1) = hour(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (minute(ctimestamp1) = minute(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (second(ctimestamp1) = second(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean)
+                    expressions: (to_unix_timestamp(ctimestamp1) = to_unix_timestamp(stimestamp1)) (type: boolean), (year(ctimestamp1) = year(CAST( stimestamp1 AS DATE))) (type: boolean), (month(ctimestamp1) = month(CAST( stimestamp1 AS DATE))) (type: boolean), (day(ctimestamp1) = day(CAST( stimestamp1 AS DATE))) (type: boolean), (dayofmonth(ctimestamp1) = dayofmonth(stimestamp1)) (type: boolean), (weekofyear(ctimestamp1) = weekofyear(CAST( stimestamp1 AS DATE))) (type: boolean), (hour(ctimestamp1) = hour(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (minute(ctimestamp1) = minute(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean), (second(ctimestamp1) = second(CAST( stimestamp1 AS TIMESTAMP))) (type: boolean)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -861,7 +861,7 @@ STAGE PLANS:
                   TableScan Vectorization:
                       native: true
                   Select Operator
-                    expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), day(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
+                    expressions: to_unix_timestamp(stimestamp1) (type: bigint), year(CAST( stimestamp1 AS DATE)) (type: int), month(CAST( stimestamp1 AS DATE)) (type: int), day(CAST( stimestamp1 AS DATE)) (type: int), dayofmonth(stimestamp1) (type: int), weekofyear(CAST( stimestamp1 AS DATE)) (type: int), hour(CAST( stimestamp1 AS TIMESTAMP)) (type: int), minute(CAST( stimestamp1 AS TIMESTAMP)) (type: int), second(CAST( stimestamp1 AS TIMESTAMP)) (type: int)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
@@ -953,7 +953,7 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_wrong
 #### A masked pattern was here ####
-NULL	2	12	2	NULL	49	4	40	39
+NULL	2	11	30	NULL	48	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
@@ -1218,7 +1218,7 @@ FROM alltypesorc_string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-2.89160478029166E11
+2.89160863229166E11
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
   round(avg(ctimestamp1), 0),
   variance(ctimestamp1) between 8.97077295279421E19 and 8.97077295279422E19,
@@ -1377,4 +1377,4 @@ FROM alltypesorc_string
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-3.6145059754E10	false	false	false	7.5245178084814E10	7.5245178084814E10	7.5245178084814E10	8.0440478971476E10
+3.6145107904E10	false	false	false	7.5245155692476E10	7.5245155692476E10	7.5245155692476E10	8.0440455033059E10
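
Note on the deltas above: each restored to_unix_timestamp value differs from the removed one by a whole number of hours, e.g. the 1883 row moves by 28800 s (8 h) and the 2013 row by 25200 s (7 h). That is what you get when the same wall-clock string stops being interpreted in UTC (the HIVE-12192 behavior being reverted) and is interpreted in the JVM default zone again; the qtest JVMs are conventionally pinned to US/Pacific, which is UTC-8 in standard time and UTC-7 in daylight time. A minimal java.time sketch of the two interpretations (plain Java for illustration, not Hive code; the zone is an assumption about the test JVM):

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class EpochInterpretation {
      public static void main(String[] args) {
        // Same wall clock as the 2013-04-10 row above (00:43:46).
        LocalDateTime wallClock = LocalDateTime.parse("2013-04-10T00:43:46");
        // Removed golden value: wall clock interpreted in UTC.
        long utcSeconds = wallClock.toEpochSecond(ZoneOffset.UTC);
        // Restored golden value: wall clock interpreted in the default zone.
        long localSeconds = wallClock.atZone(ZoneId.of("US/Pacific")).toEpochSecond();
        System.out.println(utcSeconds);                // 1365554626
        System.out.println(localSeconds);              // 1365579826
        System.out.println(localSeconds - utcSeconds); // 25200, i.e. 7 hours (PDT)
      }
    }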

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out b/ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out
index c7f6215..82d43c1 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_timestamp_ints_casts.q.out
@@ -136,32 +136,32 @@ where cbigint % 250 = 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-1969-12-31 23:59:59.964	1969-12-31 23:59:59.8	NULL	1969-12-08 18:43:03.25	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 15:59:45.748	NULL	NULL
-1969-12-31 23:59:59.964	1969-12-31 23:59:59.8	NULL	1970-01-19 12:24:39	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 15:59:53.817	NULL	NULL
-1969-12-31 23:59:59.97	1969-12-31 23:59:59.8	NULL	1970-01-17 13:10:52.25	1969-12-31 23:59:30	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 16:00:12.935	NULL	NULL
-1969-12-31 23:59:59.949	NULL	1970-01-09 22:53:20.971	1970-01-13 04:45:23.25	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:08.451	NULL	NULL
-1969-12-31 23:59:59.949	NULL	1970-01-09 15:39:13.882	1969-12-09 15:45:32.75	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:08.451	NULL	NULL
-1970-01-01 00:00:00.02	1970-01-01 00:00:15.601	NULL	1969-12-27 19:19:26.75	1970-01-01 00:00:20	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:45.129	NULL	NULL
-1969-12-31 23:59:59.962	1970-01-01 00:00:15.601	NULL	1969-12-10 11:41:51	1969-12-31 23:59:22	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:58.614	NULL	NULL
-1969-12-31 23:59:59.995	1970-01-01 00:00:15.601	NULL	1970-01-08 02:06:56	1969-12-31 23:59:55	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 16:00:04.679	NULL	NULL
-1970-01-01 00:00:00.048	1970-01-01 00:00:15.601	NULL	1969-12-22 19:03:59	1970-01-01 00:00:48	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:50.235	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1969-12-24 08:12:58.862	1969-12-21 05:16:47.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1969-12-30 19:24:23.566	1969-12-16 19:20:17.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1970-01-10 07:39:39.664	1970-01-11 01:09:21.5	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1969-12-24 05:59:27.689	1970-01-19 09:16:31.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1970-01-11 07:29:48.972	1969-12-10 10:41:39	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:00.008	NULL	1970-01-11 18:34:27.246	1970-01-14 22:49:59.25	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1969-12-31 23:59:59.941	1969-12-31 23:59:52.804	NULL	1969-12-13 10:11:50	1969-12-31 23:59:01	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:13.15	NULL	NULL
-1969-12-31 23:59:59.979	1969-12-31 23:59:52.804	NULL	1970-01-18 20:27:09	1969-12-31 23:59:39	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:55.9	NULL	NULL
-1969-12-31 23:59:59.94	1969-12-31 23:59:52.804	NULL	1970-01-18 13:11:54.75	1969-12-31 23:59:00	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:52.408	NULL	NULL
-1969-12-31 23:59:59.986	1969-12-31 23:59:52.804	NULL	1969-12-14 00:50:00.5	1969-12-31 23:59:46	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:11.065	NULL	NULL
-1970-01-01 00:00:00.059	1969-12-31 23:59:52.804	NULL	1969-12-18 19:57:25.5	1970-01-01 00:00:59	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:10.956	NULL	NULL
-1969-12-31 23:59:59.992	1969-12-31 23:59:52.804	NULL	1969-12-10 14:06:48.5	1969-12-31 23:59:52	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:03.136	NULL	NULL
-1970-01-01 00:00:00.005	1969-12-31 23:59:52.804	NULL	1969-12-20 05:53:12.5	1970-01-01 00:00:05	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:10.973	NULL	NULL
-1969-12-31 23:59:59.976	1969-12-31 23:59:52.804	NULL	1970-01-10 14:18:31	1969-12-31 23:59:36	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:53.145	NULL	NULL
-1969-12-31 23:59:59.95	1969-12-31 23:59:52.804	NULL	1969-12-20 01:33:32.75	1969-12-31 23:59:10	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:54.733	NULL	NULL
-1970-01-01 00:00:00.011	NULL	1969-12-31 06:03:04.018	1970-01-21 20:50:53.75	1970-01-01 00:00:11	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:02.351	NULL	NULL
-1970-01-01 00:00:00.011	NULL	1969-12-28 02:49:09.583	1970-01-15 06:35:27	1970-01-01 00:00:11	NULL	1970-01-01 00:00:00.001	1970-01-01 00:00:00	1969-12-31 16:00:02.351	NULL	NULL
+1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1969-12-08 10:43:03.25	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.748	NULL	NULL
+1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1970-01-19 04:24:39	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.817	NULL	NULL
+1969-12-31 15:59:59.97	1969-12-31 15:59:59.8	NULL	1970-01-17 05:10:52.25	1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 16:00:12.935	NULL	NULL
+1969-12-31 15:59:59.949	NULL	1970-01-09 14:53:20.971	1970-01-12 20:45:23.25	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:08.451	NULL	NULL
+1969-12-31 15:59:59.949	NULL	1970-01-09 07:39:13.882	1969-12-09 07:45:32.75	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:08.451	NULL	NULL
+1969-12-31 16:00:00.02	1969-12-31 16:00:15.601	NULL	1969-12-27 11:19:26.75	1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.129	NULL	NULL
+1969-12-31 15:59:59.962	1969-12-31 16:00:15.601	NULL	1969-12-10 03:41:51	1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:58.614	NULL	NULL
+1969-12-31 15:59:59.995	1969-12-31 16:00:15.601	NULL	1970-01-07 18:06:56	1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 16:00:04.679	NULL	NULL
+1969-12-31 16:00:00.048	1969-12-31 16:00:15.601	NULL	1969-12-22 11:03:59	1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:50.235	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1969-12-24 00:12:58.862	1969-12-20 21:16:47.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1969-12-30 11:24:23.566	1969-12-16 11:20:17.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1970-01-09 23:39:39.664	1970-01-10 17:09:21.5	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1969-12-23 21:59:27.689	1970-01-19 01:16:31.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1970-01-10 23:29:48.972	1969-12-10 02:41:39	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:00.008	NULL	1970-01-11 10:34:27.246	1970-01-14 14:49:59.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 15:59:59.941	1969-12-31 15:59:52.804	NULL	1969-12-13 02:11:50	1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:13.15	NULL	NULL
+1969-12-31 15:59:59.979	1969-12-31 15:59:52.804	NULL	1970-01-18 12:27:09	1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:55.9	NULL	NULL
+1969-12-31 15:59:59.94	1969-12-31 15:59:52.804	NULL	1970-01-18 05:11:54.75	1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:52.408	NULL	NULL
+1969-12-31 15:59:59.986	1969-12-31 15:59:52.804	NULL	1969-12-13 16:50:00.5	1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:11.065	NULL	NULL
+1969-12-31 16:00:00.059	1969-12-31 15:59:52.804	NULL	1969-12-18 11:57:25.5	1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.956	NULL	NULL
+1969-12-31 15:59:59.992	1969-12-31 15:59:52.804	NULL	1969-12-10 06:06:48.5	1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:03.136	NULL	NULL
+1969-12-31 16:00:00.005	1969-12-31 15:59:52.804	NULL	1969-12-19 21:53:12.5	1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.973	NULL	NULL
+1969-12-31 15:59:59.976	1969-12-31 15:59:52.804	NULL	1970-01-10 06:18:31	1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.145	NULL	NULL
+1969-12-31 15:59:59.95	1969-12-31 15:59:52.804	NULL	1969-12-19 17:33:32.75	1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:54.733	NULL	NULL
+1969-12-31 16:00:00.011	NULL	1969-12-30 22:03:04.018	1970-01-21 12:50:53.75	1969-12-31 16:00:11	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:02.351	NULL	NULL
+1969-12-31 16:00:00.011	NULL	1969-12-27 18:49:09.583	1970-01-14 22:35:27	1969-12-31 16:00:11	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:02.351	NULL	NULL
 PREHOOK: query: explain vectorization expression
 select
 
@@ -300,29 +300,29 @@ where cbigint % 250 = 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1906-06-05 21:34:10	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 15:59:45.748	NULL	NULL
-1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	2020-09-12 02:50:00	1969-12-31 23:59:24	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 15:59:53.817	NULL	NULL
-1969-12-31 23:59:30	1969-12-31 23:56:40	NULL	2015-04-24 05:10:50	1969-12-31 23:59:30	1969-12-31 23:56:40	NULL	1970-01-01 00:00:00	1969-12-31 16:00:12.935	NULL	NULL
-1969-12-31 23:59:09	NULL	1994-07-07 17:09:31	2003-05-26 04:27:30	1969-12-31 23:59:09	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:08.451	NULL	NULL
-1969-12-31 23:59:09	NULL	1993-09-09 05:51:22	1908-10-29 15:05:50	1969-12-31 23:59:09	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:08.451	NULL	NULL
-1970-01-01 00:00:20	1970-01-01 04:20:01	NULL	1958-07-08 04:05:50	1970-01-01 00:00:20	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:45.129	NULL	NULL
-1969-12-31 23:59:22	1970-01-01 04:20:01	NULL	1911-02-07 09:30:00	1969-12-31 23:59:22	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:58.614	NULL	NULL
-1969-12-31 23:59:55	1970-01-01 04:20:01	NULL	1989-05-29 03:33:20	1969-12-31 23:59:55	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 16:00:04.679	NULL	NULL
-1970-01-01 00:00:48	1970-01-01 04:20:01	NULL	1944-10-18 10:23:20	1970-01-01 00:00:48	1970-01-01 04:20:01	NULL	1970-01-01 00:00:00	1969-12-31 15:59:50.235	NULL	NULL
-1970-01-01 00:00:08	NULL	1949-01-13 08:21:02	1940-06-26 23:47:30	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1966-09-27 14:32:46	1928-05-26 18:07:30	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1995-07-08 05:01:04	1997-07-06 03:58:20	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1948-10-12 15:01:29	2020-05-04 11:20:50	1970-01-01 00:00:08	NULL	1970-01-01 00:00:00	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1998-03-27 08:56:12	1910-12-27 14:10:00	1970-01-01 00:00:08	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1970-01-01 00:00:08	NULL	1999-07-01 22:14:06	2008-03-13 09:07:30	1970-01-01 00:00:08	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:15.892	NULL	NULL
-1969-12-31 23:59:01	1969-12-31 22:00:04	NULL	1919-02-22 21:13:20	1969-12-31 23:59:01	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:13.15	NULL	NULL
-1969-12-31 23:59:39	1969-12-31 22:00:04	NULL	2018-11-17 04:30:00	1969-12-31 23:59:39	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:55.9	NULL	NULL
-1969-12-31 23:59:00	1969-12-31 22:00:04	NULL	2018-01-18 22:32:30	1969-12-31 23:59:00	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:52.408	NULL	NULL
-1969-12-31 23:59:46	1969-12-31 22:00:04	NULL	1920-10-24 17:28:20	1969-12-31 23:59:46	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:11.065	NULL	NULL
-1970-01-01 00:00:59	1969-12-31 22:00:04	NULL	1933-12-12 13:05:00	1970-01-01 00:00:59	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:10.956	NULL	NULL
-1969-12-31 23:59:52	1969-12-31 22:00:04	NULL	1911-05-19 01:28:20	1969-12-31 23:59:52	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:03.136	NULL	NULL
-1970-01-01 00:00:05	1969-12-31 22:00:04	NULL	1937-10-26 06:48:20	1970-01-01 00:00:05	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 16:00:10.973	NULL	NULL
-1969-12-31 23:59:36	1969-12-31 22:00:04	NULL	1996-04-10 04:36:40	1969-12-31 23:59:36	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:53.145	NULL	NULL
-1969-12-31 23:59:10	1969-12-31 22:00:04	NULL	1937-04-28 23:05:50	1969-12-31 23:59:10	1969-12-31 22:00:04	NULL	1970-01-01 00:00:00	1969-12-31 15:59:54.733	NULL	NULL
-1970-01-01 00:00:11	NULL	1967-12-15 03:06:58	2027-02-19 16:15:50	1970-01-01 00:00:11	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:02.351	NULL	NULL
-1970-01-01 00:00:11	NULL	1959-05-16 11:19:43	2009-01-30 14:50:00	1970-01-01 00:00:11	NULL	1970-01-01 00:00:01	1970-01-01 00:00:00	1969-12-31 16:00:02.351	NULL	NULL
+1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1906-06-05 13:34:10	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.748	NULL	NULL
+1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	2020-09-11 19:50:00	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.817	NULL	NULL
+1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	2015-04-23 22:10:50	1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 16:00:12.935	NULL	NULL
+1969-12-31 15:59:09	NULL	1994-07-07 10:09:31	2003-05-25 21:27:30	1969-12-31 15:59:09	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:08.451	NULL	NULL
+1969-12-31 15:59:09	NULL	1993-09-08 22:51:22	1908-10-29 07:05:50	1969-12-31 15:59:09	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:08.451	NULL	NULL
+1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1958-07-07 21:05:50	1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.129	NULL	NULL
+1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1911-02-07 01:30:00	1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:58.614	NULL	NULL
+1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1989-05-28 20:33:20	1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 16:00:04.679	NULL	NULL
+1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1944-10-18 03:23:20	1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:50.235	NULL	NULL
+1969-12-31 16:00:08	NULL	1949-01-13 00:21:02	1940-06-26 15:47:30	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1966-09-27 07:32:46	1928-05-26 10:07:30	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1995-07-07 22:01:04	1997-07-05 20:58:20	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1948-10-12 08:01:29	2020-05-04 04:20:50	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1998-03-27 00:56:12	1910-12-27 06:10:00	1969-12-31 16:00:08	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 16:00:08	NULL	1999-07-01 15:14:06	2008-03-13 02:07:30	1969-12-31 16:00:08	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL
+1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1919-02-22 13:13:20	1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:13.15	NULL	NULL
+1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	2018-11-16 20:30:00	1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:55.9	NULL	NULL
+1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	2018-01-18 14:32:30	1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:52.408	NULL	NULL
+1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1920-10-24 09:28:20	1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:11.065	NULL	NULL
+1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1933-12-12 05:05:00	1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.956	NULL	NULL
+1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1911-05-18 17:28:20	1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:03.136	NULL	NULL
+1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1937-10-25 22:48:20	1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.973	NULL	NULL
+1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1996-04-09 21:36:40	1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.145	NULL	NULL
+1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1937-04-28 15:05:50	1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:54.733	NULL	NULL
+1969-12-31 16:00:11	NULL	1967-12-14 19:06:58	2027-02-19 08:15:50	1969-12-31 16:00:11	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:02.351	NULL	NULL
+1969-12-31 16:00:11	NULL	1959-05-16 04:19:43	2009-01-30 06:50:00	1969-12-31 16:00:11	NULL	1969-12-31 16:00:01	1969-12-31 16:00:00	1969-12-31 16:00:02.351	NULL	NULL
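
The pattern in this golden file is uniform: every cast yields the same instant before and after the revert, and only the rendering moves from UTC wall clock back to the JVM default zone, eight hours earlier for these 1969/1970 dates (US/Pacific standard time, assumed below). Epoch zero itself shows the shift: it prints as 1970-01-01 00:00:00 in UTC but as 1969-12-31 16:00:00 in US/Pacific. A short java.time illustration:

    import java.time.Instant;
    import java.time.ZoneId;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;

    public class EpochZeroRendering {
      public static void main(String[] args) {
        DateTimeFormatter f = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
        Instant epochZero = Instant.ofEpochMilli(0L);
        // Removed golden values rendered instants in UTC:
        System.out.println(f.format(epochZero.atZone(ZoneOffset.UTC)));          // 1970-01-01 00:00:00
        // Restored golden values render them in the JVM default zone:
        System.out.println(f.format(epochZero.atZone(ZoneId.of("US/Pacific")))); // 1969-12-31 16:00:00
      }
    }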

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap_uncompressed.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap_uncompressed.q.out b/ql/src/test/results/clientpositive/llap_uncompressed.q.out
index fd3ed1d..a11a30c 100644
--- a/ql/src/test/results/clientpositive/llap_uncompressed.q.out
+++ b/ql/src/test/results/clientpositive/llap_uncompressed.q.out
@@ -137,7 +137,7 @@ POSTHOOK: query: select sum(hash(*)) from llap_temp_table
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@llap_temp_table
 #### A masked pattern was here ####
--107598224420
+-42787391908
 PREHOOK: query: explain
 select * from orc_llap_n0 where cint > 10 and cint < 5000000
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/localtimezone.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/localtimezone.q.out b/ql/src/test/results/clientpositive/localtimezone.q.out
index b4c6d86..a76e4a3 100644
--- a/ql/src/test/results/clientpositive/localtimezone.q.out
+++ b/ql/src/test/results/clientpositive/localtimezone.q.out
@@ -19,24 +19,24 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@date_test
 PREHOOK: query: insert into `date_test` VALUES
-  (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone))
+  ('2011-01-01 01:01:01.123'),
+  ('2011-01-01 01:01:01.123 Europe/Rome'),
+  ('2011-01-01 01:01:01.123 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912'),
+  ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
+  ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912 xyz')
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
 PREHOOK: Output: default@date_test
 POSTHOOK: query: insert into `date_test` VALUES
-  (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone))
+  ('2011-01-01 01:01:01.123'),
+  ('2011-01-01 01:01:01.123 Europe/Rome'),
+  ('2011-01-01 01:01:01.123 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912'),
+  ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
+  ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912 xyz')
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 POSTHOOK: Output: default@date_test
@@ -50,24 +50,24 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@timestamp_test
 PREHOOK: query: insert into `timestamp_test` VALUES
-  (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone))
+  ('2011-01-01 01:01:01.123'),
+  ('2011-01-01 01:01:01.123 Europe/Rome'),
+  ('2011-01-01 01:01:01.123 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912'),
+  ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
+  ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912 xyz')
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
 PREHOOK: Output: default@timestamp_test
 POSTHOOK: query: insert into `timestamp_test` VALUES
-  (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone))
+  ('2011-01-01 01:01:01.123'),
+  ('2011-01-01 01:01:01.123 Europe/Rome'),
+  ('2011-01-01 01:01:01.123 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912'),
+  ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
+  ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912 xyz')
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 POSTHOOK: Output: default@timestamp_test
@@ -81,24 +81,24 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@timestamptz_test
 PREHOOK: query: insert into `timestamptz_test` VALUES
-  (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone))
+  ('2011-01-01 01:01:01.123'),
+  ('2011-01-01 01:01:01.123 Europe/Rome'),
+  ('2011-01-01 01:01:01.123 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912'),
+  ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
+  ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912 xyz')
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
 PREHOOK: Output: default@timestamptz_test
 POSTHOOK: query: insert into `timestamptz_test` VALUES
-  (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
-  (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone))
+  ('2011-01-01 01:01:01.123'),
+  ('2011-01-01 01:01:01.123 Europe/Rome'),
+  ('2011-01-01 01:01:01.123 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912'),
+  ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
+  ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
+  ('2011-01-01 01:01:01.12345678912 xyz')
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 POSTHOOK: Output: default@timestamptz_test
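
The revert restores the recorded INSERT statements to plain string literals; the zone-suffixed format those literals use, a wall clock optionally followed by a zone id or offset, is the interesting part of this test. As a rough approximation with plain java.time (an illustration only, not Hive's TimestampTZ parser, which additionally applies its own defaulting and error handling, e.g. for the deliberately invalid 'xyz' zone):

    import java.time.ZonedDateTime;
    import java.time.format.DateTimeFormatter;

    public class ZoneSuffixedLiteral {
      public static void main(String[] args) {
        // 'VV' consumes a zone id such as Europe/Rome or GMT-05:00.
        DateTimeFormatter f = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS VV");
        ZonedDateTime rome = ZonedDateTime.parse("2011-01-01 01:01:01.123 Europe/Rome", f);
        ZonedDateTime gmt5 = ZonedDateTime.parse("2011-01-01 01:01:01.123 GMT-05:00", f);
        System.out.println(rome.toInstant()); // 2011-01-01T00:01:01.123Z
        System.out.println(gmt5.toInstant()); // 2011-01-01T06:01:01.123Z
      }
    }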

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/localtimezone2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/localtimezone2.q.out b/ql/src/test/results/clientpositive/localtimezone2.q.out
deleted file mode 100644
index fb4bd17..0000000
--- a/ql/src/test/results/clientpositive/localtimezone2.q.out
+++ /dev/null
@@ -1,148 +0,0 @@
-PREHOOK: query: drop table `table_tsltz`
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table `table_tsltz`
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE table_tsltz (tz VARCHAR(200),
-                         c_ts1 TIMESTAMP,
-                         c_ts2 TIMESTAMP,
-                         c_tsltz1 TIMESTAMP WITH LOCAL TIME ZONE,
-                         c_tsltz2 TIMESTAMP WITH LOCAL TIME ZONE)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@table_tsltz
-POSTHOOK: query: CREATE TABLE table_tsltz (tz VARCHAR(200),
-                         c_ts1 TIMESTAMP,
-                         c_ts2 TIMESTAMP,
-                         c_tsltz1 TIMESTAMP WITH LOCAL TIME ZONE,
-                         c_tsltz2 TIMESTAMP WITH LOCAL TIME ZONE)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@table_tsltz
-PREHOOK: query: insert into table_tsltz values (
-  '-08:00',
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@table_tsltz
-POSTHOOK: query: insert into table_tsltz values (
-  '-08:00',
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@table_tsltz
-POSTHOOK: Lineage: table_tsltz.c_ts1 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.c_ts2 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.c_tsltz1 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.c_tsltz2 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.tz SCRIPT []
-PREHOOK: query: insert into table_tsltz values (
-  'UTC',
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@table_tsltz
-POSTHOOK: query: insert into table_tsltz values (
-  'UTC',
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@table_tsltz
-POSTHOOK: Lineage: table_tsltz.c_ts1 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.c_ts2 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.c_tsltz1 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.c_tsltz2 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.tz SCRIPT []
-PREHOOK: query: insert into table_tsltz values (
-  '+02:00',
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@table_tsltz
-POSTHOOK: query: insert into table_tsltz values (
-  '+02:00',
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@table_tsltz
-POSTHOOK: Lineage: table_tsltz.c_ts1 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.c_ts2 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.c_tsltz1 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.c_tsltz2 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.tz SCRIPT []
-PREHOOK: query: insert into table_tsltz values (
-  'US/Pacific',
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@table_tsltz
-POSTHOOK: query: insert into table_tsltz values (
-  'US/Pacific',
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
-  cast('2016-01-01 00:00:00' as timestamp),
-  cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@table_tsltz
-POSTHOOK: Lineage: table_tsltz.c_ts1 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.c_ts2 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.c_tsltz1 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.c_tsltz2 SCRIPT []
-POSTHOOK: Lineage: table_tsltz.tz SCRIPT []
-PREHOOK: query: select tz,
-    c_ts1, c_ts2,
-    cast(c_tsltz1 as VARCHAR(200)) as c_tsltz1, cast(c_tsltz2 as VARCHAR(200)) as c_tsltz2
-from table_tsltz
-PREHOOK: type: QUERY
-PREHOOK: Input: default@table_tsltz
-#### A masked pattern was here ####
-POSTHOOK: query: select tz,
-    c_ts1, c_ts2,
-    cast(c_tsltz1 as VARCHAR(200)) as c_tsltz1, cast(c_tsltz2 as VARCHAR(200)) as c_tsltz2
-from table_tsltz
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@table_tsltz
-#### A masked pattern was here ####
--08:00	2016-01-01 00:00:00	2015-12-31 21:00:00	2016-01-01 00:00:00.0 US/Pacific	2015-12-31 21:00:00.0 US/Pacific
-UTC	2016-01-01 00:00:00	2016-01-01 05:00:00	2015-12-31 16:00:00.0 US/Pacific	2015-12-31 21:00:00.0 US/Pacific
-+02:00	2016-01-01 00:00:00	2016-01-01 07:00:00	2015-12-31 14:00:00.0 US/Pacific	2015-12-31 21:00:00.0 US/Pacific
-US/Pacific	2016-01-01 00:00:00	2015-12-31 21:00:00	2016-01-01 00:00:00.0 US/Pacific	2015-12-31 21:00:00.0 US/Pacific
-PREHOOK: query: select tz,
-    c_ts1, c_ts2,
-    cast(c_tsltz1 as VARCHAR(200)) as c_tsltz1, cast(c_tsltz2 as VARCHAR(200)) as c_tsltz2
-from table_tsltz
-PREHOOK: type: QUERY
-PREHOOK: Input: default@table_tsltz
-#### A masked pattern was here ####
-POSTHOOK: query: select tz,
-    c_ts1, c_ts2,
-    cast(c_tsltz1 as VARCHAR(200)) as c_tsltz1, cast(c_tsltz2 as VARCHAR(200)) as c_tsltz2
-from table_tsltz
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@table_tsltz
-#### A masked pattern was here ####
--08:00	2016-01-01 00:00:00	2015-12-31 21:00:00	2016-01-01 08:00:00.0 UTC	2016-01-01 05:00:00.0 UTC
-UTC	2016-01-01 00:00:00	2016-01-01 05:00:00	2016-01-01 00:00:00.0 UTC	2016-01-01 05:00:00.0 UTC
-+02:00	2016-01-01 00:00:00	2016-01-01 07:00:00	2015-12-31 22:00:00.0 UTC	2016-01-01 05:00:00.0 UTC
-US/Pacific	2016-01-01 00:00:00	2015-12-31 21:00:00	2016-01-01 08:00:00.0 UTC	2016-01-01 05:00:00.0 UTC
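
Although deleted, the golden file above still documents the TIMESTAMP WITH LOCAL TIME ZONE contract this revert touches: c_tsltz2 was written once as '2016-01-01 00:00:00 -05:00', a single fixed instant, and the two SELECT blocks show only its display following the session zone (2015-12-31 21:00:00.0 US/Pacific versus 2016-01-01 05:00:00.0 UTC), while the plain TIMESTAMP column c_ts1 never moves. In plain java.time terms (not Hive's TimestampTZ API):

    import java.time.ZoneId;
    import java.time.ZonedDateTime;

    public class TsltzRendering {
      public static void main(String[] args) {
        ZonedDateTime written = ZonedDateTime.parse("2016-01-01T00:00:00-05:00");
        // Session zone US/Pacific: same instant, earlier wall clock.
        System.out.println(written.withZoneSameInstant(ZoneId.of("US/Pacific")));
        // -> 2015-12-31T21:00-08:00[US/Pacific]
        // Session zone UTC:
        System.out.println(written.withZoneSameInstant(ZoneId.of("UTC")));
        // -> 2016-01-01T05:00Z[UTC]
      }
    }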

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/orc_file_dump.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_file_dump.q.out b/ql/src/test/results/clientpositive/orc_file_dump.q.out
index 77be7a5..5101312 100644
--- a/ql/src/test/results/clientpositive/orc_file_dump.q.out
+++ b/ql/src/test/results/clientpositive/orc_file_dump.q.out
@@ -129,7 +129,7 @@ File Statistics:
   Column 11: count: 1049 hasNull: false bytesOnDisk: 2468 sum: 13278
 
 Stripes:
-  Stripe: offset: 3 data: 22405 rows: 1049 tail: 242 index: 8956
+  Stripe: offset: 3 data: 22405 rows: 1049 tail: 253 index: 8956
     Stream: column 0 section ROW_INDEX start: 3 length 20
     Stream: column 0 section BLOOM_FILTER_UTF8 start: 23 length 34
     Stream: column 1 section ROW_INDEX start: 57 length 58
@@ -269,7 +269,7 @@ Stripes:
       Entry 1: numHashFunctions: 4 bitCount: 6272 popCount: 98 loadFactor: 0.0156 expectedFpp: 5.9604645E-8
       Stripe level merge: numHashFunctions: 4 bitCount: 6272 popCount: 102 loadFactor: 0.0163 expectedFpp: 6.9948186E-8
 
-File length: 32298 bytes
+File length: 32309 bytes
 Padding length: 0 bytes
 Padding ratio: 0%
 ________________________________________________________________________________________________________________________
@@ -326,7 +326,7 @@ File Statistics:
   Column 11: count: 1049 hasNull: false bytesOnDisk: 2468 sum: 13278
 
 Stripes:
-  Stripe: offset: 3 data: 22405 rows: 1049 tail: 240 index: 13603
+  Stripe: offset: 3 data: 22405 rows: 1049 tail: 248 index: 13603
     Stream: column 0 section ROW_INDEX start: 3 length 20
     Stream: column 0 section BLOOM_FILTER_UTF8 start: 23 length 43
     Stream: column 1 section ROW_INDEX start: 66 length 58
@@ -466,7 +466,7 @@ Stripes:
       Entry 1: numHashFunctions: 7 bitCount: 9600 popCount: 174 loadFactor: 0.0181 expectedFpp: 6.426078E-13
       Stripe level merge: numHashFunctions: 7 bitCount: 9600 popCount: 181 loadFactor: 0.0189 expectedFpp: 8.4693775E-13
 
-File length: 36943 bytes
+File length: 36950 bytes
 Padding length: 0 bytes
 Padding ratio: 0%
 ________________________________________________________________________________________________________________________
@@ -535,7 +535,7 @@ File Statistics:
   Column 11: count: 1049 hasNull: false bytesOnDisk: 2468 sum: 13278
 
 Stripes:
-  Stripe: offset: 3 data: 22405 rows: 1049 tail: 242 index: 8956
+  Stripe: offset: 3 data: 22405 rows: 1049 tail: 253 index: 8956
     Stream: column 0 section ROW_INDEX start: 3 length 20
     Stream: column 0 section BLOOM_FILTER_UTF8 start: 23 length 34
     Stream: column 1 section ROW_INDEX start: 57 length 58
@@ -675,7 +675,7 @@ Stripes:
       Entry 1: numHashFunctions: 4 bitCount: 6272 popCount: 98 loadFactor: 0.0156 expectedFpp: 5.9604645E-8
       Stripe level merge: numHashFunctions: 4 bitCount: 6272 popCount: 102 loadFactor: 0.0163 expectedFpp: 6.9948186E-8
 
-File length: 32298 bytes
+File length: 32309 bytes
 Padding length: 0 bytes
 Padding ratio: 0%
 ________________________________________________________________________________________________________________________

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/orc_merge11.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_merge11.q.out b/ql/src/test/results/clientpositive/orc_merge11.q.out
index 5327299..8e7840c 100644
--- a/ql/src/test/results/clientpositive/orc_merge11.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge11.q.out
@@ -96,7 +96,7 @@ File Statistics:
   Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0
 
 Stripes:
-  Stripe: offset: 3 data: 5761 rows: 50000 tail: 99 index: 433
+  Stripe: offset: 3 data: 5761 rows: 50000 tail: 112 index: 433
     Stream: column 0 section ROW_INDEX start: 3 length 17
     Stream: column 1 section ROW_INDEX start: 20 length 73
     Stream: column 2 section ROW_INDEX start: 93 length 79
@@ -155,7 +155,7 @@ Stripes:
       Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304
       Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64
 
-File length: 6672 bytes
+File length: 6685 bytes
 Padding length: 0 bytes
 Padding ratio: 0%
 ________________________________________________________________________________________________________________________
@@ -187,7 +187,7 @@ File Statistics:
   Column 5: count: 50000 hasNull: false bytesOnDisk: 64 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0
 
 Stripes:
-  Stripe: offset: 3 data: 5761 rows: 50000 tail: 99 index: 433
+  Stripe: offset: 3 data: 5761 rows: 50000 tail: 112 index: 433
     Stream: column 0 section ROW_INDEX start: 3 length 17
     Stream: column 1 section ROW_INDEX start: 20 length 73
     Stream: column 2 section ROW_INDEX start: 93 length 79
@@ -246,7 +246,7 @@ Stripes:
       Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304
       Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64
 
-File length: 6672 bytes
+File length: 6685 bytes
 Padding length: 0 bytes
 Padding ratio: 0%
 ________________________________________________________________________________________________________________________
@@ -306,7 +306,7 @@ File Statistics:
   Column 5: count: 100000 hasNull: false bytesOnDisk: 128 min: 1969-12-31 16:00:00.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:00:00.0 max UTC: 1969-12-31 08:04:10.0
 
 Stripes:
-  Stripe: offset: 3 data: 5761 rows: 50000 tail: 99 index: 433
+  Stripe: offset: 3 data: 5761 rows: 50000 tail: 112 index: 433
     Stream: column 0 section ROW_INDEX start: 3 length 17
     Stream: column 1 section ROW_INDEX start: 20 length 73
     Stream: column 2 section ROW_INDEX start: 93 length 79
@@ -364,22 +364,22 @@ Stripes:
       Entry 2: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,354,22,0,156,32
       Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304
       Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64
-  Stripe: offset: 6296 data: 5761 rows: 50000 tail: 99 index: 433
-    Stream: column 0 section ROW_INDEX start: 6296 length 17
-    Stream: column 1 section ROW_INDEX start: 6313 length 73
-    Stream: column 2 section ROW_INDEX start: 6386 length 79
-    Stream: column 3 section ROW_INDEX start: 6465 length 85
-    Stream: column 4 section ROW_INDEX start: 6550 length 92
-    Stream: column 5 section ROW_INDEX start: 6642 length 87
-    Stream: column 1 section DATA start: 6729 length 30
-    Stream: column 2 section DATA start: 6759 length 24
-    Stream: column 2 section LENGTH start: 6783 length 8
-    Stream: column 2 section DICTIONARY_DATA start: 6791 length 23
-    Stream: column 3 section DATA start: 6814 length 5114
-    Stream: column 4 section DATA start: 11928 length 480
-    Stream: column 4 section SECONDARY start: 12408 length 18
-    Stream: column 5 section DATA start: 12426 length 46
-    Stream: column 5 section SECONDARY start: 12472 length 18
+  Stripe: offset: 6309 data: 5761 rows: 50000 tail: 112 index: 433
+    Stream: column 0 section ROW_INDEX start: 6309 length 17
+    Stream: column 1 section ROW_INDEX start: 6326 length 73
+    Stream: column 2 section ROW_INDEX start: 6399 length 79
+    Stream: column 3 section ROW_INDEX start: 6478 length 85
+    Stream: column 4 section ROW_INDEX start: 6563 length 92
+    Stream: column 5 section ROW_INDEX start: 6655 length 87
+    Stream: column 1 section DATA start: 6742 length 30
+    Stream: column 2 section DATA start: 6772 length 24
+    Stream: column 2 section LENGTH start: 6796 length 8
+    Stream: column 2 section DICTIONARY_DATA start: 6804 length 23
+    Stream: column 3 section DATA start: 6827 length 5114
+    Stream: column 4 section DATA start: 11941 length 480
+    Stream: column 4 section SECONDARY start: 12421 length 18
+    Stream: column 5 section DATA start: 12439 length 46
+    Stream: column 5 section SECONDARY start: 12485 length 18
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT_V2
     Encoding column 2: DICTIONARY_V2[6]
@@ -423,7 +423,7 @@ Stripes:
       Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,506,294,0,232,304
       Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 min UTC: 1969-12-31 08:04:10.0 max UTC: 1969-12-31 08:04:10.0 positions: 0,666,54,0,312,64
 
-File length: 12978 bytes
+File length: 13004 bytes
 Padding length: 0 bytes
 Padding ratio: 0%
 ________________________________________________________________________________________________________________________
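
The offset churn in orc_merge11.q.out is purely mechanical: each stripe tail grows from 99 to 112 bytes after the revert, so every later stripe offset and the file length shift by 13 bytes per preceding stripe. A quick arithmetic check against the values above:

    public class StripeOffsetDelta {
      public static void main(String[] args) {
        int delta = 112 - 99;                  // 13 extra bytes per stripe tail
        System.out.println(6296 + delta);      // 6309: offset of the second stripe
        System.out.println(6672 + delta);      // 6685: single-stripe file length
        System.out.println(12978 + 2 * delta); // 13004: two-stripe file length
      }
    }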


http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/tez/orc_merge12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/orc_merge12.q.out b/ql/src/test/results/clientpositive/tez/orc_merge12.q.out
index acb2fb9..d29d704 100644
--- a/ql/src/test/results/clientpositive/tez/orc_merge12.q.out
+++ b/ql/src/test/results/clientpositive/tez/orc_merge12.q.out
@@ -111,7 +111,7 @@ POSTHOOK: query: select sum(hash(*)) from alltypesorc3xcols
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc3xcols
 POSTHOOK: Output: hdfs://### HDFS PATH ###
--73477136966
+-302946892512
 PREHOOK: query: alter table alltypesorc3xcols concatenate
 PREHOOK: type: ALTER_TABLE_MERGE
 PREHOOK: Input: default@alltypesorc3xcols
@@ -137,7 +137,7 @@ POSTHOOK: query: select sum(hash(*)) from alltypesorc3xcols
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc3xcols
 POSTHOOK: Output: hdfs://### HDFS PATH ###
--73477136966
+-302946892512
 PREHOOK: query: select * from alltypesorc3xcols limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alltypesorc3xcols
@@ -147,8 +147,8 @@ Structure for hdfs://### HDFS PATH ###
 File Version: 0.12 with ORC_135
 Rows: 24576
 Compression: ZLIB
-Compression size: 131072
-Type: struct<atinyint:tinyint,asmallint:smallint,aint:int,abigint:bigint,afloat:float,adouble:double,astring1:string,astring2:string,atimestamp1:timestamp,atimestamp2:timestamp,aboolean1:boolean,aboolean2:boolean,btinyint:tinyint,bsmallint:smallint,bint:int,bbigint:bigint,bfloat:float,bdouble:double,bstring1:string,bstring2:string,btimestamp1:timestamp,btimestamp2:timestamp,bboolean1:boolean,bboolean2:boolean,ctinyint:tinyint,csmallint:smallint,cint:int,cbigint:bigint,cfloat:float,cdouble:double,cstring1:string,cstring2:string,ctimestamp1:timestamp,ctimestamp2:timestamp,cboolean1:boolean,cboolean2:boolean>
+Compression size: 262144
+Type: struct<_col0:tinyint,_col1:smallint,_col2:int,_col3:bigint,_col4:float,_col5:double,_col6:string,_col7:string,_col8:timestamp,_col9:timestamp,_col10:boolean,_col11:boolean,_col12:tinyint,_col13:smallint,_col14:int,_col15:bigint,_col16:float,_col17:double,_col18:string,_col19:string,_col20:timestamp,_col21:timestamp,_col22:boolean,_col23:boolean,_col24:tinyint,_col25:smallint,_col26:int,_col27:bigint,_col28:float,_col29:double,_col30:string,_col31:string,_col32:timestamp,_col33:timestamp,_col34:boolean,_col35:boolean>
 
 Stripe Statistics:
   Stripe 1:
@@ -161,8 +161,8 @@ Stripe Statistics:
     Column 6: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
     Column 7: count: 12288 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 442368
     Column 8: count: 12288 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 884736
-    Column 9: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-    Column 10: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+    Column 9: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+    Column 10: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
     Column 11: count: 9174 hasNull: true true: 6138
     Column 12: count: 9173 hasNull: true true: 3983
     Column 13: count: 9173 hasNull: true min: -64 max: 62 sum: -39856
@@ -173,8 +173,8 @@ Stripe Statistics:
     Column 18: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
     Column 19: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881
     Column 20: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134
-    Column 21: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-    Column 22: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+    Column 21: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+    Column 22: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
     Column 23: count: 9174 hasNull: true true: 6138
     Column 24: count: 9173 hasNull: true true: 3983
     Column 25: count: 9173 hasNull: true min: -64 max: 62 sum: -39856
@@ -185,8 +185,8 @@ Stripe Statistics:
     Column 30: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
     Column 31: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881
     Column 32: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134
-    Column 33: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-    Column 34: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+    Column 33: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+    Column 34: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
     Column 35: count: 9174 hasNull: true true: 6138
     Column 36: count: 9173 hasNull: true true: 3983
   Stripe 2:
@@ -199,8 +199,8 @@ Stripe Statistics:
     Column 6: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
     Column 7: count: 12288 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 442368
     Column 8: count: 12288 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 884736
-    Column 9: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-    Column 10: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+    Column 9: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+    Column 10: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
     Column 11: count: 9174 hasNull: true true: 6138
     Column 12: count: 9173 hasNull: true true: 3983
     Column 13: count: 9173 hasNull: true min: -64 max: 62 sum: -39856
@@ -211,8 +211,8 @@ Stripe Statistics:
     Column 18: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
     Column 19: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881
     Column 20: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134
-    Column 21: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-    Column 22: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+    Column 21: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+    Column 22: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
     Column 23: count: 9174 hasNull: true true: 6138
     Column 24: count: 9173 hasNull: true true: 3983
     Column 25: count: 9173 hasNull: true min: -64 max: 62 sum: -39856
@@ -223,8 +223,8 @@ Stripe Statistics:
     Column 30: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
     Column 31: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881
     Column 32: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134
-    Column 33: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-    Column 34: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+    Column 33: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+    Column 34: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
     Column 35: count: 9174 hasNull: true true: 6138
     Column 36: count: 9173 hasNull: true true: 3983
 
@@ -238,8 +238,8 @@ File Statistics:
   Column 6: count: 18348 hasNull: true min: -16379.0 max: 9763215.5639 sum: 1.12447306061E8
   Column 7: count: 24576 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 884736
   Column 8: count: 24576 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 1769472
-  Column 9: count: 18346 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-  Column 10: count: 18348 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+  Column 9: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+  Column 10: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
   Column 11: count: 18348 hasNull: true true: 12276
   Column 12: count: 18346 hasNull: true true: 7966
   Column 13: count: 18346 hasNull: true min: -64 max: 62 sum: -79712
@@ -250,8 +250,8 @@ File Statistics:
   Column 18: count: 18348 hasNull: true min: -16379.0 max: 9763215.5639 sum: 1.12447306061E8
   Column 19: count: 18348 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 255762
   Column 20: count: 18346 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 298268
-  Column 21: count: 18346 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-  Column 22: count: 18348 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+  Column 21: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+  Column 22: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
   Column 23: count: 18348 hasNull: true true: 12276
   Column 24: count: 18346 hasNull: true true: 7966
   Column 25: count: 18346 hasNull: true min: -64 max: 62 sum: -79712
@@ -262,136 +262,136 @@ File Statistics:
   Column 30: count: 18348 hasNull: true min: -16379.0 max: 9763215.5639 sum: 1.12447306061E8
   Column 31: count: 18348 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 255762
   Column 32: count: 18346 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 298268
-  Column 33: count: 18346 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
-  Column 34: count: 18348 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+  Column 33: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
+  Column 34: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808
   Column 35: count: 18348 hasNull: true true: 12276
   Column 36: count: 18346 hasNull: true true: 7966
 
 Stripes:
-  Stripe: offset: 3 data: 1498336 rows: 12288 tail: 493 index: 2821
+  Stripe: offset: 3 data: 1500017 rows: 12288 tail: 501 index: 2836
     Stream: column 0 section ROW_INDEX start: 3 length 21
     Stream: column 1 section ROW_INDEX start: 24 length 53
     Stream: column 2 section ROW_INDEX start: 77 length 67
-    Stream: column 3 section ROW_INDEX start: 144 length 79
-    Stream: column 4 section ROW_INDEX start: 223 length 83
-    Stream: column 5 section ROW_INDEX start: 306 length 77
-    Stream: column 6 section ROW_INDEX start: 383 length 77
-    Stream: column 7 section ROW_INDEX start: 460 length 170
-    Stream: column 8 section ROW_INDEX start: 630 length 264
-    Stream: column 9 section ROW_INDEX start: 894 length 63
-    Stream: column 10 section ROW_INDEX start: 957 length 57
-    Stream: column 11 section ROW_INDEX start: 1014 length 47
-    Stream: column 12 section ROW_INDEX start: 1061 length 47
-    Stream: column 13 section ROW_INDEX start: 1108 length 53
-    Stream: column 14 section ROW_INDEX start: 1161 length 67
-    Stream: column 15 section ROW_INDEX start: 1228 length 79
-    Stream: column 16 section ROW_INDEX start: 1307 length 83
-    Stream: column 17 section ROW_INDEX start: 1390 length 77
-    Stream: column 18 section ROW_INDEX start: 1467 length 77
-    Stream: column 19 section ROW_INDEX start: 1544 length 115
-    Stream: column 20 section ROW_INDEX start: 1659 length 93
-    Stream: column 21 section ROW_INDEX start: 1752 length 63
-    Stream: column 22 section ROW_INDEX start: 1815 length 57
-    Stream: column 23 section ROW_INDEX start: 1872 length 47
-    Stream: column 24 section ROW_INDEX start: 1919 length 47
-    Stream: column 25 section ROW_INDEX start: 1966 length 53
-    Stream: column 26 section ROW_INDEX start: 2019 length 67
-    Stream: column 27 section ROW_INDEX start: 2086 length 79
-    Stream: column 28 section ROW_INDEX start: 2165 length 83
-    Stream: column 29 section ROW_INDEX start: 2248 length 77
-    Stream: column 30 section ROW_INDEX start: 2325 length 77
-    Stream: column 31 section ROW_INDEX start: 2402 length 115
-    Stream: column 32 section ROW_INDEX start: 2517 length 93
-    Stream: column 33 section ROW_INDEX start: 2610 length 63
-    Stream: column 34 section ROW_INDEX start: 2673 length 57
-    Stream: column 35 section ROW_INDEX start: 2730 length 47
-    Stream: column 36 section ROW_INDEX start: 2777 length 47
-    Stream: column 1 section PRESENT start: 2824 length 51
-    Stream: column 1 section DATA start: 2875 length 5448
-    Stream: column 2 section PRESENT start: 8323 length 53
-    Stream: column 2 section DATA start: 8376 length 12078
-    Stream: column 3 section PRESENT start: 20454 length 53
-    Stream: column 3 section DATA start: 20507 length 24479
-    Stream: column 4 section PRESENT start: 44986 length 52
-    Stream: column 4 section DATA start: 45038 length 24479
-    Stream: column 5 section PRESENT start: 69517 length 51
-    Stream: column 5 section DATA start: 69568 length 9927
-    Stream: column 6 section PRESENT start: 79495 length 53
-    Stream: column 6 section DATA start: 79548 length 19755
-    Stream: column 7 section DATA start: 99303 length 259558
-    Stream: column 7 section LENGTH start: 358861 length 12
-    Stream: column 8 section DATA start: 358873 length 518777
-    Stream: column 8 section LENGTH start: 877650 length 12
-    Stream: column 9 section PRESENT start: 877662 length 52
-    Stream: column 9 section DATA start: 877714 length 7769
-    Stream: column 9 section SECONDARY start: 885483 length 9448
-    Stream: column 10 section PRESENT start: 894931 length 58
-    Stream: column 10 section DATA start: 894989 length 7778
-    Stream: column 10 section SECONDARY start: 902767 length 9469
-    Stream: column 11 section PRESENT start: 912236 length 51
-    Stream: column 11 section DATA start: 912287 length 782
-    Stream: column 12 section PRESENT start: 913069 length 54
-    Stream: column 12 section DATA start: 913123 length 783
-    Stream: column 13 section PRESENT start: 913906 length 51
-    Stream: column 13 section DATA start: 913957 length 5448
-    Stream: column 14 section PRESENT start: 919405 length 53
-    Stream: column 14 section DATA start: 919458 length 12078
-    Stream: column 15 section PRESENT start: 931536 length 53
-    Stream: column 15 section DATA start: 931589 length 24479
-    Stream: column 16 section PRESENT start: 956068 length 52
-    Stream: column 16 section DATA start: 956120 length 24479
-    Stream: column 17 section PRESENT start: 980599 length 51
-    Stream: column 17 section DATA start: 980650 length 9927
-    Stream: column 18 section PRESENT start: 990577 length 53
-    Stream: column 18 section DATA start: 990630 length 19755
-    Stream: column 19 section PRESENT start: 1010385 length 51
-    Stream: column 19 section DATA start: 1010436 length 10942
-    Stream: column 19 section LENGTH start: 1021378 length 3722
-    Stream: column 19 section DICTIONARY_DATA start: 1025100 length 65435
-    Stream: column 20 section PRESENT start: 1090535 length 54
-    Stream: column 20 section DATA start: 1090589 length 10939
-    Stream: column 20 section LENGTH start: 1101528 length 3739
-    Stream: column 20 section DICTIONARY_DATA start: 1105267 length 66022
-    Stream: column 21 section PRESENT start: 1171289 length 52
-    Stream: column 21 section DATA start: 1171341 length 7769
-    Stream: column 21 section SECONDARY start: 1179110 length 9448
-    Stream: column 22 section PRESENT start: 1188558 length 58
-    Stream: column 22 section DATA start: 1188616 length 7778
-    Stream: column 22 section SECONDARY start: 1196394 length 9469
-    Stream: column 23 section PRESENT start: 1205863 length 51
-    Stream: column 23 section DATA start: 1205914 length 782
-    Stream: column 24 section PRESENT start: 1206696 length 54
-    Stream: column 24 section DATA start: 1206750 length 783
-    Stream: column 25 section PRESENT start: 1207533 length 51
-    Stream: column 25 section DATA start: 1207584 length 5448
-    Stream: column 26 section PRESENT start: 1213032 length 53
-    Stream: column 26 section DATA start: 1213085 length 12078
-    Stream: column 27 section PRESENT start: 1225163 length 53
-    Stream: column 27 section DATA start: 1225216 length 24479
-    Stream: column 28 section PRESENT start: 1249695 length 52
-    Stream: column 28 section DATA start: 1249747 length 24479
-    Stream: column 29 section PRESENT start: 1274226 length 51
-    Stream: column 29 section DATA start: 1274277 length 9927
-    Stream: column 30 section PRESENT start: 1284204 length 53
-    Stream: column 30 section DATA start: 1284257 length 19755
-    Stream: column 31 section PRESENT start: 1304012 length 51
-    Stream: column 31 section DATA start: 1304063 length 10942
-    Stream: column 31 section LENGTH start: 1315005 length 3722
-    Stream: column 31 section DICTIONARY_DATA start: 1318727 length 65435
-    Stream: column 32 section PRESENT start: 1384162 length 54
-    Stream: column 32 section DATA start: 1384216 length 10939
-    Stream: column 32 section LENGTH start: 1395155 length 3739
-    Stream: column 32 section DICTIONARY_DATA start: 1398894 length 66022
-    Stream: column 33 section PRESENT start: 1464916 length 52
-    Stream: column 33 section DATA start: 1464968 length 7769
-    Stream: column 33 section SECONDARY start: 1472737 length 9448
-    Stream: column 34 section PRESENT start: 1482185 length 58
-    Stream: column 34 section DATA start: 1482243 length 7778
-    Stream: column 34 section SECONDARY start: 1490021 length 9469
-    Stream: column 35 section PRESENT start: 1499490 length 51
-    Stream: column 35 section DATA start: 1499541 length 782
-    Stream: column 36 section PRESENT start: 1500323 length 54
-    Stream: column 36 section DATA start: 1500377 length 783
+    Stream: column 3 section ROW_INDEX start: 144 length 81
+    Stream: column 4 section ROW_INDEX start: 225 length 83
+    Stream: column 5 section ROW_INDEX start: 308 length 77
+    Stream: column 6 section ROW_INDEX start: 385 length 77
+    Stream: column 7 section ROW_INDEX start: 462 length 176
+    Stream: column 8 section ROW_INDEX start: 638 length 267
+    Stream: column 9 section ROW_INDEX start: 905 length 63
+    Stream: column 10 section ROW_INDEX start: 968 length 57
+    Stream: column 11 section ROW_INDEX start: 1025 length 47
+    Stream: column 12 section ROW_INDEX start: 1072 length 47
+    Stream: column 13 section ROW_INDEX start: 1119 length 53
+    Stream: column 14 section ROW_INDEX start: 1172 length 67
+    Stream: column 15 section ROW_INDEX start: 1239 length 81
+    Stream: column 16 section ROW_INDEX start: 1320 length 83
+    Stream: column 17 section ROW_INDEX start: 1403 length 77
+    Stream: column 18 section ROW_INDEX start: 1480 length 77
+    Stream: column 19 section ROW_INDEX start: 1557 length 115
+    Stream: column 20 section ROW_INDEX start: 1672 length 93
+    Stream: column 21 section ROW_INDEX start: 1765 length 63
+    Stream: column 22 section ROW_INDEX start: 1828 length 57
+    Stream: column 23 section ROW_INDEX start: 1885 length 47
+    Stream: column 24 section ROW_INDEX start: 1932 length 47
+    Stream: column 25 section ROW_INDEX start: 1979 length 53
+    Stream: column 26 section ROW_INDEX start: 2032 length 67
+    Stream: column 27 section ROW_INDEX start: 2099 length 81
+    Stream: column 28 section ROW_INDEX start: 2180 length 83
+    Stream: column 29 section ROW_INDEX start: 2263 length 77
+    Stream: column 30 section ROW_INDEX start: 2340 length 77
+    Stream: column 31 section ROW_INDEX start: 2417 length 115
+    Stream: column 32 section ROW_INDEX start: 2532 length 93
+    Stream: column 33 section ROW_INDEX start: 2625 length 63
+    Stream: column 34 section ROW_INDEX start: 2688 length 57
+    Stream: column 35 section ROW_INDEX start: 2745 length 47
+    Stream: column 36 section ROW_INDEX start: 2792 length 47
+    Stream: column 1 section PRESENT start: 2839 length 51
+    Stream: column 1 section DATA start: 2890 length 5448
+    Stream: column 2 section PRESENT start: 8338 length 53
+    Stream: column 2 section DATA start: 8391 length 12144
+    Stream: column 3 section PRESENT start: 20535 length 53
+    Stream: column 3 section DATA start: 20588 length 24618
+    Stream: column 4 section PRESENT start: 45206 length 52
+    Stream: column 4 section DATA start: 45258 length 24681
+    Stream: column 5 section PRESENT start: 69939 length 51
+    Stream: column 5 section DATA start: 69990 length 9927
+    Stream: column 6 section PRESENT start: 79917 length 53
+    Stream: column 6 section DATA start: 79970 length 19755
+    Stream: column 7 section DATA start: 99725 length 258570
+    Stream: column 7 section LENGTH start: 358295 length 108
+    Stream: column 8 section DATA start: 358403 length 517341
+    Stream: column 8 section LENGTH start: 875744 length 108
+    Stream: column 9 section PRESENT start: 875852 length 52
+    Stream: column 9 section DATA start: 875904 length 8045
+    Stream: column 9 section SECONDARY start: 883949 length 9555
+    Stream: column 10 section PRESENT start: 893504 length 58
+    Stream: column 10 section DATA start: 893562 length 8082
+    Stream: column 10 section SECONDARY start: 901644 length 9590
+    Stream: column 11 section PRESENT start: 911234 length 51
+    Stream: column 11 section DATA start: 911285 length 782
+    Stream: column 12 section PRESENT start: 912067 length 54
+    Stream: column 12 section DATA start: 912121 length 783
+    Stream: column 13 section PRESENT start: 912904 length 51
+    Stream: column 13 section DATA start: 912955 length 5448
+    Stream: column 14 section PRESENT start: 918403 length 53
+    Stream: column 14 section DATA start: 918456 length 12144
+    Stream: column 15 section PRESENT start: 930600 length 53
+    Stream: column 15 section DATA start: 930653 length 24618
+    Stream: column 16 section PRESENT start: 955271 length 52
+    Stream: column 16 section DATA start: 955323 length 24681
+    Stream: column 17 section PRESENT start: 980004 length 51
+    Stream: column 17 section DATA start: 980055 length 9927
+    Stream: column 18 section PRESENT start: 989982 length 53
+    Stream: column 18 section DATA start: 990035 length 19755
+    Stream: column 19 section PRESENT start: 1009790 length 51
+    Stream: column 19 section DATA start: 1009841 length 11009
+    Stream: column 19 section LENGTH start: 1020850 length 3722
+    Stream: column 19 section DICTIONARY_DATA start: 1024572 length 65435
+    Stream: column 20 section PRESENT start: 1090007 length 54
+    Stream: column 20 section DATA start: 1090061 length 11006
+    Stream: column 20 section LENGTH start: 1101067 length 3739
+    Stream: column 20 section DICTIONARY_DATA start: 1104806 length 66022
+    Stream: column 21 section PRESENT start: 1170828 length 52
+    Stream: column 21 section DATA start: 1170880 length 8045
+    Stream: column 21 section SECONDARY start: 1178925 length 9555
+    Stream: column 22 section PRESENT start: 1188480 length 58
+    Stream: column 22 section DATA start: 1188538 length 8082
+    Stream: column 22 section SECONDARY start: 1196620 length 9590
+    Stream: column 23 section PRESENT start: 1206210 length 51
+    Stream: column 23 section DATA start: 1206261 length 782
+    Stream: column 24 section PRESENT start: 1207043 length 54
+    Stream: column 24 section DATA start: 1207097 length 783
+    Stream: column 25 section PRESENT start: 1207880 length 51
+    Stream: column 25 section DATA start: 1207931 length 5448
+    Stream: column 26 section PRESENT start: 1213379 length 53
+    Stream: column 26 section DATA start: 1213432 length 12144
+    Stream: column 27 section PRESENT start: 1225576 length 53
+    Stream: column 27 section DATA start: 1225629 length 24618
+    Stream: column 28 section PRESENT start: 1250247 length 52
+    Stream: column 28 section DATA start: 1250299 length 24681
+    Stream: column 29 section PRESENT start: 1274980 length 51
+    Stream: column 29 section DATA start: 1275031 length 9927
+    Stream: column 30 section PRESENT start: 1284958 length 53
+    Stream: column 30 section DATA start: 1285011 length 19755
+    Stream: column 31 section PRESENT start: 1304766 length 51
+    Stream: column 31 section DATA start: 1304817 length 11009
+    Stream: column 31 section LENGTH start: 1315826 length 3722
+    Stream: column 31 section DICTIONARY_DATA start: 1319548 length 65435
+    Stream: column 32 section PRESENT start: 1384983 length 54
+    Stream: column 32 section DATA start: 1385037 length 11006
+    Stream: column 32 section LENGTH start: 1396043 length 3739
+    Stream: column 32 section DICTIONARY_DATA start: 1399782 length 66022
+    Stream: column 33 section PRESENT start: 1465804 length 52
+    Stream: column 33 section DATA start: 1465856 length 8045
+    Stream: column 33 section SECONDARY start: 1473901 length 9555
+    Stream: column 34 section PRESENT start: 1483456 length 58
+    Stream: column 34 section DATA start: 1483514 length 8082
+    Stream: column 34 section SECONDARY start: 1491596 length 9590
+    Stream: column 35 section PRESENT start: 1501186 length 51
+    Stream: column 35 section DATA start: 1501237 length 782
+    Stream: column 36 section PRESENT start: 1502019 length 54
+    Stream: column 36 section DATA start: 1502073 length 783
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT
     Encoding column 2: DIRECT_V2
@@ -437,13 +437,13 @@ Stripes:
       Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
     Row group indices for column 2:
       Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
+      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
     Row group indices for column 3:
       Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
+      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
     Row group indices for column 4:
       Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
+      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
     Row group indices for column 5:
       Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
       Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -452,16 +452,16 @@ Stripes:
       Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
     Row group indices for column 7:
       Entry 0: count: 10000 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 360000 positions: 0,0,0,0,0
-      Entry 1: count: 2288 hasNull: false min: 002d8ccb-a094-4d10-b283-999770cf8488 max: ffacef94-41da-4230-807a-509bbf50b057 sum: 82368 positions: 153708,97856,0,76,272
+      Entry 1: count: 2288 hasNull: false min: 002d8ccb-a094-4d10-b283-999770cf8488 max: ffacef94-41da-4230-807a-509bbf50b057 sum: 82368 positions: 153190,97856,0,9766,272
     Row group indices for column 8:
       Entry 0: count: 10000 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 720000 positions: 0,0,0,0,0
-      Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 384237,64640,0,76,272
+      Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 306445,195712,0,9766,272
     Row group indices for column 9:
-      Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
+      Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
     Row group indices for column 10:
-      Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
+      Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
     Row group indices for column 11:
       Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -473,13 +473,13 @@ Stripes:
       Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
     Row group indices for column 14:
       Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
+      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
     Row group indices for column 15:
       Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
+      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
     Row group indices for column 16:
       Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
+      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
     Row group indices for column 17:
       Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
       Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -488,16 +488,16 @@ Stripes:
       Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
     Row group indices for column 19:
       Entry 0: count: 7140 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yxN0212hM17E8J8bJj8D7b sum: 99028 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,8182,0
+      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,14308,0
     Row group indices for column 20:
       Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,9196,262
+      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262
     Row group indices for column 21:
-      Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
+      Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
     Row group indices for column 22:
-      Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
+      Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
     Row group indices for column 23:
       Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -509,13 +509,13 @@ Stripes:
       Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
     Row group indices for column 26:
       Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
+      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
     Row group indices for column 27:
       Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
+      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
     Row group indices for column 28:
       Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
+      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
     Row group indices for column 29:
       Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
       Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -524,146 +524,146 @@ Stripes:
       Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
     Row group indices for column 31:
       Entry 0: count: 7140 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yxN0212hM17E8J8bJj8D7b sum: 99028 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,8182,0
+      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,14308,0
     Row group indices for column 32:
       Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,9196,262
+      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262
     Row group indices for column 33:
-      Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
+      Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
     Row group indices for column 34:
-      Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
+      Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
     Row group indices for column 35:
       Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
     Row group indices for column 36:
       Entry 0: count: 6889 hasNull: true true: 3402 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2284 hasNull: true true: 581 positions: 0,168,8,0,0,520,97,1
-  Stripe: offset: 1501653 data: 1498336 rows: 12288 tail: 493 index: 2821
-    Stream: column 0 section ROW_INDEX start: 1501653 length 21
-    Stream: column 1 section ROW_INDEX start: 1501674 length 53
-    Stream: column 2 section ROW_INDEX start: 1501727 length 67
-    Stream: column 3 section ROW_INDEX start: 1501794 length 79
-    Stream: column 4 section ROW_INDEX start: 1501873 length 83
-    Stream: column 5 section ROW_INDEX start: 1501956 length 77
-    Stream: column 6 section ROW_INDEX start: 1502033 length 77
-    Stream: column 7 section ROW_INDEX start: 1502110 length 170
-    Stream: column 8 section ROW_INDEX start: 1502280 length 264
-    Stream: column 9 section ROW_INDEX start: 1502544 length 63
-    Stream: column 10 section ROW_INDEX start: 1502607 length 57
-    Stream: column 11 section ROW_INDEX start: 1502664 length 47
-    Stream: column 12 section ROW_INDEX start: 1502711 length 47
-    Stream: column 13 section ROW_INDEX start: 1502758 length 53
-    Stream: column 14 section ROW_INDEX start: 1502811 length 67
-    Stream: column 15 section ROW_INDEX start: 1502878 length 79
-    Stream: column 16 section ROW_INDEX start: 1502957 length 83
-    Stream: column 17 section ROW_INDEX start: 1503040 length 77
-    Stream: column 18 section ROW_INDEX start: 1503117 length 77
-    Stream: column 19 section ROW_INDEX start: 1503194 length 115
-    Stream: column 20 section ROW_INDEX start: 1503309 length 93
-    Stream: column 21 section ROW_INDEX start: 1503402 length 63
-    Stream: column 22 section ROW_INDEX start: 1503465 length 57
-    Stream: column 23 section ROW_INDEX start: 1503522 length 47
-    Stream: column 24 section ROW_INDEX start: 1503569 length 47
-    Stream: column 25 section ROW_INDEX start: 1503616 length 53
-    Stream: column 26 section ROW_INDEX start: 1503669 length 67
-    Stream: column 27 section ROW_INDEX start: 1503736 length 79
-    Stream: column 28 section ROW_INDEX start: 1503815 length 83
-    Stream: column 29 section ROW_INDEX start: 1503898 length 77
-    Stream: column 30 section ROW_INDEX start: 1503975 length 77
-    Stream: column 31 section ROW_INDEX start: 1504052 length 115
-    Stream: column 32 section ROW_INDEX start: 1504167 length 93
-    Stream: column 33 section ROW_INDEX start: 1504260 length 63
-    Stream: column 34 section ROW_INDEX start: 1504323 length 57
-    Stream: column 35 section ROW_INDEX start: 1504380 length 47
-    Stream: column 36 section ROW_INDEX start: 1504427 length 47
-    Stream: column 1 section PRESENT start: 1504474 length 51
-    Stream: column 1 section DATA start: 1504525 length 5448
-    Stream: column 2 section PRESENT start: 1509973 length 53
-    Stream: column 2 section DATA start: 1510026 length 12078
-    Stream: column 3 section PRESENT start: 1522104 length 53
-    Stream: column 3 section DATA start: 1522157 length 24479
-    Stream: column 4 section PRESENT start: 1546636 length 52
-    Stream: column 4 section DATA start: 1546688 length 24479
-    Stream: column 5 section PRESENT start: 1571167 length 51
-    Stream: column 5 section DATA start: 1571218 length 9927
-    Stream: column 6 section PRESENT start: 1581145 length 53
-    Stream: column 6 section DATA start: 1581198 length 19755
-    Stream: column 7 section DATA start: 1600953 length 259558
-    Stream: column 7 section LENGTH start: 1860511 length 12
-    Stream: column 8 section DATA start: 1860523 length 518777
-    Stream: column 8 section LENGTH start: 2379300 length 12
-    Stream: column 9 section PRESENT start: 2379312 length 52
-    Stream: column 9 section DATA start: 2379364 length 7769
-    Stream: column 9 section SECONDARY start: 2387133 length 9448
-    Stream: column 10 section PRESENT start: 2396581 length 58
-    Stream: column 10 section DATA start: 2396639 length 7778
-    Stream: column 10 section SECONDARY start: 2404417 length 9469
-    Stream: column 11 section PRESENT start: 2413886 length 51
-    Stream: column 11 section DATA start: 2413937 length 782
-    Stream: column 12 section PRESENT start: 2414719 length 54
-    Stream: column 12 section DATA start: 2414773 length 783
-    Stream: column 13 section PRESENT start: 2415556 length 51
-    Stream: column 13 section DATA start: 2415607 length 5448
-    Stream: column 14 section PRESENT start: 2421055 length 53
-    Stream: column 14 section DATA start: 2421108 length 12078
-    Stream: column 15 section PRESENT start: 2433186 length 53
-    Stream: column 15 section DATA start: 2433239 length 24479
-    Stream: column 16 section PRESENT start: 2457718 length 52
-    Stream: column 16 section DATA start: 2457770 length 24479
-    Stream: column 17 section PRESENT start: 2482249 length 51
-    Stream: column 17 section DATA start: 2482300 length 9927
-    Stream: column 18 section PRESENT start: 2492227 length 53
-    Stream: column 18 section DATA start: 2492280 length 19755
-    Stream: column 19 section PRESENT start: 2512035 length 51
-    Stream: column 19 section DATA start: 2512086 length 10942
-    Stream: column 19 section LENGTH start: 2523028 length 3722
-    Stream: column 19 section DICTIONARY_DATA start: 2526750 length 65435
-    Stream: column 20 section PRESENT start: 2592185 length 54
-    Stream: column 20 section DATA start: 2592239 length 10939
-    Stream: column 20 section LENGTH start: 2603178 length 3739
-    Stream: column 20 section DICTIONARY_DATA start: 2606917 length 66022
-    Stream: column 21 section PRESENT start: 2672939 length 52
-    Stream: column 21 section DATA start: 2672991 length 7769
-    Stream: column 21 section SECONDARY start: 2680760 length 9448
-    Stream: column 22 section PRESENT start: 2690208 length 58
-    Stream: column 22 section DATA start: 2690266 length 7778
-    Stream: column 22 section SECONDARY start: 2698044 length 9469
-    Stream: column 23 section PRESENT start: 2707513 length 51
-    Stream: column 23 section DATA start: 2707564 length 782
-    Stream: column 24 section PRESENT start: 2708346 length 54
-    Stream: column 24 section DATA start: 2708400 length 783
-    Stream: column 25 section PRESENT start: 2709183 length 51
-    Stream: column 25 section DATA start: 2709234 length 5448
-    Stream: column 26 section PRESENT start: 2714682 length 53
-    Stream: column 26 section DATA start: 2714735 length 12078
-    Stream: column 27 section PRESENT start: 2726813 length 53
-    Stream: column 27 section DATA start: 2726866 length 24479
-    Stream: column 28 section PRESENT start: 2751345 length 52
-    Stream: column 28 section DATA start: 2751397 length 24479
-    Stream: column 29 section PRESENT start: 2775876 length 51
-    Stream: column 29 section DATA start: 2775927 length 9927
-    Stream: column 30 section PRESENT start: 2785854 length 53
-    Stream: column 30 section DATA start: 2785907 length 19755
-    Stream: column 31 section PRESENT start: 2805662 length 51
-    Stream: column 31 section DATA start: 2805713 length 10942
-    Stream: column 31 section LENGTH start: 2816655 length 3722
-    Stream: column 31 section DICTIONARY_DATA start: 2820377 length 65435
-    Stream: column 32 section PRESENT start: 2885812 length 54
-    Stream: column 32 section DATA start: 2885866 length 10939
-    Stream: column 32 section LENGTH start: 2896805 length 3739
-    Stream: column 32 section DICTIONARY_DATA start: 2900544 length 66022
-    Stream: column 33 section PRESENT start: 2966566 length 52
-    Stream: column 33 section DATA start: 2966618 length 7769
-    Stream: column 33 section SECONDARY start: 2974387 length 9448
-    Stream: column 34 section PRESENT start: 2983835 length 58
-    Stream: column 34 section DATA start: 2983893 length 7778
-    Stream: column 34 section SECONDARY start: 2991671 length 9469
-    Stream: column 35 section PRESENT start: 3001140 length 51
-    Stream: column 35 section DATA start: 3001191 length 782
-    Stream: column 36 section PRESENT start: 3001973 length 54
-    Stream: column 36 section DATA start: 3002027 length 783
+  Stripe: offset: 1503357 data: 1500017 rows: 12288 tail: 501 index: 2836
+    Stream: column 0 section ROW_INDEX start: 1503357 length 21
+    Stream: column 1 section ROW_INDEX start: 1503378 length 53
+    Stream: column 2 section ROW_INDEX start: 1503431 length 67
+    Stream: column 3 section ROW_INDEX start: 1503498 length 81
+    Stream: column 4 section ROW_INDEX start: 1503579 length 83
+    Stream: column 5 section ROW_INDEX start: 1503662 length 77
+    Stream: column 6 section ROW_INDEX start: 1503739 length 77
+    Stream: column 7 section ROW_INDEX start: 1503816 length 176
+    Stream: column 8 section ROW_INDEX start: 1503992 length 267
+    Stream: column 9 section ROW_INDEX start: 1504259 length 63
+    Stream: column 10 section ROW_INDEX start: 1504322 length 57
+    Stream: column 11 section ROW_INDEX start: 1504379 length 47
+    Stream: column 12 section ROW_INDEX start: 1504426 length 47
+    Stream: column 13 section ROW_INDEX start: 1504473 length 53
+    Stream: column 14 section ROW_INDEX start: 1504526 length 67
+    Stream: column 15 section ROW_INDEX start: 1504593 length 81
+    Stream: column 16 section ROW_INDEX start: 1504674 length 83
+    Stream: column 17 section ROW_INDEX start: 1504757 length 77
+    Stream: column 18 section ROW_INDEX start: 1504834 length 77
+    Stream: column 19 section ROW_INDEX start: 1504911 length 115
+    Stream: column 20 section ROW_INDEX start: 1505026 length 93
+    Stream: column 21 section ROW_INDEX start: 1505119 length 63
+    Stream: column 22 section ROW_INDEX start: 1505182 length 57
+    Stream: column 23 section ROW_INDEX start: 1505239 length 47
+    Stream: column 24 section ROW_INDEX start: 1505286 length 47
+    Stream: column 25 section ROW_INDEX start: 1505333 length 53
+    Stream: column 26 section ROW_INDEX start: 1505386 length 67
+    Stream: column 27 section ROW_INDEX start: 1505453 length 81
+    Stream: column 28 section ROW_INDEX start: 1505534 length 83
+    Stream: column 29 section ROW_INDEX start: 1505617 length 77
+    Stream: column 30 section ROW_INDEX start: 1505694 length 77
+    Stream: column 31 section ROW_INDEX start: 1505771 length 115
+    Stream: column 32 section ROW_INDEX start: 1505886 length 93
+    Stream: column 33 section ROW_INDEX start: 1505979 length 63
+    Stream: column 34 section ROW_INDEX start: 1506042 length 57
+    Stream: column 35 section ROW_INDEX start: 1506099 length 47
+    Stream: column 36 section ROW_INDEX start: 1506146 length 47
+    Stream: column 1 section PRESENT start: 1506193 length 51
+    Stream: column 1 section DATA start: 1506244 length 5448
+    Stream: column 2 section PRESENT start: 1511692 length 53
+    Stream: column 2 section DATA start: 1511745 length 12144
+    Stream: column 3 section PRESENT start: 1523889 length 53
+    Stream: column 3 section DATA start: 1523942 length 24618
+    Stream: column 4 section PRESENT start: 1548560 length 52
+    Stream: column 4 section DATA start: 1548612 length 24681
+    Stream: column 5 section PRESENT start: 1573293 length 51
+    Stream: column 5 section DATA start: 1573344 length 9927
+    Stream: column 6 section PRESENT start: 1583271 length 53
+    Stream: column 6 section DATA start: 1583324 length 19755
+    Stream: column 7 section DATA start: 1603079 length 258570
+    Stream: column 7 section LENGTH start: 1861649 length 108
+    Stream: column 8 section DATA start: 1861757 length 517341
+    Stream: column 8 section LENGTH start: 2379098 length 108
+    Stream: column 9 section PRESENT start: 2379206 length 52
+    Stream: column 9 section DATA start: 2379258 length 8045
+    Stream: column 9 section SECONDARY start: 2387303 length 9555
+    Stream: column 10 section PRESENT start: 2396858 length 58
+    Stream: column 10 section DATA start: 2396916 length 8082
+    Stream: column 10 section SECONDARY start: 2404998 length 9590
+    Stream: column 11 section PRESENT start: 2414588 length 51
+    Stream: column 11 section DATA start: 2414639 length 782
+    Stream: column 12 section PRESENT start: 2415421 length 54
+    Stream: column 12 section DATA start: 2415475 length 783
+    Stream: column 13 section PRESENT start: 2416258 length 51
+    Stream: column 13 section DATA start: 2416309 length 5448
+    Stream: column 14 section PRESENT start: 2421757 length 53
+    Stream: column 14 section DATA start: 2421810 length 12144
+    Stream: column 15 section PRESENT start: 2433954 length 53
+    Stream: column 15 section DATA start: 2434007 length 24618
+    Stream: column 16 section PRESENT start: 2458625 length 52
+    Stream: column 16 section DATA start: 2458677 length 24681
+    Stream: column 17 section PRESENT start: 2483358 length 51
+    Stream: column 17 section DATA start: 2483409 length 9927
+    Stream: column 18 section PRESENT start: 2493336 length 53
+    Stream: column 18 section DATA start: 2493389 length 19755
+    Stream: column 19 section PRESENT start: 2513144 length 51
+    Stream: column 19 section DATA start: 2513195 length 11009
+    Stream: column 19 section LENGTH start: 2524204 length 3722
+    Stream: column 19 section DICTIONARY_DATA start: 2527926 length 65435
+    Stream: column 20 section PRESENT start: 2593361 length 54
+    Stream: column 20 section DATA start: 2593415 length 11006
+    Stream: column 20 section LENGTH start: 2604421 length 3739
+    Stream: column 20 section DICTIONARY_DATA start: 2608160 length 66022
+    Stream: column 21 section PRESENT start: 2674182 length 52
+    Stream: column 21 section DATA start: 2674234 length 8045
+    Stream: column 21 section SECONDARY start: 2682279 length 9555
+    Stream: column 22 section PRESENT start: 2691834 length 58
+    Stream: column 22 section DATA start: 2691892 length 8082
+    Stream: column 22 section SECONDARY start: 2699974 length 9590
+    Stream: column 23 section PRESENT start: 2709564 length 51
+    Stream: column 23 section DATA start: 2709615 length 782
+    Stream: column 24 section PRESENT start: 2710397 length 54
+    Stream: column 24 section DATA start: 2710451 length 783
+    Stream: column 25 section PRESENT start: 2711234 length 51
+    Stream: column 25 section DATA start: 2711285 length 5448
+    Stream: column 26 section PRESENT start: 2716733 length 53
+    Stream: column 26 section DATA start: 2716786 length 12144
+    Stream: column 27 section PRESENT start: 2728930 length 53
+    Stream: column 27 section DATA start: 2728983 length 24618
+    Stream: column 28 section PRESENT start: 2753601 length 52
+    Stream: column 28 section DATA start: 2753653 length 24681
+    Stream: column 29 section PRESENT start: 2778334 length 51
+    Stream: column 29 section DATA start: 2778385 length 9927
+    Stream: column 30 section PRESENT start: 2788312 length 53
+    Stream: column 30 section DATA start: 2788365 length 19755
+    Stream: column 31 section PRESENT start: 2808120 length 51
+    Stream: column 31 section DATA start: 2808171 length 11009
+    Stream: column 31 section LENGTH start: 2819180 length 3722
+    Stream: column 31 section DICTIONARY_DATA start: 2822902 length 65435
+    Stream: column 32 section PRESENT start: 2888337 length 54
+    Stream: column 32 section DATA start: 2888391 length 11006
+    Stream: column 32 section LENGTH start: 2899397 length 3739
+    Stream: column 32 section DICTIONARY_DATA start: 2903136 length 66022
+    Stream: column 33 section PRESENT start: 2969158 length 52
+    Stream: column 33 section DATA start: 2969210 length 8045
+    Stream: column 33 section SECONDARY start: 2977255 length 9555
+    Stream: column 34 section PRESENT start: 2986810 length 58
+    Stream: column 34 section DATA start: 2986868 length 8082
+    Stream: column 34 section SECONDARY start: 2994950 length 9590
+    Stream: column 35 section PRESENT start: 3004540 length 51
+    Stream: column 35 section DATA start: 3004591 length 782
+    Stream: column 36 section PRESENT start: 3005373 length 54
+    Stream: column 36 section DATA start: 3005427 length 783
     Encoding column 0: DIRECT
     Encoding column 1: DIRECT
     Encoding column 2: DIRECT_V2
@@ -709,13 +709,13 @@ Stripes:
       Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
     Row group indices for column 2:
       Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
+      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
     Row group indices for column 3:
       Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
+      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
     Row group indices for column 4:
       Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
+      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
     Row group indices for column 5:
       Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
       Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -724,16 +724,16 @@ Stripes:
       Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
     Row group indices for column 7:
       Entry 0: count: 10000 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 360000 positions: 0,0,0,0,0
-      Entry 1: count: 2288 hasNull: false min: 002d8ccb-a094-4d10-b283-999770cf8488 max: ffacef94-41da-4230-807a-509bbf50b057 sum: 82368 positions: 153708,97856,0,76,272
+      Entry 1: count: 2288 hasNull: false min: 002d8ccb-a094-4d10-b283-999770cf8488 max: ffacef94-41da-4230-807a-509bbf50b057 sum: 82368 positions: 153190,97856,0,9766,272
     Row group indices for column 8:
       Entry 0: count: 10000 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 720000 positions: 0,0,0,0,0
-      Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 384237,64640,0,76,272
+      Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 306445,195712,0,9766,272
     Row group indices for column 9:
-      Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
+      Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
     Row group indices for column 10:
-      Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
+      Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
     Row group indices for column 11:
       Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -745,13 +745,13 @@ Stripes:
       Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
     Row group indices for column 14:
       Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
+      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
     Row group indices for column 15:
       Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
+      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
     Row group indices for column 16:
       Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
+      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
     Row group indices for column 17:
       Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
       Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -760,16 +760,16 @@ Stripes:
       Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
     Row group indices for column 19:
       Entry 0: count: 7140 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yxN0212hM17E8J8bJj8D7b sum: 99028 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,8182,0
+      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,14308,0
     Row group indices for column 20:
       Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,9196,262
+      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262
     Row group indices for column 21:
-      Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
+      Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
     Row group indices for column 22:
-      Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
+      Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
     Row group indices for column 23:
       Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -781,13 +781,13 @@ Stripes:
       Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
     Row group indices for column 26:
       Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
+      Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
     Row group indices for column 27:
       Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
+      Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
     Row group indices for column 28:
       Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
+      Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
     Row group indices for column 29:
       Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
       Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -796,16 +796,16 @@ Stripes:
       Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
     Row group indices for column 31:
       Entry 0: count: 7140 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yxN0212hM17E8J8bJj8D7b sum: 99028 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,8182,0
+      Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,14308,0
     Row group indices for column 32:
       Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0
-      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,9196,262
+      Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262
     Row group indices for column 33:
-      Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
+      Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:43.64 max UTC: 1969-12-31 06:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
     Row group indices for column 34:
-      Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
-      Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
+      Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+      Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 min UTC: 1969-12-31 05:59:30.929 max UTC: 1969-12-31 06:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
     Row group indices for column 35:
       Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -813,7 +813,7 @@ Stripes:
       Entry 0: count: 6889 hasNull: true true: 3402 positions: 0,0,0,0,0,0,0,0
       Entry 1: count: 2284 hasNull: true true: 581 positions: 0,168,8,0,0,520,97,1
 
-File length: 3004630 bytes
+File length: 3007984 bytes
 Padding length: 0 bytes
 Padding ratio: 0%
 ________________________________________________________________________________________________________________________
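
A note on the row-group indices above: every timestamp statistic is printed twice, and the local and UTC renderings sit exactly eight hours apart, the US Pacific offset of the environment that produced this q.out (PST, since the dates fall outside daylight saving time). A minimal check of that gap using only java.time; the class name is illustrative:

    import java.time.Duration;
    import java.time.LocalDateTime;

    public class IndexGapCheck {
        public static void main(String[] args) {
            // The two renderings of the column-9 minimum from the index above.
            LocalDateTime local = LocalDateTime.parse("1969-12-31T13:59:30.929");
            LocalDateTime utc = LocalDateTime.parse("1969-12-31T05:59:30.929");
            System.out.println(Duration.between(utc, local).toHours()); // 8
        }
    }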

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp.q.out b/ql/src/test/results/clientpositive/timestamp.q.out
index 5b6d3eb..8fafd12 100644
--- a/ql/src/test/results/clientpositive/timestamp.q.out
+++ b/ql/src/test/results/clientpositive/timestamp.q.out
@@ -20,7 +20,7 @@ STAGE PLANS:
                 Select Operator
                   Statistics: Num rows: 1000 Data size: 40000 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
-                    keys: TIMESTAMP'2011-01-01 01:01:01' (type: timestamp)
+                    keys: TIMESTAMP'2011-01-01 01:01:01.0' (type: timestamp)
                     mode: hash
                     outputColumnNames: _col0
                     Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
@@ -40,7 +40,7 @@ STAGE PLANS:
                 Select Operator
                   Statistics: Num rows: 1000 Data size: 40000 Basic stats: COMPLETE Column stats: COMPLETE
                   Group By Operator
-                    keys: TIMESTAMP'2011-01-01 01:01:01' (type: timestamp)
+                    keys: TIMESTAMP'2011-01-01 01:01:01.0' (type: timestamp)
                     mode: hash
                     outputColumnNames: _col0
                     Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
@@ -57,7 +57,7 @@ STAGE PLANS:
           outputColumnNames: _col0
           Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
-            expressions: TIMESTAMP'2011-01-01 01:01:01' (type: timestamp)
+            expressions: TIMESTAMP'2011-01-01 01:01:01.0' (type: timestamp)
             outputColumnNames: _col0
             Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
             Limit

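The only change in these plans is the rendering of the timestamp literal, which regains a trailing ".0". That matches the revert switching Hive back to java.sql.Timestamp, whose toString() always prints a fractional-seconds field, even for whole seconds. A minimal JDK-only illustration; the class name is illustrative:

    import java.sql.Timestamp;

    public class TimestampRendering {
        public static void main(String[] args) {
            // java.sql.Timestamp.toString() renders zero nanos as ".0".
            Timestamp ts = Timestamp.valueOf("2011-01-01 01:01:01");
            System.out.println(ts); // 2011-01-01 01:01:01.0
        }
    }
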
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/timestamp_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp_1.q.out b/ql/src/test/results/clientpositive/timestamp_1.q.out
index 8221d1b..fab69ec 100644
--- a/ql/src/test/results/clientpositive/timestamp_1.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_1.q.out
@@ -64,7 +64,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -73,7 +73,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -82,7 +82,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -91,7 +91,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.293843661E9
+1.293872461E9
 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -147,7 +147,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -156,7 +156,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -165,7 +165,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -174,7 +174,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.293843661E9
+1.293872461E9
 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -230,7 +230,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -239,7 +239,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -248,7 +248,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -257,7 +257,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.2938436611E9
+1.2938724611E9
 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -313,7 +313,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -322,7 +322,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -331,7 +331,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -340,7 +340,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.2938436610001E9
+1.2938724610001E9
 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -396,7 +396,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -405,7 +405,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -414,7 +414,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -423,7 +423,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.2938436610001E9
+1.2938724610001E9
 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -479,7 +479,7 @@ POSTHOOK: query: select cast(t as int) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -488,7 +488,7 @@ POSTHOOK: query: select cast(t as bigint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1293843661
+1293872461
 PREHOOK: query: select cast(t as float) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -497,7 +497,7 @@ POSTHOOK: query: select cast(t as float) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.29384371E9
+1.29387251E9
 PREHOOK: query: select cast(t as double) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -506,7 +506,7 @@ POSTHOOK: query: select cast(t as double) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-1.293843661001E9
+1.293872461001E9
 PREHOOK: query: select cast(t as string) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1


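Every expected cast value in this file moves by 28,800 seconds (1293872461 - 1293843661 = 28800, i.e. eight hours): with the revert in place, the epoch behind cast(t as int), bigint, float and double is computed in the session's local time zone instead of UTC. A small sketch of the arithmetic, assuming (based on the eight-hour gap) that the qtest JVM runs in America/Los_Angeles; the class name is illustrative:

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class CastDelta {
        public static void main(String[] args) {
            LocalDateTime t = LocalDateTime.of(2011, 1, 1, 1, 1, 1);
            // Pre-revert: the wall-clock value is treated as UTC.
            long utcEpoch = t.toEpochSecond(ZoneOffset.UTC); // 1293843661
            // Post-revert: the wall-clock value is treated as session-local time.
            long localEpoch = t.atZone(ZoneId.of("America/Los_Angeles")).toEpochSecond(); // 1293872461
            System.out.println(localEpoch - utcEpoch); // 28800
        }
    }
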
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out b/ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out
index 57a1ea7..1bbb9d0 100644
--- a/ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_partitioned_date_time.q.out
@@ -1863,18 +1863,18 @@ POSTHOOK: Input: default@flights_tiny_orc_partitioned_timestamp@fl_time=2010-10-
 POSTHOOK: Input: default@flights_tiny_orc_partitioned_timestamp@fl_time=2010-10-30 07%3A00%3A00
 POSTHOOK: Input: default@flights_tiny_orc_partitioned_timestamp@fl_time=2010-10-31 07%3A00%3A00
 #### A masked pattern was here ####
-2010-10-21 07:00:00	12
-2010-10-24 07:00:00	12
-2010-10-31 07:00:00	8
-2010-10-22 07:00:00	11
 2010-10-23 07:00:00	12
-2010-10-30 07:00:00	11
-2010-10-20 07:00:00	11
 2010-10-25 07:00:00	12
+2010-10-22 07:00:00	11
+2010-10-24 07:00:00	12
 2010-10-26 07:00:00	13
-2010-10-27 07:00:00	11
+2010-10-20 07:00:00	11
 2010-10-28 07:00:00	12
 2010-10-29 07:00:00	12
+2010-10-30 07:00:00	11
+2010-10-31 07:00:00	8
+2010-10-21 07:00:00	12
+2010-10-27 07:00:00	11
 PREHOOK: query: explain vectorization expression
 select * from flights_tiny_orc_partitioned_timestamp
 PREHOOK: type: QUERY
@@ -2439,18 +2439,18 @@ POSTHOOK: Input: default@flights_tiny_orc_partitioned_timestamp@fl_time=2010-10-
 POSTHOOK: Input: default@flights_tiny_orc_partitioned_timestamp@fl_time=2010-10-30 07%3A00%3A00
 POSTHOOK: Input: default@flights_tiny_orc_partitioned_timestamp@fl_time=2010-10-31 07%3A00%3A00
 #### A masked pattern was here ####
-2010-10-21 07:00:00	12
-2010-10-24 07:00:00	12
-2010-10-31 07:00:00	8
-2010-10-22 07:00:00	11
 2010-10-23 07:00:00	12
-2010-10-30 07:00:00	11
-2010-10-20 07:00:00	11
 2010-10-25 07:00:00	12
+2010-10-22 07:00:00	11
+2010-10-24 07:00:00	12
 2010-10-26 07:00:00	13
-2010-10-27 07:00:00	11
+2010-10-20 07:00:00	11
 2010-10-28 07:00:00	12
 2010-10-29 07:00:00	12
+2010-10-30 07:00:00	11
+2010-10-31 07:00:00	8
+2010-10-21 07:00:00	12
+2010-10-27 07:00:00	11
 PREHOOK: query: CREATE TABLE flights_tiny_parquet STORED AS PARQUET AS
 SELECT origin_city_name, dest_city_name, fl_date, to_utc_timestamp(fl_date, 'America/Los_Angeles') as fl_time, arr_delay, fl_num
 FROM flights_tiny_n1
@@ -4288,18 +4288,18 @@ POSTHOOK: Input: default@flights_tiny_parquet_partitioned_timestamp@fl_time=2010
 POSTHOOK: Input: default@flights_tiny_parquet_partitioned_timestamp@fl_time=2010-10-30 07%3A00%3A00
 POSTHOOK: Input: default@flights_tiny_parquet_partitioned_timestamp@fl_time=2010-10-31 07%3A00%3A00
 #### A masked pattern was here ####
-2010-10-21 07:00:00	12
-2010-10-24 07:00:00	12
-2010-10-31 07:00:00	8
-2010-10-22 07:00:00	11
 2010-10-23 07:00:00	12
-2010-10-30 07:00:00	11
-2010-10-20 07:00:00	11
 2010-10-25 07:00:00	12
+2010-10-22 07:00:00	11
+2010-10-24 07:00:00	12
 2010-10-26 07:00:00	13
-2010-10-27 07:00:00	11
+2010-10-20 07:00:00	11
 2010-10-28 07:00:00	12
 2010-10-29 07:00:00	12
+2010-10-30 07:00:00	11
+2010-10-31 07:00:00	8
+2010-10-21 07:00:00	12
+2010-10-27 07:00:00	11
 PREHOOK: query: explain vectorization expression
 select * from flights_tiny_parquet_partitioned_timestamp
 PREHOOK: type: QUERY
@@ -4864,15 +4864,15 @@ POSTHOOK: Input: default@flights_tiny_parquet_partitioned_timestamp@fl_time=2010
 POSTHOOK: Input: default@flights_tiny_parquet_partitioned_timestamp@fl_time=2010-10-30 07%3A00%3A00
 POSTHOOK: Input: default@flights_tiny_parquet_partitioned_timestamp@fl_time=2010-10-31 07%3A00%3A00
 #### A masked pattern was here ####
-2010-10-21 07:00:00	12
-2010-10-24 07:00:00	12
-2010-10-31 07:00:00	8
-2010-10-22 07:00:00	11
 2010-10-23 07:00:00	12
-2010-10-30 07:00:00	11
-2010-10-20 07:00:00	11
 2010-10-25 07:00:00	12
+2010-10-22 07:00:00	11
+2010-10-24 07:00:00	12
 2010-10-26 07:00:00	13
-2010-10-27 07:00:00	11
+2010-10-20 07:00:00	11
 2010-10-28 07:00:00	12
 2010-10-29 07:00:00	12
+2010-10-30 07:00:00	11
+2010-10-31 07:00:00	8
+2010-10-21 07:00:00	12
+2010-10-27 07:00:00	11

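Only the row order changes in the result blocks above; the values, including the fl_time keys, are identical. Those 07:00:00 keys follow from to_utc_timestamp(fl_date, 'America/Los_Angeles') in the CTAS: midnight Pacific on these October 2010 dates is 07:00 UTC, since PDT (UTC-7) holds until November 7, 2010. A rough java.time model of that conversion, a sketch of the semantics rather than Hive's implementation; the class name is illustrative:

    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class ToUtcTimestampModel {
        public static void main(String[] args) {
            // Interpret the wall-clock value in the given zone, then re-render in UTC.
            LocalDateTime flDate = LocalDateTime.of(2010, 10, 20, 0, 0);
            LocalDateTime flTime = flDate.atZone(ZoneId.of("America/Los_Angeles"))
                    .withZoneSameInstant(ZoneOffset.UTC)
                    .toLocalDateTime();
            System.out.println(flTime); // 2010-10-20T07:00
        }
    }
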
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out b/ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out
index e16f843..2471c5d 100644
--- a/ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out
@@ -5640,13 +5640,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:p_mfgr:string, 1:p_name:string, 2:p_retailprice:double, 3:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Reduce Output Operator
-                    key expressions: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp)
+                    key expressions: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp)
                     sort order: ++
-                    Map-reduce partition columns: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp)
+                    Map-reduce partition columns: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkMultiKeyOperator
                         keyColumnNums: [0, 6]
-                        keyExpressions: IfExprColumnNull(col 4:boolean, col 5:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 4:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00) -> 5:timestamp) -> 6:timestamp
+                        keyExpressions: IfExprColumnNull(col 4:boolean, col 5:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 4:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00.0) -> 5:timestamp) -> 6:timestamp
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                         valueColumnNums: [1, 2]
@@ -5690,13 +5690,13 @@ STAGE PLANS:
                       Windowing table definition
                         input alias: ptf_1
                         name: windowingtablefunction
-                        order by: _col0 ASC NULLS FIRST, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END ASC NULLS FIRST
-                        partition by: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END
+                        order by: _col0 ASC NULLS FIRST, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END ASC NULLS FIRST
+                        partition by: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END
                         raw input shape:
                         window functions:
                             window function definition
                               alias: rank_window_0
-                              arguments: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END
+                              arguments: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END
                               name: rank
                               window function: GenericUDAFRankEvaluator
                               window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX)
@@ -5733,12 +5733,6 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@vector_ptf_part_simple_orc
 #### A masked pattern was here ####
 p_mfgr	p_name	p_retailprice	r
-Manufacturer#5	almond antique blue firebrick mint	1789.69	1
-Manufacturer#5	almond azure blanched chiffon midnight	1464.48	1
-Manufacturer#5	almond aquamarine dodger light gainsboro	1018.1	1
-Manufacturer#5	almond antique medium spring khaki	1611.66	1
-Manufacturer#5	almond antique sky peru orange	1788.73	1
-Manufacturer#5	almond antique medium spring khaki	1611.66	1
 Manufacturer#2	almond aquamarine rose maroon antique	900.66	1
 Manufacturer#2	almond aquamarine rose maroon antique	1698.66	1
 Manufacturer#2	almond antique violet turquoise frosted	1800.7	1
@@ -5747,31 +5741,37 @@ Manufacturer#2	almond antique violet turquoise frosted	1800.7	1
 Manufacturer#2	almond antique violet turquoise frosted	1800.7	1
 Manufacturer#2	almond aquamarine sandy cyan gainsboro	1000.6	1
 Manufacturer#2	almond aquamarine midnight light salmon	2031.98	1
+Manufacturer#5	almond antique sky peru orange	1788.73	1
+Manufacturer#5	almond antique medium spring khaki	1611.66	1
+Manufacturer#5	almond antique medium spring khaki	1611.66	1
+Manufacturer#5	almond aquamarine dodger light gainsboro	1018.1	1
+Manufacturer#5	almond azure blanched chiffon midnight	1464.48	1
+Manufacturer#5	almond antique blue firebrick mint	1789.69	1
 Manufacturer#4	almond azure aquamarine papaya violet	1290.35	1
-Manufacturer#4	almond aquamarine yellow dodger mint	1844.92	1
-Manufacturer#4	almond aquamarine floral ivory bisque	1206.26	1
+Manufacturer#4	almond aquamarine floral ivory bisque	NULL	1
 Manufacturer#4	almond antique gainsboro frosted violet	NULL	1
 Manufacturer#4	almond antique violet mint lemon	1375.42	1
-Manufacturer#4	almond aquamarine floral ivory bisque	NULL	1
-Manufacturer#1	almond antique chartreuse lavender yellow	1753.76	1
+Manufacturer#4	almond aquamarine yellow dodger mint	1844.92	1
+Manufacturer#4	almond aquamarine floral ivory bisque	1206.26	1
 Manufacturer#1	almond aquamarine pink moccasin thistle	1632.66	1
 Manufacturer#1	almond antique chartreuse lavender yellow	1753.76	1
-Manufacturer#1	almond antique chartreuse lavender yellow	1753.76	1
 Manufacturer#1	almond aquamarine pink moccasin thistle	1632.66	1
+Manufacturer#1	almond antique chartreuse lavender yellow	1753.76	1
 Manufacturer#1	almond aquamarine pink moccasin thistle	1632.66	1
 Manufacturer#1	almond antique chartreuse lavender yellow	1753.76	1
+Manufacturer#1	almond antique salmon chartreuse burlywood	1602.59	1
 Manufacturer#1	almond antique burnished rose metallic	1173.15	1
 Manufacturer#1	almond aquamarine burnished black steel	1414.42	1
 Manufacturer#1	almond aquamarine pink moccasin thistle	NULL	1
-Manufacturer#1	almond antique salmon chartreuse burlywood	1602.59	1
+Manufacturer#1	almond antique chartreuse lavender yellow	1753.76	1
 Manufacturer#1	almond antique burnished rose metallic	1173.15	1
+Manufacturer#3	almond antique forest lavender goldenrod	1190.27	1
+Manufacturer#3	almond antique forest lavender goldenrod	1190.27	1
+Manufacturer#3	almond antique metallic orange dim	55.39	1
 Manufacturer#3	almond antique olive coral navajo	1337.29	1
-Manufacturer#3	almond antique forest lavender goldenrod	590.27	1
 Manufacturer#3	almond antique chartreuse khaki white	99.68	1
-Manufacturer#3	almond antique metallic orange dim	55.39	1
+Manufacturer#3	almond antique forest lavender goldenrod	590.27	1
 Manufacturer#3	almond antique misty red olive	1922.98	1
-Manufacturer#3	almond antique forest lavender goldenrod	1190.27	1
-Manufacturer#3	almond antique forest lavender goldenrod	1190.27	1
 Manufacturer#3	almond antique forest lavender goldenrod	NULL	1
 PREHOOK: query: explain vectorization detail
 select p_mfgr, p_name, p_retailprice,
@@ -5809,13 +5809,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:p_mfgr:string, 1:p_name:string, 2:p_retailprice:double, 3:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Reduce Output Operator
-                    key expressions: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp), p_name (type: string)
+                    key expressions: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp), p_name (type: string)
                     sort order: +++
-                    Map-reduce partition columns: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp)
+                    Map-reduce partition columns: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkObjectHashOperator
                         keyColumnNums: [0, 6, 1]
-                        keyExpressions: IfExprColumnNull(col 4:boolean, col 5:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 4:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00) -> 5:timestamp) -> 6:timestamp
+                        keyExpressions: IfExprColumnNull(col 4:boolean, col 5:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 4:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00.0) -> 5:timestamp) -> 6:timestamp
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                         partitionColumnNums: [0, 9]
@@ -5873,7 +5873,7 @@ STAGE PLANS:
                         input alias: ptf_1
                         name: windowingtablefunction
                         order by: _col1 ASC NULLS FIRST
-                        partition by: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END
+                        partition by: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END
                         raw input shape:
                         window functions:
                             window function definition
@@ -5894,7 +5894,7 @@ STAGE PLANS:
                       orderExpressions: [col 2:string]
                       outputColumns: [4, 0, 2, 3]
                       outputTypes: [int, string, string, double]
-                      partitionExpressions: [col 0:string, IfExprColumnNull(col 5:boolean, col 6:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 5:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00) -> 6:timestamp) -> 7:timestamp]
+                      partitionExpressions: [col 0:string, IfExprColumnNull(col 5:boolean, col 6:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 5:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00.0) -> 6:timestamp) -> 7:timestamp]
                       streamingColumns: [4]
                   Statistics: Num rows: 40 Data size: 19816 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
@@ -6541,13 +6541,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:p_mfgr:string, 1:p_name:string, 2:p_retailprice:double, 3:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Reduce Output Operator
-                    key expressions: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp), p_name (type: string)
+                    key expressions: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp), p_name (type: string)
                     sort order: +++
-                    Map-reduce partition columns: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp)
+                    Map-reduce partition columns: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkObjectHashOperator
                         keyColumnNums: [0, 6, 1]
-                        keyExpressions: IfExprColumnNull(col 4:boolean, col 5:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 4:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00) -> 5:timestamp) -> 6:timestamp
+                        keyExpressions: IfExprColumnNull(col 4:boolean, col 5:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 4:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00.0) -> 5:timestamp) -> 6:timestamp
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                         partitionColumnNums: [0, 9]
@@ -6605,7 +6605,7 @@ STAGE PLANS:
                         input alias: ptf_1
                         name: windowingtablefunction
                         order by: _col1 ASC NULLS FIRST
-                        partition by: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END
+                        partition by: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END
                         raw input shape:
                         window functions:
                             window function definition
@@ -6626,7 +6626,7 @@ STAGE PLANS:
                       orderExpressions: [col 2:string]
                       outputColumns: [4, 0, 2, 3]
                       outputTypes: [int, string, string, double]
-                      partitionExpressions: [col 0:string, IfExprColumnNull(col 5:boolean, col 6:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 5:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00) -> 6:timestamp) -> 7:timestamp]
+                      partitionExpressions: [col 0:string, IfExprColumnNull(col 5:boolean, col 6:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 5:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00.0) -> 6:timestamp) -> 7:timestamp]
                       streamingColumns: [4]
                   Statistics: Num rows: 40 Data size: 19816 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
@@ -6743,13 +6743,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:p_mfgr:string, 1:p_name:string, 2:p_retailprice:double, 3:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Reduce Output Operator
-                    key expressions: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp)
+                    key expressions: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp)
                     sort order: ++
-                    Map-reduce partition columns: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp)
+                    Map-reduce partition columns: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkMultiKeyOperator
                         keyColumnNums: [0, 6]
-                        keyExpressions: IfExprColumnNull(col 4:boolean, col 5:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 4:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00) -> 5:timestamp) -> 6:timestamp
+                        keyExpressions: IfExprColumnNull(col 4:boolean, col 5:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 4:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00.0) -> 5:timestamp) -> 6:timestamp
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                         valueColumnNums: [1, 2]
@@ -6793,13 +6793,13 @@ STAGE PLANS:
                       Windowing table definition
                         input alias: ptf_1
                         name: windowingtablefunction
-                        order by: _col0 ASC NULLS FIRST, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END ASC NULLS FIRST
-                        partition by: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END
+                        order by: _col0 ASC NULLS FIRST, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END ASC NULLS FIRST
+                        partition by: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END
                         raw input shape:
                         window functions:
                             window function definition
                               alias: rank_window_0
-                              arguments: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END
+                              arguments: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00.0') ELSE (CAST( null AS TIMESTAMP)) END
                               name: rank
                               window function: GenericUDAFRankEvaluator
                               window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX)
@@ -6836,12 +6836,6 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@vector_ptf_part_simple_orc
 #### A masked pattern was here ####
 p_mfgr	p_name	p_retailprice	r
-Manufacturer#5	almond antique blue firebrick mint	1789.69	1
-Manufacturer#5	almond azure blanched chiffon midnight	1464.48	1
-Manufacturer#5	almond aquamarine dodger light gainsboro	1018.1	1
-Manufacturer#5	almond antique medium spring khaki	1611.66	1
-Manufacturer#5	almond antique sky peru orange	1788.73	1
-Manufacturer#5	almond antique medium spring khaki	1611.66	1
 Manufacturer#2	almond aquamarine rose maroon antique	900.66	1
 Manufacturer#2	almond aquamarine rose maroon antique	1698.66	1
 Manufacturer#2	almond antique violet turquoise frosted	1800.7	1
@@ -6850,29 +6844,35 @@ Manufacturer#2	almond antique violet turquoise frosted	1800.7	1
 Manufacturer#2	almond antique violet turquoise frosted	1800.7	1
 Manufacturer#2	almond aquamarine sandy cyan gainsboro	1000.6	1
 Manufacturer#2	almond aquamarine midnight light salmon	2031.98	1
+Manufacturer#5	almond antique sky peru orange	1788.73	1
+Manufacturer#5	almond antique medium spring khaki	1611.66	1
+Manufacturer#5	almond antique medium spring khaki	1611.66	1
+Manufacturer#5	almond aquamarine dodger light gainsboro	1018.1	1
+Manufacturer#5	almond azure blanched chiffon midnight	1464.48	1
+Manufacturer#5	almond antique blue firebrick mint	1789.69	1
 Manufacturer#4	almond azure aquamarine papaya violet	1290.35	1
-Manufacturer#4	almond aquamarine yellow dodger mint	1844.92	1
-Manufacturer#4	almond aquamarine floral ivory bisque	1206.26	1
+Manufacturer#4	almond aquamarine floral ivory bisque	NULL	1
 Manufacturer#4	almond antique gainsboro frosted violet	NULL	1
 Manufacturer#4	almond antique violet mint lemon	1375.42	1
-Manufacturer#4	almond aquamarine floral ivory bisque	NULL	1
-Manufacturer#1	almond antique chartreuse lavender yellow	1753.76	1
+Manufacturer#4	almond aquamarine yellow dodger mint	1844.92	1
+Manufacturer#4	almond aquamarine floral ivory bisque	1206.26	1
 Manufacturer#1	almond aquamarine pink moccasin thistle	1632.66	1
 Manufacturer#1	almond antique chartreuse lavender yellow	1753.76	1
-Manufacturer#1	almond antique chartreuse lavender yellow	1753.76	1
 Manufacturer#1	almond aquamarine pink moccasin thistle	1632.66	1
+Manufacturer#1	almond antique chartreuse lavender yellow	1753.76	1
 Manufacturer#1	almond aquamarine pink moccasin thistle	1632.66	1
 Manufacturer#1	almond antique chartreuse lavender yellow	1753.76	1
+Manufacturer#1	almond antique salmon chartreuse burlywood	1602.59	1
 Manufacturer#1	almond antique burnished rose metallic	1173.15	1
 Manufacturer#1	almond aquamarine burnished black steel	1414.42	1
 Manufacturer#1	almond aquamarine pink moccasin thistle	NULL	1
-Manufacturer#1	almond antique salmon chartreuse burlywood	1602.59	1
+Manufacturer#1	almond antique chartreuse lavender yellow	1753.76	1
 Manufacturer#1	almond antique burnished rose metallic	1173.15	1
+Manufacturer#3	almond antique forest lavender goldenrod	1190.27	1
+Manufacturer#3	almond antique forest lavender goldenrod	1190.27	1
+Manufacturer#3	almond antique metallic orange dim	55.39	1
 Manufacturer#3	almond antique olive coral navajo	1337.29	1
-Manufacturer#3	almond antique forest lavender goldenrod	590.27	1
 Manufacturer#3	almond antique chartreuse khaki white	99.68	1
-Manufacturer#3	almond antique metallic orange dim	55.39	1
+Manufacturer#3	almond antique forest lavender goldenrod	590.27	1
 Manufacturer#3	almond antique misty red olive	1922.98	1
-Manufacturer#3	almond antique forest lavender goldenrod	1190.27	1
-Manufacturer#3	almond antique forest lavender goldenrod	1190.27	1
 Manufacturer#3	almond antique forest lavender goldenrod	NULL	1

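Note that every reordered row above keeps r = 1: the window's ORDER BY expressions are the same as its PARTITION BY expressions, so all rows within a partition tie and rank() returns 1 for each; only the non-deterministic output order differs. A toy model of that tie behavior, with illustrative names:

    import java.util.Arrays;
    import java.util.List;

    public class RankTieSketch {
        // Toy rank(): 1-based position of the first row with an equal sort key.
        static int[] rank(List<String> sortKeys) {
            int[] r = new int[sortKeys.size()];
            for (int i = 0; i < sortKeys.size(); i++) {
                r[i] = sortKeys.indexOf(sortKeys.get(i)) + 1;
            }
            return r;
        }

        public static void main(String[] args) {
            // Sort keys equal partition keys, so every row in the partition ties.
            List<String> keys = Arrays.asList("Manufacturer#5", "Manufacturer#5", "Manufacturer#5");
            System.out.println(Arrays.toString(rank(keys))); // [1, 1, 1]
        }
    }
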
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vector_udf_adaptor_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_udf_adaptor_1.q.out b/ql/src/test/results/clientpositive/llap/vector_udf_adaptor_1.q.out
index e7a0ffb..1c96cd6 100644
--- a/ql/src/test/results/clientpositive/llap/vector_udf_adaptor_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_udf_adaptor_1.q.out
@@ -131,7 +131,7 @@ STAGE PLANS:
                   alias: student_10_lines
                   Statistics: Num rows: 12 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: name (type: string), age (type: int), gpa (type: double), if((age < 40), age, null) (type: int), if((age > 40), TIMESTAMP'2011-01-01 01:01:01', null) (type: timestamp), if((length(name) > 8), name, null) (type: string), if((length(name) < 8), CAST( name AS BINARY), null) (type: binary), if((age > 40), length(name), null) (type: int), if((length(name) > 10), (2.0D * gpa), null) (type: double)
+                    expressions: name (type: string), age (type: int), gpa (type: double), if((age < 40), age, null) (type: int), if((age > 40), TIMESTAMP'2011-01-01 01:01:01.0', null) (type: timestamp), if((length(name) > 8), name, null) (type: string), if((length(name) < 8), CAST( name AS BINARY), null) (type: binary), if((age > 40), length(name), null) (type: int), if((length(name) > 10), (2.0D * gpa), null) (type: double)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Statistics: Num rows: 12 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
@@ -290,13 +290,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:name:string, 1:age:int, 2:gpa:double, 3:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: name (type: string), age (type: int), gpa (type: double), if((age < 40), age, null) (type: int), if((age > 40), TIMESTAMP'2011-01-01 01:01:01', null) (type: timestamp), if((length(name) > 8), name, null) (type: string), if((length(name) < 8), CAST( name AS BINARY), null) (type: binary), if((age > 40), length(name), null) (type: int), if((length(name) > 10), (2.0D * gpa), null) (type: double)
+                    expressions: name (type: string), age (type: int), gpa (type: double), if((age < 40), age, null) (type: int), if((age > 40), TIMESTAMP'2011-01-01 01:01:01.0', null) (type: timestamp), if((length(name) > 8), name, null) (type: string), if((length(name) < 8), CAST( name AS BINARY), null) (type: binary), if((age > 40), length(name), null) (type: int), if((length(name) > 10), (2.0D * gpa), null) (type: double)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [0, 1, 2, 5, 8, 11, 14, 16, 20]
-                        selectExpressions: IfExprColumnNull(col 4:boolean, col 1:int, null)(children: LongColLessLongScalar(col 1:int, val 40) -> 4:boolean, col 1:int) -> 5:int, IfExprColumnNull(col 6:boolean, col 7:timestamp, null)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 6:boolean, ConstantVectorExpression(val 2011-01-01 01:01:01) -> 7:timestamp) -> 8:timestamp, IfExprColumnNull(col 10:boolean, col 0:string, null)(children: LongColGreaterLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 10:boolean, col 0:string) -> 11:string, IfExprColumnNull(col 12:boolean, col 13:binary, null)(children: LongColLessLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 12:boolean, VectorUDFAdaptor(CAST( name AS BINARY)) -> 13:binary) -> 14:binary, IfExprColumnNull(col 9:boolean, col 15:int, null)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 9:boolean, StringLength(col 0:string) -> 15:int) -> 16:int, IfExprColumnNull(col 18:boolean, col 19:double, null)(children: LongColGreaterLongScalar(col 17:int, val 10)(children: StringLength(col 0:string) -> 17:int) -> 18:boolean, DoubleScalarMultiplyDoubleColumn(val 2.0, col 2:double) -> 19:double) -> 20:double
+                        selectExpressions: IfExprColumnNull(col 4:boolean, col 1:int, null)(children: LongColLessLongScalar(col 1:int, val 40) -> 4:boolean, col 1:int) -> 5:int, IfExprColumnNull(col 6:boolean, col 7:timestamp, null)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 6:boolean, ConstantVectorExpression(val 2011-01-01 01:01:01.0) -> 7:timestamp) -> 8:timestamp, IfExprColumnNull(col 10:boolean, col 0:string, null)(children: LongColGreaterLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 10:boolean, col 0:string) -> 11:string, IfExprColumnNull(col 12:boolean, col 13:binary, null)(children: LongColLessLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 12:boolean, VectorUDFAdaptor(CAST( name AS BINARY)) -> 13:binary) -> 14:binary, IfExprColumnNull(col 9:boolean, col 15:int, null)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 9:boolean, StringLength(col 0:string) -> 15:int) -> 16:int, IfExprColumnNull(col 18:boolean, col 19:double, null)(children: LongColGreaterLongScalar(col 17:int, val 10)(children: StringLength(col 0:string) -> 17:int) -> 18:boolean, DoubleScalarMultiplyDoubleColumn(val 2.0, col 2:double) -> 19:double) -> 20:double
                     Statistics: Num rows: 12 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
@@ -466,13 +466,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:name:string, 1:age:int, 2:gpa:double, 3:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: name (type: string), age (type: int), gpa (type: double), if((age < 40), age, null) (type: int), if((age > 40), TIMESTAMP'2011-01-01 01:01:01', null) (type: timestamp), if((length(name) > 8), name, null) (type: string), if((length(name) < 8), CAST( name AS BINARY), null) (type: binary), if((age > 40), length(name), null) (type: int), if((length(name) > 10), (2.0D * gpa), null) (type: double)
+                    expressions: name (type: string), age (type: int), gpa (type: double), if((age < 40), age, null) (type: int), if((age > 40), TIMESTAMP'2011-01-01 01:01:01.0', null) (type: timestamp), if((length(name) > 8), name, null) (type: string), if((length(name) < 8), CAST( name AS BINARY), null) (type: binary), if((age > 40), length(name), null) (type: int), if((length(name) > 10), (2.0D * gpa), null) (type: double)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [0, 1, 2, 5, 8, 11, 14, 16, 20]
-                        selectExpressions: IfExprColumnNull(col 4:boolean, col 1:int, null)(children: LongColLessLongScalar(col 1:int, val 40) -> 4:boolean, col 1:int) -> 5:int, IfExprColumnNull(col 6:boolean, col 7:timestamp, null)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 6:boolean, ConstantVectorExpression(val 2011-01-01 01:01:01) -> 7:timestamp) -> 8:timestamp, IfExprColumnNull(col 10:boolean, col 0:string, null)(children: LongColGreaterLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 10:boolean, col 0:string) -> 11:string, IfExprCondExprNull(col 12:boolean, col 13:binary, null)(children: LongColLessLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 12:boolean, VectorUDFAdaptor(CAST( name AS BINARY)) -> 13:binary) -> 14:binary, IfExprCondExprNull(col 9:boolean, col 15:int, null)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 9:boolean, StringLength(col 0:string) -> 15:int) -> 16:int, IfExprCondExprNull(col 18:boolean, col 19:double, null)(children: LongColGreaterLongScalar(col 17:int, val 10)(children: StringLength(col 0:string) -> 17:int) -> 18:boolean, DoubleScalarMultiplyDoubleColumn(val 2.0, col 2:double) -> 19:double) -> 20:double
+                        selectExpressions: IfExprColumnNull(col 4:boolean, col 1:int, null)(children: LongColLessLongScalar(col 1:int, val 40) -> 4:boolean, col 1:int) -> 5:int, IfExprColumnNull(col 6:boolean, col 7:timestamp, null)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 6:boolean, ConstantVectorExpression(val 2011-01-01 01:01:01.0) -> 7:timestamp) -> 8:timestamp, IfExprColumnNull(col 10:boolean, col 0:string, null)(children: LongColGreaterLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 10:boolean, col 0:string) -> 11:string, IfExprCondExprNull(col 12:boolean, col 13:binary, null)(children: LongColLessLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 12:boolean, VectorUDFAdaptor(CAST( name AS BINARY)) -> 13:binary) -> 14:binary, IfExprCondExprNull(col 9:boolean, col 15:int, null)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 9:boolean, StringLength(col 0:string) -> 15:int) -> 16:int, IfExprCondExprNull(col 18:boolean, col 19:double, null)(children: LongColGreaterLongScalar(col 17:int, val 10)(children: StringLength(col 0:string) -> 17:int) -> 18:boolean, DoubleScalarMultiplyDoubleColumn(val 2.0, col 2:double) -> 19:double) -> 20:double
                     Statistics: Num rows: 12 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
@@ -639,7 +639,7 @@ STAGE PLANS:
                   alias: student_10_lines
                   Statistics: Num rows: 12 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: name (type: string), age (type: int), gpa (type: double), if((age < 40), null, age) (type: int), if((age > 40), null, TIMESTAMP'2011-01-01 01:01:01') (type: timestamp), if((length(name) > 8), null, name) (type: string), if((length(name) < 8), null, CAST( name AS BINARY)) (type: binary), if((age > 40), null, length(name)) (type: int), if((length(name) > 10), null, (2.0D * gpa)) (type: double)
+                    expressions: name (type: string), age (type: int), gpa (type: double), if((age < 40), null, age) (type: int), if((age > 40), null, TIMESTAMP'2011-01-01 01:01:01.0') (type: timestamp), if((length(name) > 8), null, name) (type: string), if((length(name) < 8), null, CAST( name AS BINARY)) (type: binary), if((age > 40), null, length(name)) (type: int), if((length(name) > 10), null, (2.0D * gpa)) (type: double)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Statistics: Num rows: 12 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
@@ -798,13 +798,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:name:string, 1:age:int, 2:gpa:double, 3:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: name (type: string), age (type: int), gpa (type: double), if((age < 40), null, age) (type: int), if((age > 40), null, TIMESTAMP'2011-01-01 01:01:01') (type: timestamp), if((length(name) > 8), null, name) (type: string), if((length(name) < 8), null, CAST( name AS BINARY)) (type: binary), if((age > 40), null, length(name)) (type: int), if((length(name) > 10), null, (2.0D * gpa)) (type: double)
+                    expressions: name (type: string), age (type: int), gpa (type: double), if((age < 40), null, age) (type: int), if((age > 40), null, TIMESTAMP'2011-01-01 01:01:01.0') (type: timestamp), if((length(name) > 8), null, name) (type: string), if((length(name) < 8), null, CAST( name AS BINARY)) (type: binary), if((age > 40), null, length(name)) (type: int), if((length(name) > 10), null, (2.0D * gpa)) (type: double)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [0, 1, 2, 5, 8, 11, 14, 16, 20]
-                        selectExpressions: IfExprNullColumn(col 4:boolean, null, col 1)(children: LongColLessLongScalar(col 1:int, val 40) -> 4:boolean, col 1:int) -> 5:int, IfExprNullColumn(col 6:boolean, null, col 7)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 6:boolean, ConstantVectorExpression(val 2011-01-01 01:01:01) -> 7:timestamp) -> 8:timestamp, IfExprNullColumn(col 10:boolean, null, col 0)(children: LongColGreaterLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 10:boolean, col 0:string) -> 11:string, IfExprNullColumn(col 12:boolean, null, col 13)(children: LongColLessLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 12:boolean, VectorUDFAdaptor(CAST( name AS BINARY)) -> 13:binary) -> 14:binary, IfExprNullColumn(col 9:boolean, null, col 15)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 9:boolean, StringLength(col 0:string) -> 15:int) -> 16:int, IfExprNullColumn(col 18:boolean, null, col 19)(children: LongColGreaterLongScalar(col 17:int, val 10)(children: StringLength(col 0:string) -> 17:int) -> 18:boolean, DoubleScalarMultiplyDoubleColumn(val 2.0, col 2:double) -> 19:double) -> 20:double
+                        selectExpressions: IfExprNullColumn(col 4:boolean, null, col 1)(children: LongColLessLongScalar(col 1:int, val 40) -> 4:boolean, col 1:int) -> 5:int, IfExprNullColumn(col 6:boolean, null, col 7)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 6:boolean, ConstantVectorExpression(val 2011-01-01 01:01:01.0) -> 7:timestamp) -> 8:timestamp, IfExprNullColumn(col 10:boolean, null, col 0)(children: LongColGreaterLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 10:boolean, col 0:string) -> 11:string, IfExprNullColumn(col 12:boolean, null, col 13)(children: LongColLessLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 12:boolean, VectorUDFAdaptor(CAST( name AS BINARY)) -> 13:binary) -> 14:binary, IfExprNullColumn(col 9:boolean, null, col 15)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 9:boolean, StringLength(col 0:string) -> 15:int) -> 16:int, IfExprNullColumn(col 18:boolean, null, col 19)(children: LongColGreaterLongScalar(col 17:int, val 10)(children: StringLength(col 0:string) -> 17:int) -> 18:boolean, DoubleScalarMultiplyDoubleColumn(val 2.0, col 2:double) -> 19:double) -> 20:double
                     Statistics: Num rows: 12 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
@@ -974,13 +974,13 @@ STAGE PLANS:
                       native: true
                       vectorizationSchemaColumns: [0:name:string, 1:age:int, 2:gpa:double, 3:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
                   Select Operator
-                    expressions: name (type: string), age (type: int), gpa (type: double), if((age < 40), null, age) (type: int), if((age > 40), null, TIMESTAMP'2011-01-01 01:01:01') (type: timestamp), if((length(name) > 8), null, name) (type: string), if((length(name) < 8), null, CAST( name AS BINARY)) (type: binary), if((age > 40), null, length(name)) (type: int), if((length(name) > 10), null, (2.0D * gpa)) (type: double)
+                    expressions: name (type: string), age (type: int), gpa (type: double), if((age < 40), null, age) (type: int), if((age > 40), null, TIMESTAMP'2011-01-01 01:01:01.0') (type: timestamp), if((length(name) > 8), null, name) (type: string), if((length(name) < 8), null, CAST( name AS BINARY)) (type: binary), if((age > 40), null, length(name)) (type: int), if((length(name) > 10), null, (2.0D * gpa)) (type: double)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                     Select Vectorization:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [0, 1, 2, 5, 8, 11, 14, 16, 20]
-                        selectExpressions: IfExprNullColumn(col 4:boolean, null, col 1)(children: LongColLessLongScalar(col 1:int, val 40) -> 4:boolean, col 1:int) -> 5:int, IfExprNullColumn(col 6:boolean, null, col 7)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 6:boolean, ConstantVectorExpression(val 2011-01-01 01:01:01) -> 7:timestamp) -> 8:timestamp, IfExprNullColumn(col 10:boolean, null, col 0)(children: LongColGreaterLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 10:boolean, col 0:string) -> 11:string, IfExprNullCondExpr(col 12:boolean, null, col 13:binary)(children: LongColLessLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 12:boolean, VectorUDFAdaptor(CAST( name AS BINARY)) -> 13:binary) -> 14:binary, IfExprNullCondExpr(col 9:boolean, null, col 15:int)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 9:boolean, StringLength(col 0:string) -> 15:int) -> 16:int, IfExprNullCondExpr(col 18:boolean, null, col 19:double)(children: LongColGreaterLongScalar(col 17:int, val 10)(children: StringLength(col 0:string) -> 17:int) -> 18:boolean, DoubleScalarMultiplyDoubleColumn(val 2.0, col 2:double) -> 19:double) -> 20:double
+                        selectExpressions: IfExprNullColumn(col 4:boolean, null, col 1)(children: LongColLessLongScalar(col 1:int, val 40) -> 4:boolean, col 1:int) -> 5:int, IfExprNullColumn(col 6:boolean, null, col 7)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 6:boolean, ConstantVectorExpression(val 2011-01-01 01:01:01.0) -> 7:timestamp) -> 8:timestamp, IfExprNullColumn(col 10:boolean, null, col 0)(children: LongColGreaterLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 10:boolean, col 0:string) -> 11:string, IfExprNullCondExpr(col 12:boolean, null, col 13:binary)(children: LongColLessLongScalar(col 9:int, val 8)(children: StringLength(col 0:string) -> 9:int) -> 12:boolean, VectorUDFAdaptor(CAST( name AS BINARY)) -> 13:binary) -> 14:binary, IfExprNullCondExpr(col 9:boolean, null, col 15:int)(children: LongColGreaterLongScalar(col 1:int, val 40) -> 9:boolean, StringLength(col 0:string) -> 15:int) -> 16:int, IfExprNullCondExpr(col 18:boolean, null, col 19:double)(children: LongColGreaterLongScalar(col 17:int, val 10)(children: StringLength(col 0:string) -> 17:int) -> 18:boolean, DoubleScalarMultiplyDoubleColumn(val 2.0, col 2:double) -> 19:double) -> 20:double
                     Statistics: Num rows: 12 Data size: 2352 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false

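The ".0" suffix restored throughout the plan output above (TIMESTAMP'2011-01-01 01:01:01.0') is plain java.sql.Timestamp formatting: its toString() always prints at least one fractional digit, whereas the Hive Timestamp type being reverted trims a zero fraction. A minimal sketch of the JDK side of that difference:

    import java.sql.Timestamp;

    public class TimestampToStringDemo {
      public static void main(String[] args) {
        // java.sql.Timestamp.toString() always emits a fractional part,
        // so a whole-second value renders with a trailing ".0".
        Timestamp ts = Timestamp.valueOf("2011-01-01 01:01:01");
        System.out.println(ts);  // prints 2011-01-01 01:01:01.0
      }
    }

That one formatting detail accounts for every TIMESTAMP literal change in these q.out hunks.
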
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vectorization_13.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_13.q.out b/ql/src/test/results/clientpositive/llap/vectorization_13.q.out
index 56e3883..222d232 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_13.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_13.q.out
@@ -24,8 +24,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -57,8 +57,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -93,8 +93,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28789.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28788.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-                    predicate: (((UDFToDouble(ctimestamp1) > -28789.0D) and (UDFToDouble(ctimestamp2) <> -28788.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 11.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val 12.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+                    predicate: (((UDFToDouble(ctimestamp1) > 11.0D) and (UDFToDouble(ctimestamp2) <> 12.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
                     Statistics: Num rows: 5461 Data size: 901772 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -270,8 +270,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -304,8 +304,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28789)
-              AND ((ctimestamp2 != -28788)
+          OR ((ctimestamp1 > 11)
+              AND ((ctimestamp2 != 12)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -379,8 +379,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -412,8 +412,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -447,8 +447,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28801.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28801.336)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
-                    predicate: (((UDFToDouble(ctimestamp1) > -28801.388D) and (UDFToDouble(ctimestamp2) <> -28801.336D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+                        predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -1.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -1.3359999999999999)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+                    predicate: (((UDFToDouble(ctimestamp1) > -1.388D) and (UDFToDouble(ctimestamp2) <> -1.3359999999999999D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
                     Statistics: Num rows: 5461 Data size: 901772 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -600,8 +600,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -634,8 +634,8 @@ FROM     alltypesorc
 WHERE    (((cfloat < 3569)
            AND ((10.175 >= cdouble)
                 AND (cboolean1 != 1)))
-          OR ((ctimestamp1 > -28801.388)
-              AND ((ctimestamp2 != -28801.3359999999999999)
+          OR ((ctimestamp1 > -1.388)
+              AND ((ctimestamp2 != -1.3359999999999999)
                    AND (ctinyint < 9763215.5639))))
 GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16

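Every timestamp constant in the vectorization_13 hunks above (and the vectorization_7 hunks below) shifts by exactly 28800 seconds: -28789 -> 11, -28800 -> 0, -28815 -> -15, -28792.315 -> 7.685. The revert makes CastTimestampToDouble interpret the stored wall clock in the JVM default zone instead of UTC, and these expected files assume a test JVM pinned to US/Pacific (UTC-8 outside daylight saving). A minimal sketch of the shift, under that timezone assumption:

    import java.sql.Timestamp;
    import java.util.TimeZone;

    public class TimestampShiftDemo {
      public static void main(String[] args) {
        String wallClock = "1969-12-31 16:00:11";

        // Reverted behavior: the wall clock is bound to the JVM default
        // zone (US/Pacific here), landing 11 s after the epoch.
        TimeZone.setDefault(TimeZone.getTimeZone("US/Pacific"));
        System.out.println(Timestamp.valueOf(wallClock).getTime() / 1000.0);  // 11.0

        // HIVE-12192 behavior: the same wall clock bound to UTC lands
        // 8 * 3600 = 28800 s earlier on the epoch axis.
        TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
        System.out.println(Timestamp.valueOf(wallClock).getTime() / 1000.0);  // -28789.0
      }
    }
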
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vectorization_7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_7.q.out b/ql/src/test/results/clientpositive/llap/vectorization_7.q.out
index 19e39c8..b0e682a 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_7.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_7.q.out
@@ -16,11 +16,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -43,11 +43,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -80,8 +80,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28815.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
-                    predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28815.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+                        predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -15.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+                    predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -15.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
                     Statistics: Num rows: 5461 Data size: 1342196 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -184,11 +184,11 @@ PREHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -212,11 +212,11 @@ POSTHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800)
+        AND (((ctimestamp1 <= 0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28815)
+              OR ((ctimestamp2 > -15)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -266,11 +266,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -293,11 +293,11 @@ SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -329,8 +329,8 @@ STAGE PLANS:
                     Filter Vectorization:
                         className: VectorFilterOperator
                         native: true
-                        predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28792.315)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
-                    predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28792.315D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+                        predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 7.6850000000000005)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+                    predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > 7.6850000000000005D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
                     Statistics: Num rows: 5461 Data size: 1342196 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -418,11 +418,11 @@ PREHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25
@@ -446,11 +446,11 @@ POSTHOOK: query: SELECT cboolean1,
        ((-(ctinyint)) % ctinyint) as c9
 FROM   alltypesorc
 WHERE  ((ctinyint != 0)
-        AND (((ctimestamp1 <= -28800.0)
+        AND (((ctimestamp1 <= 0.0)
           OR ((ctinyint = cint)
                OR (cstring2 LIKE 'ss')))
           AND ((988888 < cdouble)
-              OR ((ctimestamp2 > -28792.3149999999999995)
+              OR ((ctimestamp2 > 7.6850000000000005)
                   AND (3569 >= cdouble)))))
 ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
 LIMIT 25

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out
index 0ebf499..f19d8a6 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_decimal_date.q.out
@@ -12,9 +12,9 @@ POSTHOOK: Lineage: date_decimal_test.cdate EXPRESSION [(alltypesorc)alltypesorc.
 POSTHOOK: Lineage: date_decimal_test.cdecimal EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
 POSTHOOK: Lineage: date_decimal_test.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
 POSTHOOK: Lineage: date_decimal_test.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
-PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
   enabled: true
@@ -44,12 +44,12 @@ STAGE PLANS:
                     predicate: (cdouble is not null and cint is not null) (type: boolean)
                     Statistics: Num rows: 11060 Data size: 1891486 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: cdate (type: date), cint (type: int), cdecimal (type: decimal(20,10))
-                      outputColumnNames: _col0, _col1, _col2
+                      expressions: cdate (type: date), cdecimal (type: decimal(20,10))
+                      outputColumnNames: _col0, _col1
                       Select Vectorization:
                           className: VectorSelectOperator
                           native: true
-                          projectedOutputColumnNums: [2, 0, 3]
+                          projectedOutputColumnNums: [2, 3]
                       Statistics: Num rows: 11060 Data size: 1891486 Basic stats: COMPLETE Column stats: NONE
                       Limit
                         Number of rows: 10
@@ -85,21 +85,21 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+PREHOOK: query: SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@date_decimal_test
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
+POSTHOOK: query: SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@date_decimal_test
 #### A masked pattern was here ####
-1970-01-07	528534767	-7959.5837837838
-1970-01-07	528534767	-2516.4135135135
-1970-01-07	528534767	-9445.0621621622
-1970-01-07	528534767	-5713.7459459459
-1970-01-07	528534767	8963.6405405405
-1970-01-07	528534767	4193.6243243243
-1970-01-07	528534767	2964.3864864865
-1970-01-07	528534767	-4673.2540540541
-1970-01-07	528534767	-9216.8945945946
-1970-01-07	528534767	-9287.3756756757
+1970-01-06	-7959.5837837838
+1970-01-06	-2516.4135135135
+1970-01-06	-9445.0621621622
+1970-01-06	-5713.7459459459
+1970-01-06	8963.6405405405
+1970-01-06	4193.6243243243
+1970-01-06	2964.3864864865
+1970-01-06	-4673.2540540541
+1970-01-06	-9216.8945945946
+1970-01-06	-9287.3756756757

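The result rows above flip from 1970-01-07 to 1970-01-06 by the same mechanism applied to dates: java.sql.Date renders its instant in the JVM default zone, so a Pacific JVM reports the previous calendar day when the UTC instant falls in the early hours. A sketch, assuming (for illustration only) that the stored instant is the row's cint value 528534767 read as epoch milliseconds:

    import java.sql.Date;
    import java.util.TimeZone;

    public class EpochDateDemo {
      public static void main(String[] args) {
        long millis = 528534767L;  // assumed instant, borrowed from the cint column

        // 1970-01-07 02:48 UTC is still 1970-01-06 18:48 in US/Pacific,
        // so the rendered calendar day depends on the default zone.
        TimeZone.setDefault(TimeZone.getTimeZone("US/Pacific"));
        System.out.println(new Date(millis));  // 1970-01-06

        TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
        System.out.println(new Date(millis));  // 1970-01-07
      }
    }
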
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
index fa6fd6c..d12e038 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
@@ -265,7 +265,7 @@ WHERE  ((762 = cbigint)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
--1.2803533196894065E7	-1.2807261196894065E7	1.2807261196894065E7	-1.2807261196894065E7	1.6402593936546838E14	-275125.557	1.2803533196894065E7	6.102557176084042E8	-2.1007230485194618E21	9480.304481867239	-6.102557176084042E8	6.230629788052982E8	3.8022774524605715E17	3.7261870682317882E17	-11.503947368421052	-3.7261870682317882E17	3.7261870682317882E17	1083935.5552547143	6.104250214589658E8	-1083935.5552547143	46.53705506862114	-51	1029	-4.705076768887381E-5	-46.53705506862114
+1.6000018929276082E8	1.5999646129276082E8	-1.5999646129276082E8	1.5999646129276082E8	2.5598867626205912E16	-8706342.964000002	-1.6000018929276082E8	5.481251832900263E8	4.095728233294762E24	8549.657499338193	-5.481251832900263E8	3.8812872199726546E8	2.12743126884874784E17	3.0054786945575117E17	-5.700752675298234	-3.0054786945575117E17	3.0054786945575117E17	973579.3664121248	5.482224634724039E8	-973579.3664121248	-18.377427808018613	-64	2044	-6.573680812059058E-5	18.377427808018613
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION
 SELECT MAX(cint),
        (MAX(cint) / -3728),
@@ -987,7 +987,7 @@ WHERE  (((ctimestamp2 <= ctimestamp1)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-17.0	6998.0	7015.0	1942088700	412.6470588235294	-6998.0	1.7455632335840696E8	17.0	2.9018961928004512E16	1.0774839990192407E18	-1942088700	-11.125857045077739	17.0	-2.8316279494225646E19
+-0.5934409161894847	6980.406559083811	6979.813118167622	2141851355	-11761.597368421053	-6980.406559083811	1.5852855222071928E8	-0.5934409161894847	2.5099887741860824E16	1.52140608502098611E18	-2141851355	-13.510823917813244	79.553	-3.998255191435152E19
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION
 SELECT cint,
        cdouble,
@@ -3726,7 +3726,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: COMPLETE
                   TableScan Vectorization:
                       native: true
                   Select Operator
@@ -3734,7 +3734,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: []
-                    Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 12288 Data size: 9580 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       Group By Vectorization:


[26/33] hive git commit: Revert "HIVE-12192 : Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez via Ashutosh Chauhan)"

Posted by mm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
index 477825e..d14f0a9 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
@@ -13,18 +13,19 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.serde;
 
+import java.sql.Timestamp;
 import java.util.Calendar;
+import java.util.Date;
 import java.util.GregorianCalendar;
 import java.util.TimeZone;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
-import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
-import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
-
 import junit.framework.Assert;
 import junit.framework.TestCase;
 
+import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
+import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
+
 
 
 /**
@@ -41,7 +42,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal.set(Calendar.HOUR_OF_DAY, 0);
     cal.setTimeZone(TimeZone.getTimeZone("GMT"));
 
-    Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis());
+    Timestamp ts = new Timestamp(cal.getTimeInMillis());
     NanoTime nt = NanoTimeUtils.getNanoTime(ts, false);
     Assert.assertEquals(nt.getJulianDay(), 2440000);
 
@@ -56,7 +57,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal1.set(Calendar.HOUR_OF_DAY, 0);
     cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
 
-    Timestamp ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis());
+    Timestamp ts1 = new Timestamp(cal1.getTimeInMillis());
     NanoTime nt1 = NanoTimeUtils.getNanoTime(ts1, false);
 
     Timestamp ts1Fetched = NanoTimeUtils.getTimestamp(nt1, false);
@@ -69,7 +70,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal2.set(Calendar.HOUR_OF_DAY, 0);
     cal2.setTimeZone(TimeZone.getTimeZone("UTC"));
 
-    Timestamp ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis());
+    Timestamp ts2 = new Timestamp(cal2.getTimeInMillis());
     NanoTime nt2 = NanoTimeUtils.getNanoTime(ts2, false);
 
     Timestamp ts2Fetched = NanoTimeUtils.getTimestamp(nt2, false);
@@ -85,7 +86,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal1.set(Calendar.HOUR_OF_DAY, 0);
     cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
 
-    ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis());
+    ts1 = new Timestamp(cal1.getTimeInMillis());
     nt1 = NanoTimeUtils.getNanoTime(ts1, false);
 
     ts1Fetched = NanoTimeUtils.getTimestamp(nt1, false);
@@ -98,7 +99,7 @@ public class TestParquetTimestampUtils extends TestCase {
     cal2.set(Calendar.HOUR_OF_DAY, 0);
     cal2.setTimeZone(TimeZone.getTimeZone("UTC"));
 
-    ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis());
+    ts2 = new Timestamp(cal2.getTimeInMillis());
     nt2 = NanoTimeUtils.getNanoTime(ts2, false);
 
     ts2Fetched = NanoTimeUtils.getTimestamp(nt2, false);
@@ -116,7 +117,8 @@ public class TestParquetTimestampUtils extends TestCase {
     cal.set(Calendar.MINUTE, 1);
     cal.set(Calendar.SECOND, 1);
     cal.setTimeZone(TimeZone.getTimeZone("GMT"));
-    Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 1);
+    Timestamp ts = new Timestamp(cal.getTimeInMillis());
+    ts.setNanos(1);
 
     //(1*60*60 + 1*60 + 1) * 10e9 + 1
     NanoTime nt = NanoTimeUtils.getNanoTime(ts, false);
@@ -131,7 +133,8 @@ public class TestParquetTimestampUtils extends TestCase {
     cal.set(Calendar.MINUTE, 59);
     cal.set(Calendar.SECOND, 59);
     cal.setTimeZone(TimeZone.getTimeZone("GMT"));
-    ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 999999999);
+    ts = new Timestamp(cal.getTimeInMillis());
+    ts.setNanos(999999999);
 
     //(23*60*60 + 59*60 + 59)*10e9 + 999999999
     nt = NanoTimeUtils.getNanoTime(ts, false);
@@ -146,7 +149,8 @@ public class TestParquetTimestampUtils extends TestCase {
     cal2.set(Calendar.MINUTE, 10);
     cal2.set(Calendar.SECOND, 0);
     cal2.setTimeZone(TimeZone.getTimeZone("GMT"));
-    Timestamp ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis(), 10);
+    Timestamp ts2 = new Timestamp(cal2.getTimeInMillis());
+    ts2.setNanos(10);
 
     Calendar cal1 = Calendar.getInstance();
     cal1.set(Calendar.YEAR,  1968);
@@ -156,7 +160,8 @@ public class TestParquetTimestampUtils extends TestCase {
     cal1.set(Calendar.MINUTE, 0);
     cal1.set(Calendar.SECOND, 0);
     cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
-    Timestamp ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis(), 1);
+    Timestamp ts1 = new Timestamp(cal1.getTimeInMillis());
+    ts1.setNanos(1);
 
     NanoTime n2 = NanoTimeUtils.getNanoTime(ts2, false);
     NanoTime n1 = NanoTimeUtils.getNanoTime(ts1, false);
@@ -178,7 +183,8 @@ public class TestParquetTimestampUtils extends TestCase {
     cal.set(Calendar.MINUTE, 1);
     cal.set(Calendar.SECOND, 1);
     cal.setTimeZone(TimeZone.getTimeZone("US/Pacific"));
-    Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 1);
+    Timestamp ts = new Timestamp(cal.getTimeInMillis());
+    ts.setNanos(1);
 
     /**
      * 17:00 PDT = 00:00 GMT (daylight-savings)
@@ -206,15 +212,15 @@ public class TestParquetTimestampUtils extends TestCase {
   public void testTimezoneless() {
     Timestamp ts1 = Timestamp.valueOf("2011-01-01 00:30:30.111111111");
     NanoTime nt1 = NanoTimeUtils.getNanoTime(ts1, true);
-    Assert.assertEquals(nt1.getJulianDay(), 2455562);
-    Assert.assertEquals(nt1.getTimeOfDayNanos(), 59430111111111L);
+    Assert.assertEquals(nt1.getJulianDay(), 2455563);
+    Assert.assertEquals(nt1.getTimeOfDayNanos(), 1830111111111L);
     Timestamp ts1Fetched = NanoTimeUtils.getTimestamp(nt1, true);
     Assert.assertEquals(ts1Fetched.toString(), ts1.toString());
 
     Timestamp ts2 = Timestamp.valueOf("2011-02-02 08:30:30.222222222");
     NanoTime nt2 = NanoTimeUtils.getNanoTime(ts2, true);
     Assert.assertEquals(nt2.getJulianDay(), 2455595);
-    Assert.assertEquals(nt2.getTimeOfDayNanos(), 1830222222222L);
+    Assert.assertEquals(nt2.getTimeOfDayNanos(), 30630222222222L);
     Timestamp ts2Fetched = NanoTimeUtils.getTimestamp(nt2, true);
     Assert.assertEquals(ts2Fetched.toString(), ts2.toString());
   }
@@ -245,7 +251,7 @@ public class TestParquetTimestampUtils extends TestCase {
 
     //test some extreme cases.
     verifyTsString("9999-09-09 09:09:09.999999999", local);
-    verifyTsString("0001-01-01 00:00:00", local);
+    verifyTsString("0001-01-01 00:00:00.0", local);
   }
 
   private void verifyTsString(String tsString, boolean local) {

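The TestParquetTimestampUtils revert replaces Timestamp.ofEpochMilli(millis, nanos) with the two-step new Timestamp(millis); ts.setNanos(nanos). With java.sql.Timestamp that substitution is only safe because the calendars in these tests land on whole seconds: setNanos() overwrites the entire fractional second, including any millisecond fraction the constructor already stored. A minimal sketch of that edge:

    import java.sql.Timestamp;

    public class SetNanosDemo {
      public static void main(String[] args) {
        // java.sql.Timestamp holds whole seconds plus a separate nanos
        // field carrying the full fractional second.
        Timestamp ts = new Timestamp(1500L);  // 1.5 s after the epoch
        System.out.println(ts.getNanos());    // 500000000

        // setNanos() replaces the fraction outright: the 500 ms carried
        // by the constructor is discarded, not augmented.
        ts.setNanos(1);
        System.out.println(ts.getTime());     // 1000
        System.out.println(ts.getNanos());    // 1
      }
    }
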
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java
index 97695c2..406cece 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java
@@ -19,11 +19,11 @@ package org.apache.hadoop.hive.ql.parse;
 
 import static org.junit.Assert.*;
 
+import java.sql.Date;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.junit.Test;
 
 public class TestSemanticAnalyzer {
@@ -61,7 +61,7 @@ public class TestSemanticAnalyzer {
     BaseSemanticAnalyzer.normalizeColSpec(partSpec, colName, colType, originalColSpec, colValue);
     assertEquals(result, partSpec.get(colName));
     if (colValue instanceof Date) {
-      DateWritableV2 dw = new DateWritableV2((Date)colValue);
+      DateWritable dw = new DateWritable((Date)colValue);
       BaseSemanticAnalyzer.normalizeColSpec(partSpec, colName, colType, originalColSpec, dw);
       assertEquals(result, partSpec.get(colName));
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java
index 4770ab7..9f20ff6 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java
@@ -17,13 +17,14 @@
  */
 package org.apache.hadoop.hive.ql.udf;
 
+import java.sql.Timestamp;
 import java.time.Instant;
 import java.time.ZoneId;
+import java.time.ZoneOffset;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.junit.Test;
 
 import junit.framework.TestCase;
@@ -36,56 +37,56 @@ public class TestUDFDateFormatGranularity extends TestCase {
   public void testTimestampToTimestampWithGranularity() throws Exception {
     // Running example
     // Friday 30th August 1985 02:47:02 AM
-    final TimestampWritableV2 t = new TimestampWritableV2(Timestamp.ofEpochMilli(494243222000L));
+    final TimestampWritable t = new TimestampWritable(new Timestamp(494243222000L));
     UDFDateFloor g;
 
     // Year granularity
     // Tuesday 1st January 1985 12:00:00 AM
     g = new UDFDateFloorYear();
-    TimestampWritableV2 i1 = g.evaluate(t);
-    assertEquals(473385600000L, i1.getTimestamp().toEpochMilli());
+    TimestampWritable i1 = g.evaluate(t);
+    assertEquals(473414400000L, i1.getTimestamp().getTime());
     
     // Quarter granularity
     // Monday 1st July 1985 12:00:00 AM
     g = new UDFDateFloorQuarter();
-    TimestampWritableV2 i2 = g.evaluate(t);
-    assertEquals(489024000000L, i2.getTimestamp().toEpochMilli());
+    TimestampWritable i2 = g.evaluate(t);
+    assertEquals(489049200000L, i2.getTimestamp().getTime());
 
     // Month granularity
     // Thursday 1st August 1985 12:00:00 AM
     g = new UDFDateFloorMonth();
-    TimestampWritableV2 i3 = g.evaluate(t);
-    assertEquals(491702400000L, i3.getTimestamp().toEpochMilli());
+    TimestampWritable i3 = g.evaluate(t);
+    assertEquals(491727600000L, i3.getTimestamp().getTime());
 
     // Week granularity
     // Monday 26th August 1985 12:00:00 AM
     g = new UDFDateFloorWeek();
-    TimestampWritableV2 i4 = g.evaluate(t);
-    assertEquals(493862400000L, i4.getTimestamp().toEpochMilli());
+    TimestampWritable i4 = g.evaluate(t);
+    assertEquals(493887600000L, i4.getTimestamp().getTime());
 
     // Day granularity
     // Friday 30th August 1985 12:00:00 AM
     g = new UDFDateFloorDay();
-    TimestampWritableV2 i5 = g.evaluate(t);
-    assertEquals(494208000000L, i5.getTimestamp().toEpochMilli());
+    TimestampWritable i5 = g.evaluate(t);
+    assertEquals(494233200000L, i5.getTimestamp().getTime());
 
     // Hour granularity
     // Friday 30th August 1985 02:00:00 AM
     g = new UDFDateFloorHour();
-    TimestampWritableV2 i6 = g.evaluate(t);
-    assertEquals(494240400000L, i6.getTimestamp().toEpochMilli());
+    TimestampWritable i6 = g.evaluate(t);
+    assertEquals(494240400000L, i6.getTimestamp().getTime());
 
     // Minute granularity
     // Friday 30th August 1985 02:47:00 AM
     g = new UDFDateFloorMinute();
-    TimestampWritableV2 i7 = g.evaluate(t);
-    assertEquals(494243220000L, i7.getTimestamp().toEpochMilli());
+    TimestampWritable i7 = g.evaluate(t);
+    assertEquals(494243220000L, i7.getTimestamp().getTime());
 
     // Second granularity
     // Friday 30th August 1985 02:47:02 AM
     g = new UDFDateFloorSecond();
-    TimestampWritableV2 i8 = g.evaluate(t);
-    assertEquals(494243222000L, i8.getTimestamp().toEpochMilli());
+    TimestampWritable i8 = g.evaluate(t);
+    assertEquals(494243222000L, i8.getTimestamp().getTime());
   }
 
   @Test

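The expected epoch values in TestUDFDateFormatGranularity move by the US/Pacific offset in force at the floored instant: the year floor (Jan 1, PST) shifts by 8 hours, the quarter/month/week/day floors (summer dates, PDT) by 7, and the hour/minute/second floors not at all, since a whole-hour zone offset cannot move an on-the-hour boundary. A quick arithmetic check of those deltas, assuming a US/Pacific test JVM:

    public class FloorOffsetDemo {
      public static void main(String[] args) {
        // Year floor lands on 1985-01-01, when US/Pacific is PST (UTC-8):
        System.out.println(473414400000L - 473385600000L);  // 28800000 ms = 8 h

        // Quarter floor lands on 1985-07-01, when US/Pacific is PDT (UTC-7):
        System.out.println(489049200000L - 489024000000L);  // 25200000 ms = 7 h

        // The hour floor is 494240400000L on both sides of the revert:
        // floors finer than a day are unaffected by a whole-hour offset.
      }
    }
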
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java
index 7c2ee15..0db9370 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java
@@ -19,19 +19,19 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import java.sql.Timestamp;
 
 public class TestGenericUDFAddMonths extends TestCase {
 
@@ -151,29 +151,41 @@ public class TestGenericUDFAddMonths extends TestCase {
   }
 
   public void testWrongDateStr() throws HiveException {
-    GenericUDFAddMonths udf = new GenericUDFAddMonths();
-    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
-    ObjectInspector[] arguments = { valueOI0, valueOI1 };
+    boolean caught = false;
+    try {
+      GenericUDFAddMonths udf = new GenericUDFAddMonths();
+      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+      ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+      ObjectInspector[] arguments = { valueOI0, valueOI1 };
 
-    udf.initialize(arguments);
-    runAndVerify("2014-02-30", 1, "2014-04-02", udf);
-    runAndVerify("2014-02-32", 1, "2014-04-04", udf);
-    runAndVerify("2014-01", 1, null, udf);
+      udf.initialize(arguments);
+      runAndVerify("2014-02-30", 1, "2014-04-02", udf);
+      runAndVerify("2014-02-32", 1, "2014-04-04", udf);
+      runAndVerify("2014-01", 1, null, udf);
+    } catch (HiveException e) {
+      caught = true;
+    }
+    assertTrue(caught);
   }
 
   public void testWrongTsStr() throws HiveException {
-    GenericUDFAddMonths udf = new GenericUDFAddMonths();
-    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
-    ObjectInspector[] arguments = { valueOI0, valueOI1 };
+    boolean caught = false;
+    try {
+      GenericUDFAddMonths udf = new GenericUDFAddMonths();
+      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+      ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+      ObjectInspector[] arguments = { valueOI0, valueOI1 };
 
-    udf.initialize(arguments);
+      udf.initialize(arguments);
 
-    runAndVerify("2014-02-30 10:30:00", 1, "2014-04-02", udf);
-    runAndVerify("2014-02-32 10:30:00", 1, "2014-04-04", udf);
-    runAndVerify("2014/01/31 10:30:00", 1, null, udf);
-    runAndVerify("2014-01-31T10:30:00", 1, "2014-02-28", udf);
+      runAndVerify("2014-02-30 10:30:00", 1, "2014-04-02", udf);
+      runAndVerify("2014-02-32 10:30:00", 1, "2014-04-04", udf);
+      runAndVerify("2014/01/31 10:30:00", 1, null, udf);
+      runAndVerify("2014-01-31T10:30:00", 1, "2014-02-28", udf);
+    } catch (HiveException e) {
+      caught = true;
+    }
+    assertTrue(caught);
   }
 
   public void testAddMonthsShort() throws HiveException {
@@ -238,7 +250,7 @@ public class TestGenericUDFAddMonths extends TestCase {
 
   private void runAndVerify(Timestamp ts, int months, Text dateFormat, String expResult, GenericUDF udf)
       throws HiveException {
-    DeferredObject valueObj0 = new DeferredJavaObject(new TimestampWritableV2(ts));
+    DeferredObject valueObj0 = new DeferredJavaObject(new TimestampWritable(ts));
     DeferredObject valueObj1 = new DeferredJavaObject(new IntWritable(months));
     DeferredObject valueObj2 = new DeferredJavaObject(dateFormat);
     DeferredObject[] args = {valueObj0, valueObj1, valueObj2};

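The TestGenericUDFAddMonths revert wraps each scenario in the classic JUnit 3 flag-and-assert idiom: with no @Test(expected=...) available to a TestCase subclass, the test passes as long as some call inside the block throws HiveException. A stripped-down sketch of the idiom, with IllegalArgumentException standing in for HiveException:

    import junit.framework.TestCase;

    public class ExpectedExceptionIdiomTest extends TestCase {
      public void testMalformedInputRejected() {
        boolean caught = false;
        try {
          // In the real test this is udf.initialize(...) plus the
          // runAndVerify(...) calls on malformed date strings.
          throw new IllegalArgumentException("stand-in for HiveException");
        } catch (IllegalArgumentException e) {
          caught = true;
        }
        assertTrue(caught);
      }
    }
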
http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java
index dcb4d9c..0acb46d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java
@@ -18,22 +18,22 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.time.LocalDateTime;
+import java.sql.Date;
+import java.sql.Timestamp;
+
+import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDate;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
-import junit.framework.TestCase;
-
 public class TestGenericUDFDate extends TestCase {
   public void testStringToDate() throws HiveException {
     GenericUDFDate udf = new GenericUDFDate();
@@ -43,13 +43,13 @@ public class TestGenericUDFDate extends TestCase {
     udf.initialize(arguments);
     DeferredObject valueObj = new DeferredJavaObject(new Text("2009-07-30"));
     DeferredObject[] args = {valueObj};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
     assertEquals("to_date() test for STRING failed ", "2009-07-30", output.toString());
 
     // Try with null args
     DeferredObject[] nullArgs = { new DeferredJavaObject(null) };
-    output = (DateWritableV2) udf.evaluate(nullArgs);
+    output = (DateWritable) udf.evaluate(nullArgs);
     assertNull("to_date() with null STRING", output);
   }
 
@@ -59,16 +59,16 @@ public class TestGenericUDFDate extends TestCase {
     ObjectInspector[] arguments = {valueOI};
 
     udf.initialize(arguments);
-    DeferredObject valueObj = new DeferredJavaObject(new TimestampWritableV2(
-        Timestamp.valueOf(LocalDateTime.of(109, 06, 30, 4, 17, 52, 0).toString())));
+    DeferredObject valueObj = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
+        30, 4, 17, 52, 0)));
     DeferredObject[] args = {valueObj};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
-    assertEquals("to_date() test for TIMESTAMP failed ", "0109-06-30", output.toString());
+    assertEquals("to_date() test for TIMESTAMP failed ", "2009-07-30", output.toString());
 
     // Try with null args
     DeferredObject[] nullArgs = { new DeferredJavaObject(null) };
-    output = (DateWritableV2) udf.evaluate(nullArgs);
+    output = (DateWritable) udf.evaluate(nullArgs);
     assertNull("to_date() with null TIMESTAMP", output);
   }
 
@@ -78,15 +78,15 @@ public class TestGenericUDFDate extends TestCase {
     ObjectInspector[] arguments = {valueOI};
 
     udf.initialize(arguments);
-    DeferredObject valueObj = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 30)));
+    DeferredObject valueObj = new DeferredJavaObject(new DateWritable(new Date(109, 06, 30)));
     DeferredObject[] args = {valueObj};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
-    assertEquals("to_date() test for DATEWRITABLE failed ", "0109-06-30", output.toString());
+    assertEquals("to_date() test for DATEWRITABLE failed ", "2009-07-30", output.toString());
 
     // Try with null args
     DeferredObject[] nullArgs = { new DeferredJavaObject(null) };
-    output = (DateWritableV2) udf.evaluate(nullArgs);
+    output = (DateWritable) udf.evaluate(nullArgs);
     assertNull("to_date() with null DATE", output);
   }
 
@@ -97,7 +97,7 @@ public class TestGenericUDFDate extends TestCase {
 
     udf.initialize(arguments);
     DeferredObject[] args = { new DeferredJavaObject(null) };
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
     // Try with null VOID
     assertNull("to_date() with null DATE ", output);

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java
index d74a4ef..9caf3b7 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java
@@ -18,21 +18,21 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.time.LocalDateTime;
+import java.sql.Date;
+import java.sql.Timestamp;
+
+import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
-import junit.framework.TestCase;
-
 public class TestGenericUDFDateAdd extends TestCase {
   public void testStringToDate() throws HiveException {
     GenericUDFDateAdd udf = new GenericUDFDateAdd();
@@ -44,7 +44,7 @@ public class TestGenericUDFDateAdd extends TestCase {
     DeferredObject valueObj1 = new DeferredJavaObject(new Text("2009-07-20 04:17:52"));
     DeferredObject valueObj2 = new DeferredJavaObject(new Integer("2"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
     assertEquals("date_add() test for STRING failed ", "2009-07-22", output.toString());
 
@@ -66,13 +66,13 @@ public class TestGenericUDFDateAdd extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritableV2(
-        Timestamp.valueOf(LocalDateTime.of(109, 06, 20, 4, 17, 52, 0).toString())));
+    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
+        20, 4, 17, 52, 0)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
-    assertEquals("date_add() test for TIMESTAMP failed ", "0109-06-23", output.toString());
+    assertEquals("date_add() test for TIMESTAMP failed ", "2009-07-23", output.toString());
 
     // Test with null args
     args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
@@ -93,12 +93,12 @@ public class TestGenericUDFDateAdd extends TestCase {
 
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Integer("4"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
-    assertEquals("date_add() test for DATEWRITABLE failed ", "0109-06-24", output.toString());
+    assertEquals("date_add() test for DATEWRITABLE failed ", "2009-07-24", output.toString());
 
     // Test with null args
     args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
@@ -118,12 +118,12 @@ public class TestGenericUDFDateAdd extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Byte("4"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
-    assertEquals("date_add() test for BYTE failed ", "0109-06-24", output.toString());
+    assertEquals("date_add() test for BYTE failed ", "2009-07-24", output.toString());
   }
 
   public void testShortDataTypeAsDays() throws HiveException {
@@ -133,11 +133,11 @@ public class TestGenericUDFDateAdd extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Short("4"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
-    assertEquals("date_add() test for SHORT failed ", "0109-06-24", output.toString());
+    assertEquals("date_add() test for SHORT failed ", "2009-07-24", output.toString());
   }
 }
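
As the STRING test above shows, date_add() ignores the time-of-day component of its input ("2009-07-20 04:17:52" plus 2 yields "2009-07-22") and adds whole days to the date portion. A minimal java.time sketch of that arithmetic (illustrative class name; not the UDF's actual implementation):

import java.time.LocalDate;

public class DateAddSketch {
  public static void main(String[] args) {
    // The "04:17:52" time-of-day is discarded; only whole days are added.
    LocalDate base = LocalDate.parse("2009-07-20");
    System.out.println(base.plusDays(2)); // 2009-07-22
  }
}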

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java
index ea183d4..3f4ea3f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java
@@ -18,22 +18,22 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.time.LocalDateTime;
+import java.sql.Date;
+import java.sql.Timestamp;
+
+import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateDiff;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 
-import junit.framework.TestCase;
-
 public class TestGenericUDFDateDiff extends TestCase {
   public void testStringToDate() throws HiveException {
     GenericUDFDateDiff udf = new GenericUDFDateDiff();
@@ -67,10 +67,10 @@ public class TestGenericUDFDateDiff extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritableV2(
-        Timestamp.valueOf(LocalDateTime.of(109, 06, 20, 0, 0, 0, 0).toString())));
-    DeferredObject valueObj2 = new DeferredJavaObject(new TimestampWritableV2(
-        Timestamp.valueOf(LocalDateTime.of(109, 06, 17, 0, 0, 0, 0).toString())));
+    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
+        20, 0, 0, 0, 0)));
+    DeferredObject valueObj2 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
+        17, 0, 0, 0, 0)));
     DeferredObject[] args = {valueObj1, valueObj2};
     IntWritable output = (IntWritable) udf.evaluate(args);
 
@@ -95,8 +95,8 @@ public class TestGenericUDFDateDiff extends TestCase {
 
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
-    DeferredObject valueObj2 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 10)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+    DeferredObject valueObj2 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 10)));
     DeferredObject[] args = {valueObj1, valueObj2};
     IntWritable output = (IntWritable) udf.evaluate(args);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
index 6a3cdda..d29d964 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
@@ -17,19 +17,21 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.Text;
+import org.junit.Assume;
 
 public class TestGenericUDFDateFormat extends TestCase {
 
@@ -65,18 +67,24 @@ public class TestGenericUDFDateFormat extends TestCase {
   }
 
   public void testWrongDateStr() throws HiveException {
-    GenericUDFDateFormat udf = new GenericUDFDateFormat();
-    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-    Text fmtText = new Text("EEEE");
-    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
-        .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
-    ObjectInspector[] arguments = {valueOI0, valueOI1};
-
-    udf.initialize(arguments);
-    runAndVerifyStr("2016-02-30 10:30:45", fmtText, "Tuesday", udf);
-    runAndVerifyStr("2014-01-32", fmtText, "Saturday", udf);
-    runAndVerifyStr("01/14/2014", fmtText, null, udf);
-    runAndVerifyStr(null, fmtText, null, udf);
+    boolean caught = false;
+    try {
+      GenericUDFDateFormat udf = new GenericUDFDateFormat();
+      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+      Text fmtText = new Text("EEEE");
+      ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
+              .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
+      ObjectInspector[] arguments = { valueOI0, valueOI1 };
+
+      udf.initialize(arguments);
+      runAndVerifyStr("2016-02-30 10:30:45", fmtText, "Tuesday", udf);
+      runAndVerifyStr("2014-01-32", fmtText, "Saturday", udf);
+      runAndVerifyStr("01/14/2014", fmtText, null, udf);
+      runAndVerifyStr(null, fmtText, null, udf);
+    } catch (HiveException e) {
+      caught = true;
+    }
+    assertTrue(caught);
   }
 
   public void testDateFormatDate() throws HiveException {
@@ -157,7 +165,7 @@ public class TestGenericUDFDateFormat extends TestCase {
 
   private void runAndVerifyDate(String str, Text fmtText, String expResult, GenericUDF udf)
       throws HiveException {
-    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new DateWritableV2(
+    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new DateWritable(
         Date.valueOf(str)) : null);
     DeferredObject valueObj1 = new DeferredJavaObject(fmtText);
     DeferredObject[] args = { valueObj0, valueObj1 };
@@ -167,7 +175,7 @@ public class TestGenericUDFDateFormat extends TestCase {
 
   private void runAndVerifyTs(String str, Text fmtText, String expResult, GenericUDF udf)
       throws HiveException {
-    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new TimestampWritableV2(
+    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new TimestampWritable(
         Timestamp.valueOf(str)) : null);
     DeferredObject valueObj1 = new DeferredJavaObject(fmtText);
     DeferredObject[] args = { valueObj0, valueObj1 };
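
The "Tuesday" and "Saturday" expectations restored above only make sense under lenient date parsing, which rolls invalid dates such as 2016-02-30 forward before formatting. A sketch of that behavior, assuming the lenient java.text.SimpleDateFormat that the pre-HIVE-12192 code path is understood to rely on (class name illustrative, English locale assumed):

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

public class LenientFormatSketch {
  public static void main(String[] args) throws ParseException {
    // SimpleDateFormat is lenient by default and rolls 2016-02-30 forward
    // to 2016-03-01; "EEEE" then yields the day-of-week name.
    SimpleDateFormat in = new SimpleDateFormat("yyyy-MM-dd");
    SimpleDateFormat out = new SimpleDateFormat("EEEE");
    Date rolled = in.parse("2016-02-30");
    System.out.println(out.format(rolled)); // Tuesday
  }
}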

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java
index c71c2b7..cb00cfd 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java
@@ -18,21 +18,21 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.time.LocalDateTime;
+import java.sql.Date;
+import java.sql.Timestamp;
+
+import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateSub;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
-import junit.framework.TestCase;
-
 public class TestGenericUDFDateSub extends TestCase {
   public void testStringToDate() throws HiveException {
     GenericUDFDateSub udf = new GenericUDFDateSub();
@@ -44,7 +44,7 @@ public class TestGenericUDFDateSub extends TestCase {
     DeferredObject valueObj1 = new DeferredJavaObject(new Text("2009-07-20 04:17:52"));
     DeferredObject valueObj2 = new DeferredJavaObject(new Integer("2"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
     assertEquals("date_sub() test for STRING failed ", "2009-07-18", output.toString());
 
@@ -66,13 +66,13 @@ public class TestGenericUDFDateSub extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritableV2(
-        Timestamp.valueOf(LocalDateTime.of(109, 06, 20, 4, 17, 52, 0).toString())));
+    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
+        20, 4, 17, 52, 0)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
-    assertEquals("date_sub() test for TIMESTAMP failed ", "0109-06-17", output.toString());
+    assertEquals("date_sub() test for TIMESTAMP failed ", "2009-07-17", output.toString());
 
     // Test with null args
     args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
@@ -93,12 +93,12 @@ public class TestGenericUDFDateSub extends TestCase {
 
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Integer("4"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
-    assertEquals("date_sub() test for DATEWRITABLE failed ", "0109-06-16", output.toString());
+    assertEquals("date_sub() test for DATEWRITABLE failed ", "2009-07-16", output.toString());
 
     // Test with null args
     args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
@@ -118,12 +118,12 @@ public class TestGenericUDFDateSub extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Byte("4"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
-    assertEquals("date_add() test for BYTE failed ", "0109-06-16", output.toString());
+    assertEquals("date_add() test for BYTE failed ", "2009-07-16", output.toString());
   }
 
   public void testShortDataTypeAsDays() throws HiveException {
@@ -133,11 +133,11 @@ public class TestGenericUDFDateSub extends TestCase {
     ObjectInspector[] arguments = {valueOI1, valueOI2};
 
     udf.initialize(arguments);
-    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
+    DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
     DeferredObject valueObj2 = new DeferredJavaObject(new Short("4"));
     DeferredObject[] args = {valueObj1, valueObj2};
-    DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
+    DateWritable output = (DateWritable) udf.evaluate(args);
 
-    assertEquals("date_add() test for SHORT failed ", "0109-06-16", output.toString());
+    assertEquals("date_add() test for SHORT failed ", "2009-07-16", output.toString());
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java
index bb9918c..bc8572e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java
@@ -18,12 +18,17 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
 import junit.framework.TestCase;

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java
index 9787454..4677aa7 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java
@@ -18,13 +18,12 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 import junit.framework.TestCase;
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -33,6 +32,8 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
+import java.sql.Date;
+
 public class TestGenericUDFGreatest extends TestCase {
 
   public void testOneArg() throws HiveException {
@@ -209,7 +210,7 @@ public class TestGenericUDFGreatest extends TestCase {
     } else if (o instanceof Double) {
       return o != null ? new DoubleWritable((Double) o) : null;
     } else if (o instanceof Date) {
-      return o != null ? new DateWritableV2((Date) o) : null;
+      return o != null ? new DateWritable((Date) o) : null;
     } else if (o instanceof Byte) {
       return o != null ? new ByteWritable((Byte) o): null;
     } else if (o instanceof Short) {
@@ -230,8 +231,8 @@ public class TestGenericUDFGreatest extends TestCase {
       return ((IntWritable) o).get();
     } else if (o instanceof DoubleWritable) {
       return ((DoubleWritable) o).get();
-    } else if (o instanceof DateWritableV2) {
-      return ((DateWritableV2) o).get();
+    } else if (o instanceof DateWritable) {
+      return ((DateWritable) o).get();
     } else if (o instanceof ByteWritable) {
       return ((ByteWritable) o).get();
     } else if (o instanceof ShortWritable) {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
index 972ab35..7d7c84d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
@@ -17,17 +17,18 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.common.type.Timestamp;
+import java.sql.Timestamp;
+
+import junit.framework.TestCase;
+
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
-import junit.framework.TestCase;
-
 public class TestGenericUDFLastDay extends TestCase {
 
   public void testLastDay() throws HiveException {
@@ -64,29 +65,41 @@ public class TestGenericUDFLastDay extends TestCase {
   }
 
   public void testWrongDateStr() throws HiveException {
-    GenericUDFLastDay udf = new GenericUDFLastDay();
-    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-    ObjectInspector[] arguments = {valueOI0};
-
-    udf.initialize(arguments);
-
-    runAndVerify("2016-02-30", "2016-03-31", udf);
-    runAndVerify("2014-01-32", "2014-02-28", udf);
-    runAndVerify("01/14/2014", null, udf);
-    runAndVerify(null, null, udf);
+    boolean caught = false;
+    try {
+      GenericUDFLastDay udf = new GenericUDFLastDay();
+      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+      ObjectInspector[] arguments = { valueOI0 };
+
+      udf.initialize(arguments);
+
+      runAndVerify("2016-02-30", "2016-03-31", udf);
+      runAndVerify("2014-01-32", "2014-02-28", udf);
+      runAndVerify("01/14/2014", null, udf);
+      runAndVerify(null, null, udf);
+    } catch (HiveException e) {
+      caught = true;
+    }
+    assertTrue(caught);
   }
 
   public void testWrongTsStr() throws HiveException {
-    GenericUDFLastDay udf = new GenericUDFLastDay();
-    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-    ObjectInspector[] arguments = { valueOI0 };
-
-    udf.initialize(arguments);
-
-    runAndVerify("2016-02-30 10:30:45", "2016-03-31", udf);
-    runAndVerify("2014-01-32 10:30:45", "2014-02-28", udf);
-    runAndVerify("01/14/2014 10:30:45", null, udf);
-    runAndVerify("2016-02-28T10:30:45", null, udf);
+    boolean caught = false;
+    try {
+      GenericUDFLastDay udf = new GenericUDFLastDay();
+      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+      ObjectInspector[] arguments = { valueOI0 };
+
+      udf.initialize(arguments);
+
+      runAndVerify("2016-02-30 10:30:45", "2016-03-31", udf);
+      runAndVerify("2014-01-32 10:30:45", "2014-02-28", udf);
+      runAndVerify("01/14/2014 10:30:45", null, udf);
+      runAndVerify("2016-02-28T10:30:45", "2016-02-29", udf);
+    } catch (HiveException e) {
+      caught = true;
+    }
+    assertTrue(caught);
   }
 
   public void testLastDayTs() throws HiveException {
@@ -119,7 +132,7 @@ public class TestGenericUDFLastDay extends TestCase {
   }
 
   private void runAndVerifyTs(String str, String expResult, GenericUDF udf) throws HiveException {
-    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new TimestampWritableV2(
+    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new TimestampWritable(
         Timestamp.valueOf(str)) : null);
     DeferredObject[] args = { valueObj0 };
     Text output = (Text) udf.evaluate(args);
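
The same lenient-rollover story applies to last_day(): once 2016-02-30 has rolled forward to 2016-03-01, the "2016-03-31" expectation above is simply the final day of that month. A java.time sketch (illustrative class name, not the UDF's implementation):

import java.time.LocalDate;
import java.time.temporal.TemporalAdjusters;

public class LastDaySketch {
  public static void main(String[] args) {
    // After lenient parsing rolls 2016-02-30 to 2016-03-01, last_day()
    // answers with the final day of that month.
    LocalDate rolled = LocalDate.parse("2016-03-01");
    System.out.println(rolled.with(TemporalAdjusters.lastDayOfMonth())); // 2016-03-31
  }
}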

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java
index cccc70e..f966cb0 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java
@@ -17,15 +17,16 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.sql.Date;
+
 import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -209,7 +210,7 @@ public class TestGenericUDFLeast extends TestCase {
     } else if (o instanceof Double) {
       return o != null ? new DoubleWritable((Double) o) : null;
     } else if (o instanceof Date) {
-      return o != null ? new DateWritableV2((Date) o) : null;
+      return o != null ? new DateWritable((Date) o) : null;
     } else if (o instanceof Byte) {
       return o != null ? new ByteWritable((Byte) o): null;
     } else if (o instanceof Short) {
@@ -230,8 +231,8 @@ public class TestGenericUDFLeast extends TestCase {
       return ((IntWritable) o).get();
     } else if (o instanceof DoubleWritable) {
       return ((DoubleWritable) o).get();
-    } else if (o instanceof DateWritableV2) {
-      return ((DateWritableV2) o).get();
+    } else if (o instanceof DateWritable) {
+      return ((DateWritable) o).get();
     } else if (o instanceof ByteWritable) {
       return ((ByteWritable) o).get();
     } else if (o instanceof ShortWritable) {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
index e9f32a1..7eee550 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
@@ -17,13 +17,14 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -59,14 +60,20 @@ public class TestGenericUDFMonthsBetween extends TestCase {
   }
 
   public void testWrongDateStr() throws HiveException {
-    GenericUDFMonthsBetween udf = new GenericUDFMonthsBetween();
-    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-    ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-    ObjectInspector[] arguments = {valueOI1, valueOI2};
-    udf.initialize(arguments);
-
-    runTestStr("2002-03", "2002-02-24", null, udf);
-    runTestStr("2002-03-24", "2002-02", null, udf);
+    boolean caught = false;
+    try {
+      GenericUDFMonthsBetween udf = new GenericUDFMonthsBetween();
+      ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+      ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+      ObjectInspector[] arguments = { valueOI1, valueOI2 };
+      udf.initialize(arguments);
+
+      runTestStr("2002-03", "2002-02-24", null, udf);
+      runTestStr("2002-03-24", "2002-02", null, udf);
+    } catch (HiveException e) {
+      caught = true;
+    }
+    assertTrue(caught);
   }
 
   public void testMonthsBetweenForString(GenericUDFMonthsBetween udf) throws HiveException {
@@ -180,7 +187,7 @@ public class TestGenericUDFMonthsBetween extends TestCase {
     runTestTs("2002-03-24 00:00:00", "2002-02-24 10:30:00", 1.0, udf);
     runTestTs("2002-03-24 10:30:00", "2002-02-24 00:00:00", 1.0, udf);
 
-    runTestTs("2003-04-23 23:59:59", "2003-03-24 00:00:00", 0.99999963, udf);
+    runTestTs("2003-04-23 23:59:59", "2003-03-24 00:0:0", 0.99999963, udf);
   }
 
   public void testMonthsBetweenForDate() throws HiveException {
@@ -246,8 +253,8 @@ public class TestGenericUDFMonthsBetween extends TestCase {
 
   protected void runTestTs(String ts1, String ts2, Double expDiff, GenericUDFMonthsBetween udf)
       throws HiveException {
-    TimestampWritableV2 tsWr1 = ts1 == null ? null : new TimestampWritableV2(Timestamp.valueOf(ts1));
-    TimestampWritableV2 tsWr2 = ts2 == null ? null : new TimestampWritableV2(Timestamp.valueOf(ts2));
+    TimestampWritable tsWr1 = ts1 == null ? null : new TimestampWritable(Timestamp.valueOf(ts1));
+    TimestampWritable tsWr2 = ts2 == null ? null : new TimestampWritable(Timestamp.valueOf(ts2));
     DeferredJavaObject valueObj1 = new DeferredJavaObject(tsWr1);
     DeferredJavaObject valueObj2 = new DeferredJavaObject(tsWr2);
     DeferredObject[] args = new DeferredObject[] { valueObj1, valueObj2 };
@@ -262,8 +269,8 @@ public class TestGenericUDFMonthsBetween extends TestCase {
 
   protected void runTestDt(String dt1, String dt2, Double expDiff, GenericUDFMonthsBetween udf)
       throws HiveException {
-    DateWritableV2 dtWr1 = dt1 == null ? null : new DateWritableV2(Date.valueOf(dt1));
-    DateWritableV2 dtWr2 = dt2 == null ? null : new DateWritableV2(Date.valueOf(dt2));
+    DateWritable dtWr1 = dt1 == null ? null : new DateWritable(Date.valueOf(dt1));
+    DateWritable dtWr2 = dt2 == null ? null : new DateWritable(Date.valueOf(dt2));
     DeferredJavaObject valueObj1 = new DeferredJavaObject(dtWr1);
     DeferredJavaObject valueObj2 = new DeferredJavaObject(dtWr2);
     DeferredObject[] args = new DeferredObject[] { valueObj1, valueObj2 };
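
The 0.99999963 expectation above is a worked instance of the documented months_between() rule of dividing the day remainder by a 31-day month: 2003-03-24 00:00:00 to 2003-04-23 23:59:59 falls one second short of a full 31-day step. A sketch of the arithmetic (illustrative class name; the 31-day divisor is the documented behavior, the rest is plain arithmetic):

public class MonthsBetweenSketch {
  public static void main(String[] args) {
    // Remainder of 30 days 23:59:59, divided by a 31-day month.
    double remainderSeconds = 30 * 86400 + 23 * 3600 + 59 * 60 + 59; // 2678399
    double monthSeconds = 31 * 86400;                                // 2678400
    System.out.printf("%.8f%n", remainderSeconds / monthSeconds);    // 0.99999963
  }
}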

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java
index c211fdd..af7f0b0 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java
@@ -71,21 +71,27 @@ public class TestGenericUDFNextDay extends TestCase {
   }
 
   public void testNotValidValues() throws Exception {
-    GenericUDFNextDay udf = new GenericUDFNextDay();
-    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-    ObjectInspector[] arguments = { valueOI0, valueOI1 };
+    boolean caught = false;
+    try {
+      GenericUDFNextDay udf = new GenericUDFNextDay();
+      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+      ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+      ObjectInspector[] arguments = { valueOI0, valueOI1 };
 
-    udf.initialize(arguments);
+      udf.initialize(arguments);
 
-    runAndVerify("01/14/2015", "TU", null, udf);
-    runAndVerify("2015-01-14", "VT", null, udf);
-    runAndVerify("2015-02-30", "WE", "2015-03-04", udf);
-    runAndVerify("2015-02-32", "WE", "2015-03-11", udf);
-    runAndVerify("2015-02-30 10:30:00", "WE", "2015-03-04", udf);
-    runAndVerify("2015-02-32 10:30:00", "WE", "2015-03-11", udf);
-    runAndVerify("2015/01/14 14:04:34", "SAT", null, udf);
-    runAndVerify("2015-01-14T14:04:34", "SAT", null, udf);
+      runAndVerify("01/14/2015", "TU", null, udf);
+      runAndVerify("2015-01-14", "VT", null, udf);
+      runAndVerify("2015-02-30", "WE", "2015-03-04", udf);
+      runAndVerify("2015-02-32", "WE", "2015-03-11", udf);
+      runAndVerify("2015-02-30 10:30:00", "WE", "2015-03-04", udf);
+      runAndVerify("2015-02-32 10:30:00", "WE", "2015-03-11", udf);
+      runAndVerify("2015/01/14 14:04:34", "SAT", null, udf);
+      runAndVerify("2015-01-14T14:04:34", "SAT", "2015-01-17", udf);
+    } catch (HiveException e) {
+      caught = true;
+    }
+    assertTrue(caught);
   }
 
   public void testNextDayErrorArg1() throws HiveException {

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java
index 281b0d5..1402467 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java
@@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -109,7 +109,7 @@ public class TestGenericUDFNullif {
 
     ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableDateObjectInspector,
         PrimitiveObjectInspectorFactory.writableByteObjectInspector };
-    DeferredObject[] args = { new DeferredJavaObject(new DateWritableV2(4)),
+    DeferredObject[] args = { new DeferredJavaObject(new DateWritable(4)),
         new DeferredJavaObject(new ByteWritable((byte) 4)) };
 
     udf.initialize(inputOIs);
@@ -123,8 +123,8 @@ public class TestGenericUDFNullif {
         PrimitiveObjectInspectorFactory.writableDateObjectInspector,
         PrimitiveObjectInspectorFactory.writableDateObjectInspector };
     DeferredObject[] args = {
-        new DeferredJavaObject(new DateWritableV2(4)),
-        new DeferredJavaObject(new DateWritableV2(4))
+        new DeferredJavaObject(new DateWritable(4)),
+        new DeferredJavaObject(new DateWritable(4))
         };
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
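
The bare integers passed to DateWritable above are days since the Unix epoch, which is how DateWritable stores dates internally, so both arguments denote the same date and NULLIF sees them as equal. A one-line check of what day 4 denotes (illustrative class name):

import java.time.LocalDate;

public class DaysSinceEpochSketch {
  public static void main(String[] args) {
    // new DateWritable(4) above counts 4 days from 1970-01-01.
    System.out.println(LocalDate.ofEpochDay(4)); // 1970-01-05
  }
}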

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
index 504aa7a..efc9514 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
@@ -18,25 +18,26 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.common.type.Date;
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -281,8 +282,8 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
   public void testDateMinusIntervalYearMonth() throws Exception {
     GenericUDFOPMinus udf = new GenericUDFOPMinus();
 
-    DateWritableV2 left =
-        new DateWritableV2(Date.valueOf("2004-02-15"));
+    DateWritable left =
+        new DateWritable(Date.valueOf("2004-02-15"));
     HiveIntervalYearMonthWritable right =
         new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
     ObjectInspector[] inputOIs = {
@@ -296,7 +297,7 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
-    DateWritableV2 res = (DateWritableV2) udf.evaluate(args);
+    DateWritable res = (DateWritable) udf.evaluate(args);
     Assert.assertEquals(Date.valueOf("2001-06-15"), res.get());
   }
 
@@ -304,8 +305,8 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
   public void testTimestampMinusIntervalYearMonth() throws Exception {
     GenericUDFOPMinus udf = new GenericUDFOPMinus();
 
-    TimestampWritableV2 left =
-        new TimestampWritableV2(Timestamp.valueOf("2004-01-15 01:02:03.123456789"));
+    TimestampWritable left =
+        new TimestampWritable(Timestamp.valueOf("2004-01-15 01:02:03.123456789"));
     HiveIntervalYearMonthWritable right =
         new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
     ObjectInspector[] inputOIs = {
@@ -319,7 +320,7 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
-    TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
     Assert.assertEquals(Timestamp.valueOf("2001-11-15 01:02:03.123456789"), res.getTimestamp());
   }
 
@@ -350,8 +351,8 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
   public void testTimestampMinusIntervalDayTime() throws Exception {
     GenericUDFOPMinus udf = new GenericUDFOPMinus();
 
-    TimestampWritableV2 left =
-        new TimestampWritableV2(Timestamp.valueOf("2001-01-02 2:3:4.567"));
+    TimestampWritable left =
+        new TimestampWritable(Timestamp.valueOf("2001-01-02 2:3:4.567"));
     HiveIntervalDayTimeWritable right =
         new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
     ObjectInspector[] inputOIs = {
@@ -365,7 +366,7 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
-    TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
     Assert.assertEquals(Timestamp.valueOf("2001-01-01 00:00:00"), res.getTimestamp());
   }
 
@@ -373,8 +374,8 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
   public void testDateMinusIntervalDayTime() throws Exception {
     GenericUDFOPMinus udf = new GenericUDFOPMinus();
 
-    DateWritableV2 left =
-        new DateWritableV2(Date.valueOf("2001-01-01"));
+    DateWritable left =
+        new DateWritable(Date.valueOf("2001-01-01"));
     HiveIntervalDayTimeWritable right =
         new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 0:0:0.555"));
     ObjectInspector[] inputOIs = {
@@ -388,7 +389,7 @@ public class TestGenericUDFOPMinus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
-    TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
     Assert.assertEquals(Timestamp.valueOf("2000-12-30 23:59:59.445"), res.getTimestamp());
   }
 }
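
testDateMinusIntervalDayTime above also shows the type promotion: a DATE operand is treated as midnight, and subtracting a day-time interval yields a TIMESTAMP, hence the 2000-12-30 23:59:59.445 expectation. A java.time sketch of that arithmetic (illustrative class name; it mirrors the expected value in the hunk, not the UDF's internals):

import java.time.Duration;
import java.time.LocalDateTime;

public class DateMinusIntervalSketch {
  public static void main(String[] args) {
    // The DATE 2001-01-01 is widened to midnight, then the day-time
    // interval "1 0:0:0.555" is subtracted.
    LocalDateTime midnight = LocalDateTime.parse("2001-01-01T00:00:00");
    Duration interval = Duration.ofDays(1).plusMillis(555);
    System.out.println(midnight.minus(interval)); // 2000-12-30T23:59:59.445
  }
}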

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
index 2169999..5350a00 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
@@ -18,25 +18,26 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.common.type.Date;
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -288,8 +289,8 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
 
     HiveIntervalYearMonthWritable left =
         new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
-    DateWritableV2 right =
-        new DateWritableV2(Date.valueOf("2001-06-15"));
+    DateWritable right =
+        new DateWritable(Date.valueOf("2001-06-15"));
     ObjectInspector[] inputOIs = {
         PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,
         PrimitiveObjectInspectorFactory.writableDateObjectInspector
@@ -301,7 +302,7 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
-    DateWritableV2 res = (DateWritableV2) udf.evaluate(args);
+    DateWritable res = (DateWritable) udf.evaluate(args);
     Assert.assertEquals(Date.valueOf("2004-02-15"), res.get());
   }
 
@@ -309,8 +310,8 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
   public void testDatePlusIntervalYearMonth() throws Exception {
     GenericUDFOPPlus udf = new GenericUDFOPPlus();
 
-    DateWritableV2 left =
-        new DateWritableV2(Date.valueOf("2001-06-15"));
+    DateWritable left =
+        new DateWritable(Date.valueOf("2001-06-15"));
     HiveIntervalYearMonthWritable right =
         new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
     ObjectInspector[] inputOIs = {
@@ -324,7 +325,7 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
-    DateWritableV2 res = (DateWritableV2) udf.evaluate(args);
+    DateWritable res = (DateWritable) udf.evaluate(args);
     Assert.assertEquals(Date.valueOf("2004-02-15"), res.get());
   }
 
@@ -334,8 +335,8 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
 
     HiveIntervalYearMonthWritable left =
         new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
-    TimestampWritableV2 right =
-        new TimestampWritableV2(Timestamp.valueOf("2001-11-15 01:02:03.123456789"));
+    TimestampWritable right =
+        new TimestampWritable(Timestamp.valueOf("2001-11-15 01:02:03.123456789"));
     ObjectInspector[] inputOIs = {
         PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,
         PrimitiveObjectInspectorFactory.writableTimestampObjectInspector
@@ -347,7 +348,7 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
-    TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
     Assert.assertEquals(Timestamp.valueOf("2004-01-15 01:02:03.123456789"), res.getTimestamp());
   }
 
@@ -355,8 +356,8 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
   public void testTimestampPlusIntervalYearMonth() throws Exception {
     GenericUDFOPPlus udf = new GenericUDFOPPlus();
 
-    TimestampWritableV2 left =
-        new TimestampWritableV2(Timestamp.valueOf("2001-11-15 01:02:03.123456789"));
+    TimestampWritable left =
+        new TimestampWritable(Timestamp.valueOf("2001-11-15 01:02:03.123456789"));
     HiveIntervalYearMonthWritable right =
         new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
     ObjectInspector[] inputOIs = {
@@ -370,7 +371,7 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
-    TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
     Assert.assertEquals(Timestamp.valueOf("2004-01-15 01:02:03.123456789"), res.getTimestamp());
   }
 
@@ -403,8 +404,8 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
 
     HiveIntervalDayTimeWritable left =
         new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
-    TimestampWritableV2 right =
-        new TimestampWritableV2(Timestamp.valueOf("2001-01-01 00:00:00"));
+    TimestampWritable right =
+        new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00"));
     ObjectInspector[] inputOIs = {
         PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,
         PrimitiveObjectInspectorFactory.writableTimestampObjectInspector
@@ -416,7 +417,7 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
-    TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
     Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
   }
 
@@ -424,8 +425,8 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
   public void testTimestampPlusIntervalDayTime() throws Exception {
     GenericUDFOPPlus udf = new GenericUDFOPPlus();
 
-    TimestampWritableV2 left =
-        new TimestampWritableV2(Timestamp.valueOf("2001-01-01 00:00:00"));
+    TimestampWritable left =
+        new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00"));
     HiveIntervalDayTimeWritable right =
         new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
     ObjectInspector[] inputOIs = {
@@ -439,7 +440,7 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
 
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
-    TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
     Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
   }
 
@@ -449,8 +450,8 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
 
     HiveIntervalDayTimeWritable left =
         new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
-    DateWritableV2 right =
-        new DateWritableV2(Date.valueOf("2001-01-01"));
+    DateWritable right =
+        new DateWritable(Date.valueOf("2001-01-01"));
     ObjectInspector[] inputOIs = {
         PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,
         PrimitiveObjectInspectorFactory.writableDateObjectInspector
@@ -463,7 +464,7 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
     // Date + day-time interval = timestamp
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
-    TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
     Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
   }
 
@@ -471,8 +472,8 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
   public void testDatePlusIntervalDayTime() throws Exception {
     GenericUDFOPPlus udf = new GenericUDFOPPlus();
 
-    DateWritableV2 left =
-        new DateWritableV2(Date.valueOf("2001-01-01"));
+    DateWritable left =
+        new DateWritable(Date.valueOf("2001-01-01"));
     HiveIntervalDayTimeWritable right =
         new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
     ObjectInspector[] inputOIs = {
@@ -487,7 +488,7 @@ public class TestGenericUDFOPPlus extends AbstractTestGenericUDFOPNumeric {
     // Date + day-time interval = timestamp
     PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
     Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
-    TimestampWritableV2 res = (TimestampWritableV2) udf.evaluate(args);
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
     Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
   }
 }
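
The hunks above restore java.sql.Date/Timestamp and the original DateWritable/TimestampWritable in place of the V2 types that HIVE-12192 had introduced. The practical difference the revert brings back: java.sql.Timestamp.valueOf interprets its argument in the JVM's default time zone, so the epoch instant behind an identical literal shifts with the machine running the test. A minimal sketch of that behavior (LocalTimeZoneDemo is an illustrative name, not part of Hive):

    import java.sql.Timestamp;
    import java.util.TimeZone;

    public class LocalTimeZoneDemo {
      public static void main(String[] args) {
        // java.sql.Timestamp.valueOf parses in the JVM default time zone,
        // so the same literal maps to different epoch instants per zone.
        TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
        long inLa = Timestamp.valueOf("2001-11-15 01:02:03").getTime();
        TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
        long inUtc = Timestamp.valueOf("2001-11-15 01:02:03").getTime();
        System.out.println(inLa - inUtc); // 28800000: PST is 8 hours behind UTC
      }
    }

The assertions above keep passing either way, because both sides of each assertEquals are produced through the same local-zone parsing.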

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java
index 5e6c14e..4b09aa1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java
@@ -17,15 +17,16 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import junit.framework.TestCase;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
@@ -82,16 +83,22 @@ public class TestGenericUDFQuarter extends TestCase {
   }
 
   public void testWrongDateStr() throws HiveException {
-    GenericUDFQuarter udf = new GenericUDFQuarter();
-    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-    ObjectInspector[] arguments = {valueOI0};
-
-    udf.initialize(arguments);
-
-    runAndVerifyStr("2016-03-35", 2, udf);
-    runAndVerifyStr("2014-01-32", 1, udf);
-    runAndVerifyStr("01/14/2014", null, udf);
-    runAndVerifyStr(null, null, udf);
+    boolean caught = false;
+    try {
+      GenericUDFQuarter udf = new GenericUDFQuarter();
+      ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+      ObjectInspector[] arguments = { valueOI0 };
+
+      udf.initialize(arguments);
+
+      runAndVerifyStr("2016-03-35", 2, udf);
+      runAndVerifyStr("2014-01-32", 1, udf);
+      runAndVerifyStr("01/14/2014", null, udf);
+      runAndVerifyStr(null, null, udf);
+    } catch (HiveException e) {
+      caught = true;
+    }
+    assertTrue(caught);
   }
 
   public void testQuarterDt() throws HiveException {
@@ -159,7 +166,7 @@ public class TestGenericUDFQuarter extends TestCase {
   }
 
   private void runAndVerifyDt(String str, Integer expResult, GenericUDF udf) throws HiveException {
-    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new DateWritableV2(
+    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new DateWritable(
         Date.valueOf(str)) : null);
     DeferredObject[] args = { valueObj0 };
     IntWritable output = (IntWritable) udf.evaluate(args);
@@ -172,7 +179,7 @@ public class TestGenericUDFQuarter extends TestCase {
   }
 
   private void runAndVerifyTs(String str, Integer expResult, GenericUDF udf) throws HiveException {
-    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new TimestampWritableV2(
+    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new TimestampWritable(
         Timestamp.valueOf(str)) : null);
     DeferredObject[] args = { valueObj0 };
     IntWritable output = (IntWritable) udf.evaluate(args);
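
In this file the revert does more than swap the Writable types: testWrongDateStr goes back to expecting a HiveException for malformed input, where the removed V2 version expected lenient rollover ("2016-03-35" spilling into April, hence quarter 2). As a general illustration of lenient versus strict date parsing (plain SimpleDateFormat, not necessarily the UDF's exact code path; LenientParsingDemo is a hypothetical name):

    import java.text.ParseException;
    import java.text.SimpleDateFormat;

    public class LenientParsingDemo {
      public static void main(String[] args) throws ParseException {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
        fmt.setLenient(true);                        // the default
        System.out.println(fmt.parse("2016-03-35")); // rolls over to Apr 4, 2016
        fmt.setLenient(false);
        fmt.parse("2016-03-35");                     // throws ParseException
      }
    }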

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java
index 5b6e2ab..fcdb49c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java
@@ -18,14 +18,14 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.sql.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import static java.util.Arrays.asList;
 
-import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -93,22 +93,22 @@ public class TestGenericUDFSortArray {
     udf.initialize(inputOIs);
 
     Object i1 = asList(new Text("a"), new DoubleWritable(3.1415),
-        new DateWritableV2(Date.of(2015, 5, 26)),
+        new DateWritable(new Date(2015, 5, 26)),
         asList(new IntWritable(1), new IntWritable(3),
             new IntWritable(2), new IntWritable(4)));
 
     Object i2 = asList(new Text("b"), new DoubleWritable(3.14),
-        new DateWritableV2(Date.of(2015, 5, 26)),
+        new DateWritable(new Date(2015, 5, 26)),
         asList(new IntWritable(1), new IntWritable(3),
             new IntWritable(2), new IntWritable(4)));
 
     Object i3 = asList(new Text("a"), new DoubleWritable(3.1415),
-        new DateWritableV2(Date.of(2015, 5, 25)),
+        new DateWritable(new Date(2015, 5, 25)),
         asList(new IntWritable(1), new IntWritable(3),
             new IntWritable(2), new IntWritable(5)));
 
     Object i4 = asList(new Text("a"), new DoubleWritable(3.1415),
-        new DateWritableV2(Date.of(2015, 5, 25)),
+        new DateWritable(new Date(2015, 5, 25)),
         asList(new IntWritable(1), new IntWritable(3),
             new IntWritable(2), new IntWritable(4)));
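
Note that the restored calls use the deprecated java.sql.Date(int, int, int) constructor, whose arguments are year-minus-1900 and a zero-based month, so new Date(2015, 5, 26) actually denotes 3915-06-26 rather than 2015-05-26. The test is unaffected because it only compares the elements' relative order. A small sketch of the pitfall (DeprecatedDateCtorDemo is an illustrative name):

    import java.sql.Date;

    public class DeprecatedDateCtorDemo {
      public static void main(String[] args) {
        // Deprecated ctor: year is an offset from 1900, month is zero-based.
        System.out.println(new Date(2015, 5, 26));        // 3915-06-26
        System.out.println(new Date(2015 - 1900, 4, 26)); // 2015-05-26
      }
    }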
 

http://git-wip-us.apache.org/repos/asf/hive/blob/33088de0/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java
index 61623d5..d840238 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java
@@ -18,13 +18,14 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.common.type.Date;
-import org.apache.hadoop.hive.common.type.Timestamp;
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
-import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.LongWritable;
@@ -64,13 +65,13 @@ public class TestGenericUDFToUnixTimestamp extends TestCase {
 
     Timestamp ts = Timestamp.valueOf("1970-01-01 00:00:00");
     runAndVerify(udf,
-        new TimestampWritableV2(ts),
-        new LongWritable(ts.toEpochSecond()));
+        new TimestampWritable(ts),
+        new LongWritable(ts.getTime() / 1000));
 
     ts = Timestamp.valueOf("2001-02-03 01:02:03");
     runAndVerify(udf,
-        new TimestampWritableV2(ts),
-        new LongWritable(ts.toEpochSecond()));
+        new TimestampWritable(ts),
+        new LongWritable(ts.getTime() / 1000));
 
     // test null values
     runAndVerify(udf, null, null);
@@ -84,8 +85,8 @@ public class TestGenericUDFToUnixTimestamp extends TestCase {
 
     Date date = Date.valueOf("1970-01-01");
     runAndVerify(udf,
-        new DateWritableV2(date),
-        new LongWritable(date.toEpochSecond()));
+        new DateWritable(date),
+        new LongWritable(date.getTime() / 1000));
 
     // test null values
     runAndVerify(udf, null, null);
@@ -100,7 +101,7 @@ public class TestGenericUDFToUnixTimestamp extends TestCase {
     String val = "2001-01-01 01:02:03";
     runAndVerify(udf1,
         new Text(val),
-        new LongWritable(Timestamp.valueOf(val).toEpochSecond()));
+        new LongWritable(Timestamp.valueOf(val).getTime() / 1000));
 
     // test null values
     runAndVerify(udf1, null, null);
@@ -115,7 +116,7 @@ public class TestGenericUDFToUnixTimestamp extends TestCase {
     runAndVerify(udf2,
         new Text(val),
         new Text(format),
-        new LongWritable(Date.valueOf(val).toEpochSecond()));
+        new LongWritable(Date.valueOf(val).getTime() / 1000));
 
     // test null values
     runAndVerify(udf2, null, null, null);
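
This last file replaces toEpochSecond() with getTime() / 1000: getTime() returns milliseconds since the epoch, so integer division by 1000 yields whole epoch seconds. One subtlety worth knowing is that Java's / truncates toward zero, which only matches a floor for instants from 1970 onward. A minimal sketch (EpochSecondsDemo is an illustrative name):

    import java.sql.Timestamp;

    public class EpochSecondsDemo {
      public static void main(String[] args) {
        Timestamp ts = Timestamp.valueOf("2001-02-03 01:02:03");
        // Milliseconds since the epoch, truncated to whole seconds.
        System.out.println(ts.getTime() / 1000);
        // floorDiv agrees here; it would differ for pre-1970 instants
        // with a fractional-second component.
        System.out.println(Math.floorDiv(ts.getTime(), 1000L));
      }
    }

The tests remain time-zone independent because the expected LongWritable is derived from the same parsed Timestamp the UDF receives, so any zone dependence cancels out.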