You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by li...@apache.org on 2017/05/11 06:47:23 UTC

[2/3] hive git commit: HIVE-14412: Add timestamp with time zone (Rui Li reviewed by Xuefu Zhang, Pengcheng Xiong, Carter Shanklin, Ashutosh Chauhan)

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 1c78c1a..003e09f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -818,6 +818,8 @@ nonReserved
     | KW_EXPRESSION
     | KW_DETAIL
     | KW_WAIT
+    | KW_ZONE
+    | KW_TIMESTAMPTZ
 
 ;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index 82141be..f678d0b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -747,6 +747,8 @@ public class TypeCheckProcFactory {
           serdeConstants.DATE_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMP,
           serdeConstants.TIMESTAMP_TYPE_NAME);
+      conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMPTZ,
+          serdeConstants.TIMESTAMPTZ_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_YEAR_MONTH,
           serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_DAY_TIME,

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
index bda2050..76f7dae 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
@@ -100,6 +100,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjec
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampTZObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -743,7 +744,8 @@ public class StatsUtils {
     } else if (colTypeLowerCase.equals(serdeConstants.BINARY_TYPE_NAME)) {
       cs.setAvgColLen(csd.getBinaryStats().getAvgColLen());
       cs.setNumNulls(csd.getBinaryStats().getNumNulls());
-    } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
+    } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME) ||
+        colTypeLowerCase.equals(serdeConstants.TIMESTAMPTZ_TYPE_NAME)) {
       cs.setAvgColLen(JavaDataModel.get().lengthOfTimestamp());
     } else if (colTypeLowerCase.startsWith(serdeConstants.DECIMAL_TYPE_NAME)) {
       cs.setAvgColLen(JavaDataModel.get().lengthOfDecimal());
@@ -1042,7 +1044,8 @@ public class StatsUtils {
         || colTypeLowerCase.equals(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME)
         || colTypeLowerCase.equals("long")) {
       return JavaDataModel.get().primitive2();
-    } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
+    } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME) ||
+        colTypeLowerCase.equals(serdeConstants.TIMESTAMPTZ_TYPE_NAME)) {
       return JavaDataModel.get().lengthOfTimestamp();
     } else if (colTypeLowerCase.equals(serdeConstants.DATE_TYPE_NAME)) {
       return JavaDataModel.get().lengthOfDate();
@@ -1079,7 +1082,8 @@ public class StatsUtils {
       return JavaDataModel.get().lengthForByteArrayOfSize(length);
     } else if (colTypeLowerCase.equals(serdeConstants.BOOLEAN_TYPE_NAME)) {
       return JavaDataModel.get().lengthForBooleanArrayOfSize(length);
-    } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
+    } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME) ||
+        colTypeLowerCase.equals(serdeConstants.TIMESTAMPTZ_TYPE_NAME)) {
       return JavaDataModel.get().lengthForTimestampArrayOfSize(length);
     } else if (colTypeLowerCase.equals(serdeConstants.DATE_TYPE_NAME)) {
       return JavaDataModel.get().lengthForDateArrayOfSize(length);
@@ -1164,7 +1168,8 @@ public class StatsUtils {
       return JavaDataModel.get().primitive2();
     } else if (oi instanceof WritableShortObjectInspector) {
       return JavaDataModel.get().primitive1();
-    } else if (oi instanceof WritableTimestampObjectInspector) {
+    } else if (oi instanceof WritableTimestampObjectInspector ||
+        oi instanceof WritableTimestampTZObjectInspector) {
       return JavaDataModel.get().lengthOfTimestamp();
     }
 
@@ -1543,7 +1548,8 @@ public class StatsUtils {
         } else if (colTypeLowerCase.equals(serdeConstants.BINARY_TYPE_NAME)) {
           int acl = (int) Math.round(cs.getAvgColLen());
           sizeOf = JavaDataModel.get().lengthForByteArrayOfSize(acl);
-        } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
+        } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME) ||
+            colTypeLowerCase.equals(serdeConstants.TIMESTAMPTZ_TYPE_NAME)) {
           sizeOf = JavaDataModel.get().lengthOfTimestamp();
         } else if (colTypeLowerCase.startsWith(serdeConstants.DECIMAL_TYPE_NAME)) {
           sizeOf = JavaDataModel.get().lengthOfDecimal();

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
index 7cdf2c3..1605877 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.FloatWritable;

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
index 5cacd59..c10552a 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
 import org.apache.hadoop.hive.serde2.lazy.LazyLong;
@@ -152,6 +153,15 @@ public class UDFToString extends UDF {
     }
   }
 
+  public Text evaluate(TimestampTZWritable i) {
+    if (i == null) {
+      return null;
+    } else {
+      t.set(i.toString());
+      return t;
+    }
+  }
+
   public Text evaluate(HiveDecimalWritable i) {
     if (i == null) {
       return null;

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
index 68d98f5..6b67dea 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
@@ -406,6 +406,7 @@ public abstract class GenericUDF implements Closeable {
     case TIMESTAMP:
     case DATE:
     case VOID:
+    case TIMESTAMPTZ:
       outOi = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
       break;
     default:
@@ -428,6 +429,7 @@ public abstract class GenericUDF implements Closeable {
     case CHAR:
     case TIMESTAMP:
     case DATE:
+    case TIMESTAMPTZ:
       break;
     default:
       throw new UDFArgumentTypeException(i, getFuncName()
@@ -502,6 +504,7 @@ public abstract class GenericUDF implements Closeable {
       break;
     case TIMESTAMP:
     case DATE:
+    case TIMESTAMPTZ:
       Object writableValue = converters[i].convert(obj);
       date = ((DateWritable) writableValue).get();
       break;

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
index 5a31e61..4247afd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
@@ -87,6 +87,7 @@ public class GenericUDFDate extends GenericUDF {
       timestampConverter = new TimestampConverter(argumentOI,
         PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
       break;
+    case TIMESTAMPTZ:
     case DATE:
       dateWritableConverter = ObjectInspectorConverters.getConverter(argumentOI,
           PrimitiveObjectInspectorFactory.writableDateObjectInspector);
@@ -120,6 +121,7 @@ public class GenericUDFDate extends GenericUDF {
           .getTimestamp();
       output.set(DateWritable.millisToDays(ts.getTime()));
       break;
+    case TIMESTAMPTZ:
     case DATE:
       DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
       output.set(dw);

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToTimestampTZ.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToTimestampTZ.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToTimestampTZ.java
new file mode 100644
index 0000000..e96012b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToTimestampTZ.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+ * Convert from string to TIMESTAMP WITH TIME ZONE.
+ */
+@Description(name = "timestamp with time zone",
+    value = "CAST(STRING as TIMESTAMP WITH TIME ZONE) - returns the " +
+        "timestamp with time zone represented by string.",
+    extended = "The string should be of format 'yyyy-MM-dd HH:mm:ss[.SSS...] ZoneId/ZoneOffset'. " +
+        "Examples of ZoneId and ZoneOffset are Asia/Shanghai and GMT+08:00. " +
+        "The time and zone parts are optional. If time is absent, '00:00:00.0' will be used. " +
+        "If zone is absent, the system time zone will be used.")
+public class GenericUDFToTimestampTZ extends GenericUDF {
+
+  private transient PrimitiveObjectInspector argumentOI;
+  private transient PrimitiveObjectInspectorConverter.TimestampTZConverter converter;
+
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    if (arguments.length < 1) {
+      throw new UDFArgumentLengthException(
+          "The function CAST as TIMESTAMP WITH TIME ZONE requires at least one argument, got "
+              + arguments.length);
+    }
+    try {
+      argumentOI = (PrimitiveObjectInspector) arguments[0];
+      switch (argumentOI.getPrimitiveCategory()) {
+      case CHAR:
+      case VARCHAR:
+      case STRING:
+      case DATE:
+      case TIMESTAMP:
+      case TIMESTAMPTZ:
+        break;
+      default:
+        throw new UDFArgumentException("CAST as TIMESTAMP WITH TIME ZONE only allows " +
+            "string/date/timestamp/timestamp with time zone types");
+      }
+    } catch (ClassCastException e) {
+      throw new UDFArgumentException(
+          "The function CAST as TIMESTAMP WITH TIME ZONE takes only primitive types");
+    }
+    converter = new PrimitiveObjectInspectorConverter.TimestampTZConverter(argumentOI,
+        PrimitiveObjectInspectorFactory.writableTimestampTZObjectInspector);
+    return PrimitiveObjectInspectorFactory.writableTimestampTZObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    Object o0 = arguments[0].get();
+    if (o0 == null) {
+      return null;
+    }
+    return converter.convert(o0);
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 1);
+    return "CAST(" + children[0] + " AS TIMESTAMP WITH TIME ZONE)";
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
index 0dc6b19..8be8583 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
@@ -30,7 +30,7 @@ import org.junit.Test;
 
 /**
  * Parser tests for SQL11 Reserved KeyWords. Please find more information in
- * HIVE-6617. Total number : 82
+ * HIVE-6617. Total number : 83
  * ALL,ALTER,ARRAY,AS,AUTHORIZATION,BETWEEN,BIGINT,BINARY
  * ,BOOLEAN,BOTH,BY,CONSTRAINT
  * ,CREATE,CUBE,CURRENT_DATE,CURRENT_TIMESTAMP,CURSOR,
@@ -43,7 +43,7 @@ import org.junit.Test;
  * ,PRIMARY,PROCEDURE,RANGE,READS,
  * REFERENCES,REGEXP,REVOKE,RIGHT,RLIKE,ROLLUP,ROW
  * ,ROWS,SET,SMALLINT,TABLE,TIMESTAMP
- * ,TO,TRIGGER,TRUE,TRUNCATE,UNION,UPDATE,USER,USING,VALUES,WITH,
+ * ,TO,TRIGGER,TRUE,TRUNCATE,UNION,UPDATE,USER,USING,VALUES,WITH,TIME
  */
 public class TestSQL11ReservedKeyWordsNegative {
 	private static HiveConf conf;
@@ -1137,4 +1137,17 @@ public class TestSQL11ReservedKeyWordsNegative {
 		}
 	}
 
+  @Test
+  public void testSQL11ReservedKeyWords_TIME() {
+    try {
+      parse("CREATE TABLE TIME (col STRING)");
+      Assert.fail("Expected ParseException");
+    } catch (ParseException ex) {
+      Assert.assertEquals(
+          "Failure didn't match.",
+          "line 1:13 cannot recognize input near 'TIME' '(' 'col' in table name",
+          ex.getMessage());
+    }
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientnegative/serde_regex.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/serde_regex.q b/ql/src/test/queries/clientnegative/serde_regex.q
index c9cfc7d..9a1776a 100644
--- a/ql/src/test/queries/clientnegative/serde_regex.q
+++ b/ql/src/test/queries/clientnegative/serde_regex.q
@@ -4,7 +4,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time TIMESTAMP,
+  `time` TIMESTAMP,
   request STRING,
   status INT,
   size INT,

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientnegative/serde_regex2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/serde_regex2.q b/ql/src/test/queries/clientnegative/serde_regex2.q
index a29bb9c..12e802e 100644
--- a/ql/src/test/queries/clientnegative/serde_regex2.q
+++ b/ql/src/test/queries/clientnegative/serde_regex2.q
@@ -5,7 +5,7 @@ USE default;
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size STRING,
@@ -21,4 +21,4 @@ LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_reg
 LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex;
 
 -- raise an exception 
-SELECT * FROM serde_regex ORDER BY time;
\ No newline at end of file
+SELECT * FROM serde_regex ORDER BY `time`;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientnegative/serde_regex3.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/serde_regex3.q b/ql/src/test/queries/clientnegative/serde_regex3.q
index 4e91f06..b7810b5 100644
--- a/ql/src/test/queries/clientnegative/serde_regex3.q
+++ b/ql/src/test/queries/clientnegative/serde_regex3.q
@@ -4,7 +4,7 @@ USE default;
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size STRING,

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientpositive/create_like.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/create_like.q b/ql/src/test/queries/clientpositive/create_like.q
index bd39731..81172f3 100644
--- a/ql/src/test/queries/clientpositive/create_like.q
+++ b/ql/src/test/queries/clientpositive/create_like.q
@@ -84,7 +84,7 @@ DESCRIBE FORMATTED table6;
 drop table table5;
 
 create table orc_table (
-time string)
+`time` string)
 stored as ORC tblproperties ("orc.compress"="SNAPPY");
 
 create table orc_table_using_like like orc_table;

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientpositive/join43.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/join43.q b/ql/src/test/queries/clientpositive/join43.q
index 12c45a6..b2e10dc 100644
--- a/ql/src/test/queries/clientpositive/join43.q
+++ b/ql/src/test/queries/clientpositive/join43.q
@@ -1,11 +1,11 @@
 set hive.mapred.mode=nonstrict;
-create table purchase_history (s string, product string, price double, time int);
+create table purchase_history (s string, product string, price double, `time` int);
 insert into purchase_history values ('1', 'Belt', 20.00, 21);
 insert into purchase_history values ('1', 'Socks', 3.50, 31);
 insert into purchase_history values ('3', 'Belt', 20.00, 51);
 insert into purchase_history values ('4', 'Shirt', 15.50, 59);
 
-create table cart_history (s string, cart_id int, time int);
+create table cart_history (s string, cart_id int, `time` int);
 insert into cart_history values ('1', 1, 10);
 insert into cart_history values ('1', 2, 20);
 insert into cart_history values ('1', 3, 30);
@@ -13,7 +13,7 @@ insert into cart_history values ('1', 4, 40);
 insert into cart_history values ('3', 5, 50);
 insert into cart_history values ('4', 6, 60);
 
-create table events (s string, st2 string, n int, time int);
+create table events (s string, st2 string, n int, `time` int);
 insert into events values ('1', 'Bob', 1234, 20);
 insert into events values ('1', 'Bob', 1234, 30);
 insert into events values ('1', 'Bob', 1234, 25);
@@ -26,30 +26,30 @@ select s
 from (
   select last.*, action.st2, action.n
   from (
-    select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+    select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
     from (select * from purchase_history) purchase
     join (select * from cart_history) mevt
     on purchase.s = mevt.s
-    where purchase.time > mevt.time
-    group by purchase.s, purchase.time
+    where purchase.`time` > mevt.`time`
+    group by purchase.s, purchase.`time`
   ) last
   join (select * from events) action
-  on last.s = action.s and last.last_stage_time = action.time
+  on last.s = action.s and last.last_stage_time = action.`time`
 ) list;
 
 select s
 from (
   select last.*, action.st2, action.n
   from (
-    select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+    select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
     from (select * from purchase_history) purchase
     join (select * from cart_history) mevt
     on purchase.s = mevt.s
-    where purchase.time > mevt.time
-    group by purchase.s, purchase.time
+    where purchase.`time` > mevt.`time`
+    group by purchase.s, purchase.`time`
   ) last
   join (select * from events) action
-  on last.s = action.s and last.last_stage_time = action.time
+  on last.s = action.s and last.last_stage_time = action.`time`
 ) list;
 
 explain
@@ -57,28 +57,28 @@ select *
 from (
   select last.*, action.st2, action.n
   from (
-    select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+    select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
     from (select * from purchase_history) purchase
     join (select * from cart_history) mevt
     on purchase.s = mevt.s
-    where purchase.time > mevt.time
-    group by purchase.s, purchase.time
+    where purchase.`time` > mevt.`time`
+    group by purchase.s, purchase.`time`
   ) last
   join (select * from events) action
-  on last.s = action.s and last.last_stage_time = action.time
+  on last.s = action.s and last.last_stage_time = action.`time`
 ) list;
 
 select *
 from (
   select last.*, action.st2, action.n
   from (
-    select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+    select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
     from (select * from purchase_history) purchase
     join (select * from cart_history) mevt
     on purchase.s = mevt.s
-    where purchase.time > mevt.time
-    group by purchase.s, purchase.time
+    where purchase.`time` > mevt.`time`
+    group by purchase.s, purchase.`time`
   ) last
   join (select * from events) action
-  on last.s = action.s and last.last_stage_time = action.time
+  on last.s = action.s and last.last_stage_time = action.`time`
 ) list;

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientpositive/serde_regex.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/serde_regex.q b/ql/src/test/queries/clientpositive/serde_regex.q
index e21c6e1..fc716ed 100644
--- a/ql/src/test/queries/clientpositive/serde_regex.q
+++ b/ql/src/test/queries/clientpositive/serde_regex.q
@@ -4,7 +4,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size INT,
@@ -20,7 +20,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size INT,
@@ -35,9 +35,9 @@ STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex;
 LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex;
 
-SELECT * FROM serde_regex ORDER BY time;
+SELECT * FROM serde_regex ORDER BY `time`;
 
-SELECT host, size, status, time from serde_regex ORDER BY time;
+SELECT host, size, status, `time` from serde_regex ORDER BY `time`;
 
 DROP TABLE serde_regex;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientpositive/timestamptz.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/timestamptz.q b/ql/src/test/queries/clientpositive/timestamptz.q
new file mode 100644
index 0000000..176fefd
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/timestamptz.q
@@ -0,0 +1,11 @@
+explain select cast('2005-01-03 02:01:00 GMT' as timestamp with time zone);
+select cast('2005-01-03 02:01:00 GMT' as timestamp with time zone);
+
+explain select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz);
+select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz);
+
+explain select cast('2016-01-03Europe/London' as timestamptz);
+select cast('2016-01-03Europe/London' as timestamptz);
+
+explain select cast('2016-01-03 13:34:56.38 +1:00' as timestamptz);
+select cast('2016-01-03 13:34:56.38 +1:00' as timestamptz);

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientpositive/timestamptz_1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/timestamptz_1.q b/ql/src/test/queries/clientpositive/timestamptz_1.q
new file mode 100644
index 0000000..c11aea2
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/timestamptz_1.q
@@ -0,0 +1,25 @@
+set hive.fetch.task.conversion=more;
+
+drop table tstz1;
+
+create table tstz1(t timestamp with time zone);
+
+insert overwrite table tstz1 select cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamp with time zone);
+select cast(t as string) from tstz1;
+select cast(t as date) from tstz1;
+select cast(t as timestamp) from tstz1;
+
+insert overwrite table tstz1 select '2016-01-03 12:26:34.1 GMT';
+select cast(t as string) from tstz1;
+select cast(t as date) from tstz1;
+select cast(t as timestamp) from tstz1;
+
+insert overwrite table tstz1 select '2016-01-03 12:26:34.0123 Europe/London';
+select cast(t as string) from tstz1;
+select cast(t as date) from tstz1;
+select cast(t as timestamp) from tstz1;
+
+insert overwrite table tstz1 select '2016-01-03 12:26:34.012300 GMT+08:00';
+select cast(t as string) from tstz1;
+select cast(t as date) from tstz1;
+select cast(t as timestamp) from tstz1;

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientpositive/timestamptz_2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/timestamptz_2.q b/ql/src/test/queries/clientpositive/timestamptz_2.q
new file mode 100644
index 0000000..a335f52
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/timestamptz_2.q
@@ -0,0 +1,19 @@
+set hive.fetch.task.conversion=more;
+
+drop table tstz2;
+
+create table tstz2(t timestamp with time zone);
+
+insert into table tstz2 values
+  ('2005-04-03 03:01:00.04067 GMT-07:00'),('2005-01-03 02:01:00 GMT'),('2005-01-03 06:01:00 GMT+04:00'),
+  ('2013-06-03 02:01:00.30547 GMT+01:00'),('2016-01-03 12:26:34.0123 GMT+08:00');
+
+select * from tstz2 where t='2005-01-02 19:01:00 GMT-07:00';
+
+select * from tstz2 where t>'2013-06-03 02:01:00.30547 GMT+01:00';
+
+select min(t),max(t) from tstz2;
+
+select t from tstz2 group by t order by t;
+
+select * from tstz2 a join tstz2 b on a.t=b.t order by a.t;

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientnegative/serde_regex.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/serde_regex.q.out b/ql/src/test/results/clientnegative/serde_regex.q.out
index a1ec5ca..1047a82 100644
--- a/ql/src/test/results/clientnegative/serde_regex.q.out
+++ b/ql/src/test/results/clientnegative/serde_regex.q.out
@@ -8,7 +8,7 @@ PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time TIMESTAMP,
+  `time` TIMESTAMP,
   request STRING,
   status INT,
   size INT,

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientnegative/serde_regex2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/serde_regex2.q.out b/ql/src/test/results/clientnegative/serde_regex2.q.out
index 374675d..b0703fb 100644
--- a/ql/src/test/results/clientnegative/serde_regex2.q.out
+++ b/ql/src/test/results/clientnegative/serde_regex2.q.out
@@ -8,7 +8,7 @@ PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size STRING,
@@ -26,7 +26,7 @@ POSTHOOK: query: CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size STRING,
@@ -56,7 +56,7 @@ POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" I
 POSTHOOK: type: LOAD
 #### A masked pattern was here ####
 POSTHOOK: Output: default@serde_regex
-PREHOOK: query: SELECT * FROM serde_regex ORDER BY time
+PREHOOK: query: SELECT * FROM serde_regex ORDER BY `time`
 PREHOOK: type: QUERY
 PREHOOK: Input: default@serde_regex
 #### A masked pattern was here ####

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientnegative/serde_regex3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/serde_regex3.q.out b/ql/src/test/results/clientnegative/serde_regex3.q.out
index dc0a0e2..33d647b 100644
--- a/ql/src/test/results/clientnegative/serde_regex3.q.out
+++ b/ql/src/test/results/clientnegative/serde_regex3.q.out
@@ -8,7 +8,7 @@ PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size STRING,

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientpositive/create_like.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_like.q.out b/ql/src/test/results/clientpositive/create_like.q.out
index ff2e752..589ce59 100644
--- a/ql/src/test/results/clientpositive/create_like.q.out
+++ b/ql/src/test/results/clientpositive/create_like.q.out
@@ -657,13 +657,13 @@ POSTHOOK: type: DROPTABLE
 POSTHOOK: Input: default@table5
 POSTHOOK: Output: default@table5
 PREHOOK: query: create table orc_table (
-time string)
+`time` string)
 stored as ORC tblproperties ("orc.compress"="SNAPPY")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@orc_table
 POSTHOOK: query: create table orc_table (
-time string)
+`time` string)
 stored as ORC tblproperties ("orc.compress"="SNAPPY")
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientpositive/join43.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join43.q.out b/ql/src/test/results/clientpositive/join43.q.out
index e8c7278..24168ca 100644
--- a/ql/src/test/results/clientpositive/join43.q.out
+++ b/ql/src/test/results/clientpositive/join43.q.out
@@ -1,8 +1,8 @@
-PREHOOK: query: create table purchase_history (s string, product string, price double, time int)
+PREHOOK: query: create table purchase_history (s string, product string, price double, `time` int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@purchase_history
-POSTHOOK: query: create table purchase_history (s string, product string, price double, time int)
+POSTHOOK: query: create table purchase_history (s string, product string, price double, `time` int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@purchase_history
@@ -46,11 +46,11 @@ POSTHOOK: Lineage: purchase_history.price EXPRESSION [(values__tmp__table__4)val
 POSTHOOK: Lineage: purchase_history.product SIMPLE [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
 POSTHOOK: Lineage: purchase_history.s SIMPLE [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
 POSTHOOK: Lineage: purchase_history.time EXPRESSION [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
-PREHOOK: query: create table cart_history (s string, cart_id int, time int)
+PREHOOK: query: create table cart_history (s string, cart_id int, `time` int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@cart_history
-POSTHOOK: query: create table cart_history (s string, cart_id int, time int)
+POSTHOOK: query: create table cart_history (s string, cart_id int, `time` int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@cart_history
@@ -108,11 +108,11 @@ POSTHOOK: Output: default@cart_history
 POSTHOOK: Lineage: cart_history.cart_id EXPRESSION [(values__tmp__table__10)values__tmp__table__10.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
 POSTHOOK: Lineage: cart_history.s SIMPLE [(values__tmp__table__10)values__tmp__table__10.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
 POSTHOOK: Lineage: cart_history.time EXPRESSION [(values__tmp__table__10)values__tmp__table__10.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
-PREHOOK: query: create table events (s string, st2 string, n int, time int)
+PREHOOK: query: create table events (s string, st2 string, n int, `time` int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@events
-POSTHOOK: query: create table events (s string, st2 string, n int, time int)
+POSTHOOK: query: create table events (s string, st2 string, n int, `time` int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@events
@@ -181,15 +181,15 @@ select s
 from (
   select last.*, action.st2, action.n
   from (
-    select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+    select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
     from (select * from purchase_history) purchase
     join (select * from cart_history) mevt
     on purchase.s = mevt.s
-    where purchase.time > mevt.time
-    group by purchase.s, purchase.time
+    where purchase.`time` > mevt.`time`
+    group by purchase.s, purchase.`time`
   ) last
   join (select * from events) action
-  on last.s = action.s and last.last_stage_time = action.time
+  on last.s = action.s and last.last_stage_time = action.`time`
 ) list
 PREHOOK: type: QUERY
 POSTHOOK: query: explain
@@ -197,15 +197,15 @@ select s
 from (
   select last.*, action.st2, action.n
   from (
-    select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+    select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
     from (select * from purchase_history) purchase
     join (select * from cart_history) mevt
     on purchase.s = mevt.s
-    where purchase.time > mevt.time
-    group by purchase.s, purchase.time
+    where purchase.`time` > mevt.`time`
+    group by purchase.s, purchase.`time`
   ) last
   join (select * from events) action
-  on last.s = action.s and last.last_stage_time = action.time
+  on last.s = action.s and last.last_stage_time = action.`time`
 ) list
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
@@ -357,15 +357,15 @@ PREHOOK: query: select s
 from (
   select last.*, action.st2, action.n
   from (
-    select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+    select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
     from (select * from purchase_history) purchase
     join (select * from cart_history) mevt
     on purchase.s = mevt.s
-    where purchase.time > mevt.time
-    group by purchase.s, purchase.time
+    where purchase.`time` > mevt.`time`
+    group by purchase.s, purchase.`time`
   ) last
   join (select * from events) action
-  on last.s = action.s and last.last_stage_time = action.time
+  on last.s = action.s and last.last_stage_time = action.`time`
 ) list
 PREHOOK: type: QUERY
 PREHOOK: Input: default@cart_history
@@ -376,15 +376,15 @@ POSTHOOK: query: select s
 from (
   select last.*, action.st2, action.n
   from (
-    select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+    select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
     from (select * from purchase_history) purchase
     join (select * from cart_history) mevt
     on purchase.s = mevt.s
-    where purchase.time > mevt.time
-    group by purchase.s, purchase.time
+    where purchase.`time` > mevt.`time`
+    group by purchase.s, purchase.`time`
   ) last
   join (select * from events) action
-  on last.s = action.s and last.last_stage_time = action.time
+  on last.s = action.s and last.last_stage_time = action.`time`
 ) list
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@cart_history
@@ -399,15 +399,15 @@ select *
 from (
   select last.*, action.st2, action.n
   from (
-    select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+    select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
     from (select * from purchase_history) purchase
     join (select * from cart_history) mevt
     on purchase.s = mevt.s
-    where purchase.time > mevt.time
-    group by purchase.s, purchase.time
+    where purchase.`time` > mevt.`time`
+    group by purchase.s, purchase.`time`
   ) last
   join (select * from events) action
-  on last.s = action.s and last.last_stage_time = action.time
+  on last.s = action.s and last.last_stage_time = action.`time`
 ) list
 PREHOOK: type: QUERY
 POSTHOOK: query: explain
@@ -415,15 +415,15 @@ select *
 from (
   select last.*, action.st2, action.n
   from (
-    select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+    select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
     from (select * from purchase_history) purchase
     join (select * from cart_history) mevt
     on purchase.s = mevt.s
-    where purchase.time > mevt.time
-    group by purchase.s, purchase.time
+    where purchase.`time` > mevt.`time`
+    group by purchase.s, purchase.`time`
   ) last
   join (select * from events) action
-  on last.s = action.s and last.last_stage_time = action.time
+  on last.s = action.s and last.last_stage_time = action.`time`
 ) list
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
@@ -577,15 +577,15 @@ PREHOOK: query: select *
 from (
   select last.*, action.st2, action.n
   from (
-    select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+    select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
     from (select * from purchase_history) purchase
     join (select * from cart_history) mevt
     on purchase.s = mevt.s
-    where purchase.time > mevt.time
-    group by purchase.s, purchase.time
+    where purchase.`time` > mevt.`time`
+    group by purchase.s, purchase.`time`
   ) last
   join (select * from events) action
-  on last.s = action.s and last.last_stage_time = action.time
+  on last.s = action.s and last.last_stage_time = action.`time`
 ) list
 PREHOOK: type: QUERY
 PREHOOK: Input: default@cart_history
@@ -596,15 +596,15 @@ POSTHOOK: query: select *
 from (
   select last.*, action.st2, action.n
   from (
-    select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+    select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
     from (select * from purchase_history) purchase
     join (select * from cart_history) mevt
     on purchase.s = mevt.s
-    where purchase.time > mevt.time
-    group by purchase.s, purchase.time
+    where purchase.`time` > mevt.`time`
+    group by purchase.s, purchase.`time`
   ) last
   join (select * from events) action
-  on last.s = action.s and last.last_stage_time = action.time
+  on last.s = action.s and last.last_stage_time = action.`time`
 ) list
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@cart_history

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientpositive/serde_regex.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/serde_regex.q.out b/ql/src/test/results/clientpositive/serde_regex.q.out
index 7bebb0c..5a19ec9 100644
--- a/ql/src/test/results/clientpositive/serde_regex.q.out
+++ b/ql/src/test/results/clientpositive/serde_regex.q.out
@@ -3,7 +3,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size INT,
@@ -20,7 +20,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size INT,
@@ -51,7 +51,7 @@ PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size INT,
@@ -69,7 +69,7 @@ POSTHOOK: query: CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size INT,
@@ -99,21 +99,21 @@ POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" I
 POSTHOOK: type: LOAD
 #### A masked pattern was here ####
 POSTHOOK: Output: default@serde_regex
-PREHOOK: query: SELECT * FROM serde_regex ORDER BY time
+PREHOOK: query: SELECT * FROM serde_regex ORDER BY `time`
 PREHOOK: type: QUERY
 PREHOOK: Input: default@serde_regex
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM serde_regex ORDER BY time
+POSTHOOK: query: SELECT * FROM serde_regex ORDER BY `time`
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@serde_regex
 #### A masked pattern was here ####
 127.0.0.1	-	frank	[10/Oct/2000:13:55:36 -0700]	"GET /apache_pb.gif HTTP/1.0"	200	2326	NULL	NULL
 127.0.0.1	-	-	[26/May/2009:00:00:00 +0000]	"GET /someurl/?track=Blabla(Main) HTTP/1.1"	200	5864	-	"Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.65 Safari/525.19"
-PREHOOK: query: SELECT host, size, status, time from serde_regex ORDER BY time
+PREHOOK: query: SELECT host, size, status, `time` from serde_regex ORDER BY `time`
 PREHOOK: type: QUERY
 PREHOOK: Input: default@serde_regex
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT host, size, status, time from serde_regex ORDER BY time
+POSTHOOK: query: SELECT host, size, status, `time` from serde_regex ORDER BY `time`
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@serde_regex
 #### A masked pattern was here ####

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientpositive/timestamptz.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamptz.q.out b/ql/src/test/results/clientpositive/timestamptz.q.out
new file mode 100644
index 0000000..626fe92
--- /dev/null
+++ b/ql/src/test/results/clientpositive/timestamptz.q.out
@@ -0,0 +1,124 @@
+PREHOOK: query: explain select cast('2005-01-03 02:01:00 GMT' as timestamp with time zone)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select cast('2005-01-03 02:01:00 GMT' as timestamp with time zone)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_table
+          Row Limit Per Split: 1
+          Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: 2005-01-03 02:01:00.0 Z (type: timestamp with time zone)
+            outputColumnNames: _col0
+            Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: select cast('2005-01-03 02:01:00 GMT' as timestamp with time zone)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2005-01-03 02:01:00 GMT' as timestamp with time zone)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2005-01-03 02:01:00.0 Z
+PREHOOK: query: explain select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_table
+          Row Limit Per Split: 1
+          Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: 2016-01-03 20:26:34.0123 Z (type: timestamp with time zone)
+            outputColumnNames: _col0
+            Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2016-01-03 20:26:34.0123 Z
+PREHOOK: query: explain select cast('2016-01-03Europe/London' as timestamptz)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select cast('2016-01-03Europe/London' as timestamptz)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_table
+          Row Limit Per Split: 1
+          Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: 2016-01-03 00:00:00.0 Z (type: timestamp with time zone)
+            outputColumnNames: _col0
+            Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: select cast('2016-01-03Europe/London' as timestamptz)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2016-01-03Europe/London' as timestamptz)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2016-01-03 00:00:00.0 Z
+PREHOOK: query: explain select cast('2016-01-03 13:34:56.38 +1:00' as timestamptz)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select cast('2016-01-03 13:34:56.38 +1:00' as timestamptz)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: _dummy_table
+          Row Limit Per Split: 1
+          Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: 2016-01-03 12:34:56.38 Z (type: timestamp with time zone)
+            outputColumnNames: _col0
+            Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: select cast('2016-01-03 13:34:56.38 +1:00' as timestamptz)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2016-01-03 13:34:56.38 +1:00' as timestamptz)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2016-01-03 12:34:56.38 Z

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientpositive/timestamptz_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamptz_1.q.out b/ql/src/test/results/clientpositive/timestamptz_1.q.out
new file mode 100644
index 0000000..75bbfac
--- /dev/null
+++ b/ql/src/test/results/clientpositive/timestamptz_1.q.out
@@ -0,0 +1,156 @@
+PREHOOK: query: drop table tstz1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstz1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstz1(t timestamp with time zone)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: create table tstz1(t timestamp with time zone)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tstz1
+PREHOOK: query: insert overwrite table tstz1 select cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamp with time zone)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: insert overwrite table tstz1 select cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamp with time zone)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tstz1
+POSTHOOK: Lineage: tstz1.t EXPRESSION []
+PREHOOK: query: select cast(t as string) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 20:26:34.0 Z
+PREHOOK: query: select cast(t as date) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as date) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03
+PREHOOK: query: select cast(t as timestamp) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as timestamp) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 20:26:34
+PREHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.1 GMT'
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.1 GMT'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tstz1
+POSTHOOK: Lineage: tstz1.t EXPRESSION []
+PREHOOK: query: select cast(t as string) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 12:26:34.1 Z
+PREHOOK: query: select cast(t as date) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as date) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03
+PREHOOK: query: select cast(t as timestamp) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as timestamp) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 12:26:34.1
+PREHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.0123 Europe/London'
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.0123 Europe/London'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tstz1
+POSTHOOK: Lineage: tstz1.t EXPRESSION []
+PREHOOK: query: select cast(t as string) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 12:26:34.0123 Z
+PREHOOK: query: select cast(t as date) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as date) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03
+PREHOOK: query: select cast(t as timestamp) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as timestamp) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 12:26:34.0123
+PREHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.012300 GMT+08:00'
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.012300 GMT+08:00'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tstz1
+POSTHOOK: Lineage: tstz1.t EXPRESSION []
+PREHOOK: query: select cast(t as string) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 04:26:34.0123 Z
+PREHOOK: query: select cast(t as date) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as date) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03
+PREHOOK: query: select cast(t as timestamp) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as timestamp) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 04:26:34.0123

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientpositive/timestamptz_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamptz_2.q.out b/ql/src/test/results/clientpositive/timestamptz_2.q.out
new file mode 100644
index 0000000..2666735
--- /dev/null
+++ b/ql/src/test/results/clientpositive/timestamptz_2.q.out
@@ -0,0 +1,78 @@
+PREHOOK: query: drop table tstz2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstz2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstz2(t timestamp with time zone)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tstz2
+POSTHOOK: query: create table tstz2(t timestamp with time zone)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tstz2
+PREHOOK: query: insert into table tstz2 values
+  ('2005-04-03 03:01:00.04067 GMT-07:00'),('2005-01-03 02:01:00 GMT'),('2005-01-03 06:01:00 GMT+04:00'),
+  ('2013-06-03 02:01:00.30547 GMT+01:00'),('2016-01-03 12:26:34.0123 GMT+08:00')
+PREHOOK: type: QUERY
+PREHOOK: Output: default@tstz2
+POSTHOOK: query: insert into table tstz2 values
+  ('2005-04-03 03:01:00.04067 GMT-07:00'),('2005-01-03 02:01:00 GMT'),('2005-01-03 06:01:00 GMT+04:00'),
+  ('2013-06-03 02:01:00.30547 GMT+01:00'),('2016-01-03 12:26:34.0123 GMT+08:00')
+POSTHOOK: type: QUERY
+POSTHOOK: Output: default@tstz2
+POSTHOOK: Lineage: tstz2.t EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: select * from tstz2 where t='2005-01-02 19:01:00 GMT-07:00'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tstz2 where t='2005-01-02 19:01:00 GMT-07:00'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2005-01-03 02:01:00.0 Z
+2005-01-03 02:01:00.0 Z
+PREHOOK: query: select * from tstz2 where t>'2013-06-03 02:01:00.30547 GMT+01:00'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tstz2 where t>'2013-06-03 02:01:00.30547 GMT+01:00'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2016-01-03 04:26:34.0123 Z
+PREHOOK: query: select min(t),max(t) from tstz2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select min(t),max(t) from tstz2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2005-01-03 02:01:00.0 Z	2016-01-03 04:26:34.0123 Z
+PREHOOK: query: select t from tstz2 group by t order by t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select t from tstz2 group by t order by t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2005-01-03 02:01:00.0 Z
+2005-04-03 10:01:00.04067 Z
+2013-06-03 01:01:00.30547 Z
+2016-01-03 04:26:34.0123 Z
+PREHOOK: query: select * from tstz2 a join tstz2 b on a.t=b.t order by a.t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tstz2 a join tstz2 b on a.t=b.t order by a.t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2005-01-03 02:01:00.0 Z	2005-01-03 02:01:00.0 Z
+2005-01-03 02:01:00.0 Z	2005-01-03 02:01:00.0 Z
+2005-01-03 02:01:00.0 Z	2005-01-03 02:01:00.0 Z
+2005-01-03 02:01:00.0 Z	2005-01-03 02:01:00.0 Z
+2005-04-03 10:01:00.04067 Z	2005-04-03 10:01:00.04067 Z
+2013-06-03 01:01:00.30547 Z	2013-06-03 01:01:00.30547 Z
+2016-01-03 04:26:34.0123 Z	2016-01-03 04:26:34.0123 Z

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/if/serde.thrift
----------------------------------------------------------------------
diff --git a/serde/if/serde.thrift b/serde/if/serde.thrift
index 1d40d5a..56d8beb 100644
--- a/serde/if/serde.thrift
+++ b/serde/if/serde.thrift
@@ -64,6 +64,7 @@ const string DECIMAL_TYPE_NAME   = "decimal";
 const string BINARY_TYPE_NAME    = "binary";
 const string INTERVAL_YEAR_MONTH_TYPE_NAME = "interval_year_month";
 const string INTERVAL_DAY_TIME_TYPE_NAME   = "interval_day_time";
+const string TIMESTAMPTZ_TYPE_NAME = "timestamp with time zone";
 
 const string LIST_TYPE_NAME = "array";
 const string MAP_TYPE_NAME  = "map";
@@ -95,6 +96,7 @@ const set<string> PrimitiveTypes  = [
   INTERVAL_DAY_TIME_TYPE_NAME
   DECIMAL_TYPE_NAME
   BINARY_TYPE_NAME
+  TIMESTAMPTZ_TYPE_NAME
 ],
 
 const set<string> CollectionTypes = [ LIST_TYPE_NAME MAP_TYPE_NAME ],

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
index 907acf2..18a3458 100644
--- a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
+++ b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
@@ -85,6 +85,8 @@ serdeConstants::serdeConstants() {
 
   INTERVAL_DAY_TIME_TYPE_NAME = "interval_day_time";
 
+  TIMESTAMPTZ_TYPE_NAME = "timestamp with time zone";
+
   LIST_TYPE_NAME = "array";
 
   MAP_TYPE_NAME = "map";
@@ -119,6 +121,7 @@ serdeConstants::serdeConstants() {
   PrimitiveTypes.insert("interval_day_time");
   PrimitiveTypes.insert("decimal");
   PrimitiveTypes.insert("binary");
+  PrimitiveTypes.insert("timestamp with time zone");
 
   CollectionTypes.insert("array");
   CollectionTypes.insert("map");

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/gen/thrift/gen-cpp/serde_constants.h
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.h b/serde/src/gen/thrift/gen-cpp/serde_constants.h
index 8785bd2..e261ed3 100644
--- a/serde/src/gen/thrift/gen-cpp/serde_constants.h
+++ b/serde/src/gen/thrift/gen-cpp/serde_constants.h
@@ -52,6 +52,7 @@ class serdeConstants {
   std::string BINARY_TYPE_NAME;
   std::string INTERVAL_YEAR_MONTH_TYPE_NAME;
   std::string INTERVAL_DAY_TIME_TYPE_NAME;
+  std::string TIMESTAMPTZ_TYPE_NAME;
   std::string LIST_TYPE_NAME;
   std::string MAP_TYPE_NAME;
   std::string STRUCT_TYPE_NAME;

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
index 2578d3e..398ec6f 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
@@ -110,6 +110,8 @@ public class serdeConstants {
 
   public static final String INTERVAL_DAY_TIME_TYPE_NAME = "interval_day_time";
 
+  public static final String TIMESTAMPTZ_TYPE_NAME = "timestamp with time zone";
+
   public static final String LIST_TYPE_NAME = "array";
 
   public static final String MAP_TYPE_NAME = "map";
@@ -146,6 +148,7 @@ public class serdeConstants {
     PrimitiveTypes.add("interval_day_time");
     PrimitiveTypes.add("decimal");
     PrimitiveTypes.add("binary");
+    PrimitiveTypes.add("timestamp with time zone");
   }
 
   public static final Set<String> CollectionTypes = new HashSet<String>();

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
index ea2dbbe..2141e8f 100644
--- a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
+++ b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
@@ -55,6 +55,7 @@ final class Constant extends \Thrift\Type\TConstant {
   static protected $BINARY_TYPE_NAME;
   static protected $INTERVAL_YEAR_MONTH_TYPE_NAME;
   static protected $INTERVAL_DAY_TIME_TYPE_NAME;
+  static protected $TIMESTAMPTZ_TYPE_NAME;
   static protected $LIST_TYPE_NAME;
   static protected $MAP_TYPE_NAME;
   static protected $STRUCT_TYPE_NAME;
@@ -215,6 +216,10 @@ final class Constant extends \Thrift\Type\TConstant {
     return "interval_day_time";
   }
 
+  static protected function init_TIMESTAMPTZ_TYPE_NAME() {
+    return "timestamp with time zone";
+  }
+
   static protected function init_LIST_TYPE_NAME() {
     return "array";
   }
@@ -267,6 +272,7 @@ final class Constant extends \Thrift\Type\TConstant {
       "interval_day_time" => true,
       "decimal" => true,
       "binary" => true,
+      "timestamp with time zone" => true,
     );
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
index e3b24eb..bc8387b 100644
--- a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
+++ b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
@@ -46,6 +46,7 @@ DECIMAL_TYPE_NAME = "decimal"
 BINARY_TYPE_NAME = "binary"
 INTERVAL_YEAR_MONTH_TYPE_NAME = "interval_year_month"
 INTERVAL_DAY_TIME_TYPE_NAME = "interval_day_time"
+TIMESTAMPTZ_TYPE_NAME = "timestamp with time zone"
 LIST_TYPE_NAME = "array"
 MAP_TYPE_NAME = "map"
 STRUCT_TYPE_NAME = "struct"
@@ -73,6 +74,7 @@ PrimitiveTypes = set([
   "interval_day_time",
   "decimal",
   "binary",
+  "timestamp with time zone",
 ])
 CollectionTypes = set([
   "array",

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/gen/thrift/gen-rb/serde_constants.rb
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-rb/serde_constants.rb b/serde/src/gen/thrift/gen-rb/serde_constants.rb
index 15efaea..da8748d 100644
--- a/serde/src/gen/thrift/gen-rb/serde_constants.rb
+++ b/serde/src/gen/thrift/gen-rb/serde_constants.rb
@@ -81,6 +81,8 @@ INTERVAL_YEAR_MONTH_TYPE_NAME = %q"interval_year_month"
 
 INTERVAL_DAY_TIME_TYPE_NAME = %q"interval_day_time"
 
+TIMESTAMPTZ_TYPE_NAME = %q"timestamp with time zone"
+
 LIST_TYPE_NAME = %q"array"
 
 MAP_TYPE_NAME = %q"map"
@@ -116,6 +118,7 @@ PrimitiveTypes = Set.new([
   %q"interval_day_time",
   %q"decimal",
   %q"binary",
+  %q"timestamp with time zone",
 ])
 
 CollectionTypes = Set.new([

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
index 5ecfbca..9ead0ed 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspect
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampTZObjectInspector;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.slf4j.Logger;
@@ -276,6 +277,12 @@ public final class SerDeUtils {
           sb.append('"');
           break;
         }
+        case TIMESTAMPTZ: {
+          sb.append('"');
+          sb.append(((TimestampTZObjectInspector) poi).getPrimitiveWritableObject(o));
+          sb.append('"');
+          break;
+        }
         case BINARY: {
           BytesWritable bw = ((BinaryObjectInspector) oi).getPrimitiveWritableObject(o);
           Text txt = new Text();

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
index 89e15c3..f333ae9 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -76,6 +77,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspect
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampTZObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
@@ -420,7 +422,15 @@ public class BinarySortableSerDe extends AbstractSerDe {
         }
         t.setBinarySortable(bytes, 0);
         return t;
-
+      case TIMESTAMPTZ:
+        TimestampTZWritable tstz = (reuse == null ? new TimestampTZWritable() :
+            (TimestampTZWritable) reuse);
+        byte[] data = new byte[TimestampTZWritable.BINARY_SORTABLE_LENGTH];
+        for (int i = 0; i < data.length; i++) {
+          data[i] = buffer.read(invert);
+        }
+        tstz.fromBinarySortable(data, 0);
+        return tstz;
       case INTERVAL_YEAR_MONTH: {
         HiveIntervalYearMonthWritable i = reuse == null ? new HiveIntervalYearMonthWritable()
             : (HiveIntervalYearMonthWritable) reuse;
@@ -788,6 +798,12 @@ public class BinarySortableSerDe extends AbstractSerDe {
         serializeTimestampWritable(buffer, t, invert);
         return;
       }
+      case TIMESTAMPTZ: {
+        TimestampTZObjectInspector toi = (TimestampTZObjectInspector) poi;
+        TimestampTZWritable t = toi.getPrimitiveWritableObject(o);
+        serializeTimestampTZWritable(buffer, t, invert);
+        return;
+      }
       case INTERVAL_YEAR_MONTH: {
         HiveIntervalYearMonthObjectInspector ioi = (HiveIntervalYearMonthObjectInspector) poi;
         HiveIntervalYearMonth intervalYearMonth = ioi.getPrimitiveJavaObject(o);
@@ -958,6 +974,14 @@ public class BinarySortableSerDe extends AbstractSerDe {
     }
   }
 
+  public static void serializeTimestampTZWritable(
+      ByteStream.Output buffer, TimestampTZWritable t, boolean invert) {
+    byte[] data = t.toBinarySortable();
+    for (byte b : data) {
+      writeByte(buffer, b, invert);
+    }
+  }
+
   public static void serializeHiveIntervalYearMonth(ByteStream.Output buffer,
       HiveIntervalYearMonth intervalYearMonth, boolean invert) {
     int totalMonths = intervalYearMonth.getTotalMonths();

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampTZWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampTZWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampTZWritable.java
new file mode 100644
index 0000000..8c3f8f6
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampTZWritable.java
@@ -0,0 +1,427 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.io;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableUtils;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Arrays;
+
+/**
+ * Writable for TimestampTZ. Copied from TimestampWritable.
+ * After we replace {@link java.sql.Timestamp} with {@link java.time.LocalDateTime} for Timestamp,
+ * it'll need a new Writable.
+ */
+public class TimestampTZWritable implements WritableComparable<TimestampTZWritable> {
+
+  public static final byte[] nullBytes = {0x0, 0x0, 0x0, 0x0};
+  private static final int DECIMAL_OR_SECOND_VINT_FLAG = 1 << 31;
+  private static final long SEVEN_BYTE_LONG_SIGN_FLIP = 0xff80L << 48; // flips the sign bit (and the unused high-order bits) of a 7-byte long
+
+  /**
+   * The maximum number of bytes required for a TimestampTZWritable
+   */
+  public static final int MAX_BYTES = 13;
+
+  public static final int BINARY_SORTABLE_LENGTH = 11;
+
+  private TimestampTZ timestampTZ = new TimestampTZ();
+
+  /**
+   * True if the current value is held only in the timestampTZ field and the
+   * byte array is stale/empty; the bytes are then populated lazily when
+   * needed. False if the byte array holds the current value.
+   */
+  private boolean bytesEmpty = true;
+  private boolean timestampTZEmpty = true;
+
+  /* Allow use of external byte[] for efficiency */
+  private byte[] currentBytes;
+  private final byte[] internalBytes = new byte[MAX_BYTES];
+  private byte[] externalBytes;
+  private int offset;
+
+  public TimestampTZWritable() {
+    bytesEmpty = false;
+    currentBytes = internalBytes;
+    offset = 0;
+  }
+
+  public TimestampTZWritable(byte[] bytes, int offset) {
+    set(bytes, offset);
+  }
+
+  public TimestampTZWritable(TimestampTZWritable other) {
+    this(other.getBytes(), 0);
+  }
+
+  public TimestampTZWritable(TimestampTZ tstz) {
+    set(tstz);
+  }
+
+  public void set(byte[] bytes, int offset) {
+    externalBytes = bytes;
+    this.offset = offset;
+    bytesEmpty = false;
+    timestampTZEmpty = true;
+    currentBytes = externalBytes;
+  }
+
+  public void set(TimestampTZ tstz) {
+    if (tstz == null) {
+      timestampTZ.setZonedDateTime(null);
+      return;
+    }
+    timestampTZ = tstz;
+    bytesEmpty = true;
+    timestampTZEmpty = false;
+  }
+
+  public void set(TimestampTZWritable t) {
+    if (t.bytesEmpty) {
+      set(t.getTimestampTZ());
+    } else if (t.currentBytes == t.externalBytes) {
+      set(t.currentBytes, t.offset);
+    } else {
+      set(t.currentBytes, 0);
+    }
+  }
+
+  public TimestampTZ getTimestampTZ() {
+    populateTimestampTZ();
+    return timestampTZ;
+  }
+
+  /**
+   * Used to create copies of objects
+   *
+   * @return a copy of the internal TimestampTZWritable byte[]
+   */
+  public byte[] getBytes() {
+    checkBytes();
+
+    int len = getTotalLength();
+    byte[] b = new byte[len];
+
+    System.arraycopy(currentBytes, offset, b, 0, len);
+    return b;
+  }
+
+  /**
+   * @return length of serialized TimestampTZWritable data. As a side effect, populates the internal
+   * byte array if empty.
+   */
+  private int getTotalLength() {
+    checkBytes();
+    return getTotalLength(currentBytes, offset);
+  }
+
+  /**
+   * The data of TimestampTZWritable can be stored either in a byte[]
+   * or in a TimestampTZ object. Calling this method ensures that the byte[]
+   * is populated from the TimestampTZ object if previously empty.
+   */
+  private void checkBytes() {
+    if (bytesEmpty) {
+      populateBytes();
+      offset = 0;
+      currentBytes = internalBytes;
+      bytesEmpty = false;
+    }
+  }
+
+  // Writes the TimestampTZ's serialized value to the internal byte array.
+  private void populateBytes() {
+    Arrays.fill(internalBytes, (byte) 0);
+
+    long seconds = timestampTZ.getEpochSecond();
+    int nanos = timestampTZ.getNanos();
+
+    boolean hasSecondVInt = seconds < 0 || seconds > Integer.MAX_VALUE;
+    boolean hasDecimal = setNanosBytes(nanos, internalBytes, offset + 4, hasSecondVInt);
+
+    int firstInt = (int) seconds;
+    if (hasDecimal || hasSecondVInt) {
+      firstInt |= DECIMAL_OR_SECOND_VINT_FLAG;
+    }
+    intToBytes(firstInt, internalBytes, offset);
+    if (hasSecondVInt) {
+      LazyBinaryUtils.writeVLongToByteArray(internalBytes,
+          offset + 4 + WritableUtils.decodeVIntSize(internalBytes[offset + 4]),
+          seconds >> 31);
+    }
+  }
+
+  private void populateTimestampTZ() {
+    if (timestampTZEmpty) {
+      long seconds = getSeconds();
+      int nanos = getNanos();
+      timestampTZ.set(seconds, nanos);
+      timestampTZEmpty = false;
+    }
+  }
+
+  public long getSeconds() {
+    if (!timestampTZEmpty) {
+      return timestampTZ.getEpochSecond();
+    } else if (!bytesEmpty) {
+      return getSeconds(currentBytes, offset);
+    }
+    throw new IllegalStateException("Both timestamp and bytes are empty");
+  }
+
+  public int getNanos() {
+    if (!timestampTZEmpty) {
+      return timestampTZ.getNanos();
+    } else if (!bytesEmpty) {
+      return hasDecimalOrSecondVInt(currentBytes[offset]) ? getNanos(currentBytes, offset + 4) : 0;
+    }
+    throw new IllegalStateException("Both timestamp and bytes are empty");
+  }
+
+  @Override
+  public int compareTo(TimestampTZWritable o) {
+    return getTimestampTZ().compareTo(o.getTimestampTZ());
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (o instanceof TimestampTZWritable) {
+      return compareTo((TimestampTZWritable) o) == 0;
+    }
+    return false;
+  }
+
+  @Override
+  public int hashCode() {
+    return getTimestampTZ().hashCode();
+  }
+
+  @Override
+  public String toString() {
+    populateTimestampTZ();
+    return timestampTZ.toString();
+  }
+
+  @Override
+  public void write(DataOutput dataOutput) throws IOException {
+    checkBytes();
+    dataOutput.write(currentBytes, offset, getTotalLength());
+  }
+
+  @Override
+  public void readFields(DataInput dataInput) throws IOException {
+    dataInput.readFully(internalBytes, 0, 4);
+    if (hasDecimalOrSecondVInt(internalBytes[0])) {
+      dataInput.readFully(internalBytes, 4, 1);
+      int len = (byte) WritableUtils.decodeVIntSize(internalBytes[4]);
+      if (len > 1) {
+        dataInput.readFully(internalBytes, 5, len - 1);
+      }
+
+      long vlong = LazyBinaryUtils.readVLongFromByteArray(internalBytes, 4);
+      Preconditions.checkState(vlong >= -1000000000 && vlong <= 999999999,
+          "Invalid nanos value for a TimestampTZWritable: " + vlong +
+              ", expected to be between -1000000000 and 999999999.");
+      if (vlong < 0) {
+        // This indicates there is a second VInt containing the additional bits of the seconds
+        // field.
+        dataInput.readFully(internalBytes, 4 + len, 1);
+        int secondVIntLen = (byte) WritableUtils.decodeVIntSize(internalBytes[4 + len]);
+        if (secondVIntLen > 1) {
+          dataInput.readFully(internalBytes, 5 + len, secondVIntLen - 1);
+        }
+      }
+    }
+    currentBytes = internalBytes;
+    offset = 0;
+    timestampTZEmpty = true;
+    bytesEmpty = false;
+  }
+
+  public byte[] toBinarySortable() {
+    byte[] b = new byte[BINARY_SORTABLE_LENGTH];
+    int nanos = getNanos();
+    // We flip the highest-order bit of the seven-byte representation of seconds to make negative
+    // values come before positive ones.
+    long seconds = getSeconds() ^ SEVEN_BYTE_LONG_SIGN_FLIP;
+    sevenByteLongToBytes(seconds, b, 0);
+    intToBytes(nanos, b, 7);
+    return b;
+  }
+
+  public void fromBinarySortable(byte[] bytes, int binSortOffset) {
+    // Flip the sign bit (and unused bits of the high-order byte) of the seven-byte long back.
+    long seconds = readSevenByteLong(bytes, binSortOffset) ^ SEVEN_BYTE_LONG_SIGN_FLIP;
+    int nanos = bytesToInt(bytes, binSortOffset + 7);
+    timestampTZ.set(seconds, nanos);
+    timestampTZEmpty = false;
+    bytesEmpty = true;
+  }
+
+  public void writeToByteStream(ByteStream.RandomAccessOutput byteStream) {
+    checkBytes();
+    byteStream.write(currentBytes, offset, getTotalLength());
+  }
+
+  /**
+   * Given an integer representing nanoseconds, writes its serialized
+   * (reversed-digit VLong) value to the byte array b at offset.
+   *
+   * @param nanos nanoseconds value, expected to be in [0, 999999999]
+   * @param b destination byte array
+   * @param offset position in b at which to start writing
+   * @param hasSecondVInt whether a second VInt with extra seconds bits follows
+   * @return true if a non-zero nanos (decimal) field was written
+   */
+  private static boolean setNanosBytes(int nanos, byte[] b, int offset, boolean hasSecondVInt) {
+    int decimal = 0;
+    if (nanos != 0) {
+      int counter = 0;
+      while (counter < 9) {
+        decimal *= 10;
+        decimal += nanos % 10;
+        nanos /= 10;
+        counter++;
+      }
+    }
+
+    if (hasSecondVInt || decimal != 0) {
+      // We use the sign of the reversed-nanoseconds field to indicate that there is a second VInt
+      // present.
+      LazyBinaryUtils.writeVLongToByteArray(b, offset, hasSecondVInt ? (-decimal - 1) : decimal);
+    }
+    return decimal != 0;
+  }
+
+  public static void setTimestampTZ(TimestampTZ t, byte[] bytes, int offset) {
+    long seconds = getSeconds(bytes, offset);
+    int nanos = hasDecimalOrSecondVInt(bytes[offset]) ? getNanos(bytes, offset + 4) : 0;
+    t.set(seconds, nanos);
+  }
+
+  public static int getTotalLength(byte[] bytes, int offset) {
+    int len = 4;
+    if (hasDecimalOrSecondVInt(bytes[offset])) {
+      int firstVIntLen = WritableUtils.decodeVIntSize(bytes[offset + 4]);
+      len += firstVIntLen;
+      if (hasSecondVInt(bytes[offset + 4])) {
+        len += WritableUtils.decodeVIntSize(bytes[offset + 4 + firstVIntLen]);
+      }
+    }
+    return len;
+  }
+
+  public static long getSeconds(byte[] bytes, int offset) {
+    int firstVInt = bytesToInt(bytes, offset);
+    if (firstVInt >= 0 || !hasSecondVInt(bytes[offset + 4])) {
+      return firstVInt & ~DECIMAL_OR_SECOND_VINT_FLAG;
+    }
+    return ((long) (firstVInt & ~DECIMAL_OR_SECOND_VINT_FLAG)) |
+        (LazyBinaryUtils.readVLongFromByteArray(bytes,
+            offset + 4 + WritableUtils.decodeVIntSize(bytes[offset + 4])) << 31);
+  }
+
+  public static int getNanos(byte[] bytes, int offset) {
+    int val = (int) LazyBinaryUtils.readVLongFromByteArray(bytes, offset);
+    if (val < 0) {
+      val = -val - 1;
+    }
+    int len = (int) Math.floor(Math.log10(val)) + 1;
+
+    // Reverse the value
+    int tmp = 0;
+    while (val != 0) {
+      tmp *= 10;
+      tmp += val % 10;
+      val /= 10;
+    }
+    val = tmp;
+
+    if (len < 9) {
+      val *= Math.pow(10, 9 - len);
+    }
+    return val;
+  }
+
+  private static boolean hasDecimalOrSecondVInt(byte b) {
+    return b < 0;
+  }
+
+  private static boolean hasSecondVInt(byte b) {
+    return WritableUtils.isNegativeVInt(b);
+  }
+
+  /**
+   * Writes <code>value</code> into <code>dest</code> at <code>offset</code> as a 4-byte big-endian int.
+   *
+   * @param value the int to serialize
+   * @param dest destination byte array
+   * @param offset position in dest at which to start writing
+   */
+  private static void intToBytes(int value, byte[] dest, int offset) {
+    dest[offset] = (byte) ((value >> 24) & 0xFF);
+    dest[offset + 1] = (byte) ((value >> 16) & 0xFF);
+    dest[offset + 2] = (byte) ((value >> 8) & 0xFF);
+    dest[offset + 3] = (byte) (value & 0xFF);
+  }
+
+  /**
+   * Writes <code>value</code> into <code>dest</code> at <code>offset</code> as a seven-byte
+   * serialized long number.
+   */
+  private static void sevenByteLongToBytes(long value, byte[] dest, int offset) {
+    dest[offset] = (byte) ((value >> 48) & 0xFF);
+    dest[offset + 1] = (byte) ((value >> 40) & 0xFF);
+    dest[offset + 2] = (byte) ((value >> 32) & 0xFF);
+    dest[offset + 3] = (byte) ((value >> 24) & 0xFF);
+    dest[offset + 4] = (byte) ((value >> 16) & 0xFF);
+    dest[offset + 5] = (byte) ((value >> 8) & 0xFF);
+    dest[offset + 6] = (byte) (value & 0xFF);
+  }
+
+  /**
+   * @param bytes source byte array
+   * @param offset position of the first of the four big-endian bytes to read
+   * @return integer represented by the four bytes in <code>bytes</code>
+   * beginning at <code>offset</code>
+   */
+  private static int bytesToInt(byte[] bytes, int offset) {
+    return ((0xFF & bytes[offset]) << 24)
+        | ((0xFF & bytes[offset + 1]) << 16)
+        | ((0xFF & bytes[offset + 2]) << 8)
+        | (0xFF & bytes[offset + 3]);
+  }
+
+  private static long readSevenByteLong(byte[] bytes, int offset) {
+    // We need to shift everything 8 bits left and then shift back to populate the sign field.
+    return (((0xFFL & bytes[offset]) << 56)
+        | ((0xFFL & bytes[offset + 1]) << 48)
+        | ((0xFFL & bytes[offset + 2]) << 40)
+        | ((0xFFL & bytes[offset + 3]) << 32)
+        | ((0xFFL & bytes[offset + 4]) << 24)
+        | ((0xFFL & bytes[offset + 5]) << 16)
+        | ((0xFFL & bytes[offset + 6]) << 8)) >> 8;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
index 23dbe6a..2b940fd 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiv
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyShortObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampTZObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyVoidObjectInspector;
 import org.apache.hadoop.hive.serde2.lazydio.LazyDioBinary;
 import org.apache.hadoop.hive.serde2.lazydio.LazyDioBoolean;
@@ -128,6 +129,8 @@ public final class LazyFactory {
       return new LazyDate((LazyDateObjectInspector) oi);
     case TIMESTAMP:
       return new LazyTimestamp((LazyTimestampObjectInspector) oi);
+    case TIMESTAMPTZ:
+      return new LazyTimestampTZ((LazyTimestampTZObjectInspector) oi);
     case INTERVAL_YEAR_MONTH:
       return new LazyHiveIntervalYearMonth((LazyHiveIntervalYearMonthObjectInspector) oi);
     case INTERVAL_DAY_TIME: