Posted to commits@hive.apache.org by li...@apache.org on 2017/05/11 06:47:24 UTC

[3/3] hive git commit: HIVE-14412: Add timestamp with time zone (Rui Li reviewed by Xuefu Zhang, Pengcheng Xiong, Carter Shanklin, Ashutosh Chauhan)

HIVE-14412: Add timestamp with time zone (Rui Li reviewed by Xuefu Zhang, Pengcheng Xiong, Carter Shanklin, Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6b6a00ff
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6b6a00ff
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6b6a00ff

Branch: refs/heads/master
Commit: 6b6a00ffb0dae651ef407a99bab00d5e74f0d6aa
Parents: ee91b8e
Author: Rui Li <li...@apache.org>
Authored: Thu May 11 14:46:57 2017 +0800
Committer: Rui Li <li...@apache.org>
Committed: Thu May 11 14:46:57 2017 +0800

----------------------------------------------------------------------
 .../hadoop/hive/common/type/TimestampTZ.java    | 197 +++++++++
 .../hive/common/type/TestTimestampTZ.java       | 102 +++++
 .../test/queries/clientnegative/serde_regex.q   |   4 +-
 .../test/queries/clientpositive/serde_regex.q   |   6 +-
 .../results/clientnegative/serde_regex.q.out    |   6 +-
 .../results/clientpositive/serde_regex.q.out    |  12 +-
 .../src/test/queries/positive/hbase_timestamp.q |  26 +-
 .../test/results/positive/hbase_timestamp.q.out |  52 +--
 .../queries/clientpositive/orc_format_part.q    |  12 +-
 .../clientpositive/orc_nonstd_partitions_loc.q  |  14 +-
 .../queries/clientpositive/rcfile_format_part.q |  12 +-
 .../rcfile_nonstd_partitions_loc.q              |  14 +-
 .../clientpositive/orc_format_part.q.out        |  24 +-
 .../orc_nonstd_partitions_loc.q.out             |  28 +-
 .../clientpositive/rcfile_format_part.q.out     |  24 +-
 .../rcfile_nonstd_partitions_loc.q.out          |  28 +-
 .../org/apache/hive/jdbc/HiveBaseResultSet.java |   3 +
 .../java/org/apache/hive/jdbc/JdbcColumn.java   |  11 +
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |   4 +-
 .../hadoop/hive/ql/exec/GroupByOperator.java    |   4 +-
 .../hive/ql/exec/SerializationUtilities.java    |  18 +
 .../calcite/translator/TypeConverter.java       |   3 +
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |   1 +
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |   3 +
 .../apache/hadoop/hive/ql/parse/HiveParser.g    |   6 +
 .../hadoop/hive/ql/parse/IdentifiersParser.g    |   2 +
 .../hive/ql/parse/TypeCheckProcFactory.java     |   2 +
 .../apache/hadoop/hive/ql/stats/StatsUtils.java |  16 +-
 .../apache/hadoop/hive/ql/udf/UDFToBoolean.java |   1 +
 .../apache/hadoop/hive/ql/udf/UDFToString.java  |  10 +
 .../hadoop/hive/ql/udf/generic/GenericUDF.java  |   3 +
 .../hive/ql/udf/generic/GenericUDFDate.java     |   2 +
 .../ql/udf/generic/GenericUDFToTimestampTZ.java |  89 ++++
 .../TestSQL11ReservedKeyWordsNegative.java      |  17 +-
 .../test/queries/clientnegative/serde_regex.q   |   2 +-
 .../test/queries/clientnegative/serde_regex2.q  |   4 +-
 .../test/queries/clientnegative/serde_regex3.q  |   2 +-
 .../test/queries/clientpositive/create_like.q   |   2 +-
 ql/src/test/queries/clientpositive/join43.q     |  38 +-
 .../test/queries/clientpositive/serde_regex.q   |   8 +-
 .../test/queries/clientpositive/timestamptz.q   |  11 +
 .../test/queries/clientpositive/timestamptz_1.q |  25 ++
 .../test/queries/clientpositive/timestamptz_2.q |  19 +
 .../results/clientnegative/serde_regex.q.out    |   2 +-
 .../results/clientnegative/serde_regex2.q.out   |   6 +-
 .../results/clientnegative/serde_regex3.q.out   |   2 +-
 .../results/clientpositive/create_like.q.out    |   4 +-
 ql/src/test/results/clientpositive/join43.q.out |  76 ++--
 .../results/clientpositive/serde_regex.q.out    |  16 +-
 .../results/clientpositive/timestamptz.q.out    | 124 ++++++
 .../results/clientpositive/timestamptz_1.q.out  | 156 +++++++
 .../results/clientpositive/timestamptz_2.q.out  |  78 ++++
 serde/if/serde.thrift                           |   2 +
 .../src/gen/thrift/gen-cpp/serde_constants.cpp  |   3 +
 serde/src/gen/thrift/gen-cpp/serde_constants.h  |   1 +
 .../hadoop/hive/serde/serdeConstants.java       |   3 +
 .../org/apache/hadoop/hive/serde/Types.php      |   6 +
 .../org_apache_hadoop_hive_serde/constants.py   |   2 +
 serde/src/gen/thrift/gen-rb/serde_constants.rb  |   3 +
 .../apache/hadoop/hive/serde2/SerDeUtils.java   |   7 +
 .../binarysortable/BinarySortableSerDe.java     |  26 +-
 .../hive/serde2/io/TimestampTZWritable.java     | 427 +++++++++++++++++++
 .../hadoop/hive/serde2/lazy/LazyFactory.java    |   3 +
 .../hive/serde2/lazy/LazyTimestampTZ.java       |  91 ++++
 .../hadoop/hive/serde2/lazy/LazyUtils.java      |  11 +-
 .../LazyPrimitiveObjectInspectorFactory.java    |   4 +
 .../LazyTimestampTZObjectInspector.java         |  43 ++
 .../serde2/lazybinary/LazyBinaryFactory.java    |   3 +
 .../hive/serde2/lazybinary/LazyBinarySerDe.java |   7 +
 .../lazybinary/LazyBinaryTimestampTZ.java       |  36 ++
 .../hive/serde2/lazybinary/LazyBinaryUtils.java |   5 +
 .../ObjectInspectorConverters.java              |   4 +
 .../objectinspector/ObjectInspectorUtils.java   |  19 +
 .../PrimitiveObjectInspector.java               |   4 +-
 .../JavaTimestampTZObjectInspector.java         |  76 ++++
 .../PrimitiveObjectInspectorConverter.java      |  26 ++
 .../PrimitiveObjectInspectorFactory.java        |  14 +
 .../PrimitiveObjectInspectorUtils.java          | 104 +++--
 .../SettableTimestampTZObjectInspector.java     |  34 ++
 .../primitive/TimestampTZObjectInspector.java   |  29 ++
 ...tableConstantTimestampTZObjectInspector.java |  36 ++
 .../WritableTimestampTZObjectInspector.java     |  79 ++++
 .../apache/hadoop/hive/serde2/thrift/Type.java  |   8 +
 .../hive/serde2/typeinfo/TypeInfoFactory.java   |   3 +
 .../hive/serde2/io/TestTimestampTZWritable.java | 102 +++++
 service-rpc/if/TCLIService.thrift               |  10 +-
 .../thrift/gen-cpp/TCLIService_constants.cpp    |   2 +
 .../gen/thrift/gen-cpp/TCLIService_types.cpp    |  16 +-
 .../src/gen/thrift/gen-cpp/TCLIService_types.h  |   6 +-
 .../rpc/thrift/TCLIServiceConstants.java        |   2 +
 .../service/rpc/thrift/TProtocolVersion.java    |   5 +-
 .../apache/hive/service/rpc/thrift/TTypeId.java |   5 +-
 service-rpc/src/gen/thrift/gen-php/Types.php    |   6 +
 .../gen/thrift/gen-py/TCLIService/constants.py  |   2 +
 .../src/gen/thrift/gen-py/TCLIService/ttypes.py |   6 +
 .../thrift/gen-rb/t_c_l_i_service_constants.rb  |   2 +
 .../gen/thrift/gen-rb/t_c_l_i_service_types.rb  |  10 +-
 .../apache/hive/service/cli/ColumnValue.java    |  11 +
 .../apache/hive/service/cli/TypeDescriptor.java |   2 +
 99 files changed, 2360 insertions(+), 277 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZ.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZ.java b/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZ.java
new file mode 100644
index 0000000..ed83871
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZ.java
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.Timestamp;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.time.DateTimeException;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalTime;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeFormatterBuilder;
+import java.time.format.DateTimeParseException;
+import java.time.format.TextStyle;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAccessor;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * This is the internal type for Timestamp with time zone.
+ * A wrapper of ZonedDateTime which automatically converts the zone to UTC.
+ * The fully qualified input format of Timestamp with time zone is
+ * "yyyy-MM-dd HH:mm:ss[.SSS...] zoneid/zoneoffset", where the time and zone parts are optional.
+ * If the time part is absent, a default '00:00:00.0' is used.
+ * If the zone part is absent, the system time zone is used.
+ * All timestamp with time zone values are converted and stored in UTC, retaining the instant.
+ * E.g. "2017-04-14 18:00:00 Asia/Shanghai" will be converted to
+ * "2017-04-14 10:00:00.0 Z".
+ */
+public class TimestampTZ implements Comparable<TimestampTZ> {
+
+  private static final DateTimeFormatter formatter;
+  private static final ZoneId UTC = ZoneOffset.UTC;
+  private static final ZonedDateTime EPOCH = ZonedDateTime.ofInstant(Instant.EPOCH, UTC);
+  private static final LocalTime DEFAULT_LOCAL_TIME = LocalTime.of(0, 0);
+  private static final Pattern SINGLE_DIGIT_PATTERN = Pattern.compile("[\\+-]\\d:\\d\\d");
+  private static final Logger LOG = LoggerFactory.getLogger(TimestampTZ.class);
+
+  private static final ThreadLocal<DateFormat> CONVERT_FORMATTER =
+      ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
+
+  static {
+    DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
+    // Date part
+    builder.append(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
+    // Time part
+    builder.optionalStart().appendLiteral(" ").append(DateTimeFormatter.ofPattern("HH:mm:ss")).
+        optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 1, 9, true).
+        optionalEnd().optionalEnd();
+
+    // Zone part
+    builder.optionalStart().appendLiteral(" ").optionalEnd();
+    builder.optionalStart().appendZoneText(TextStyle.NARROW).optionalEnd();
+
+    formatter = builder.toFormatter();
+  }
+
+  private ZonedDateTime zonedDateTime;
+
+  public TimestampTZ() {
+    this(EPOCH);
+  }
+
+  public TimestampTZ(ZonedDateTime zonedDateTime) {
+    setZonedDateTime(zonedDateTime);
+  }
+
+  public TimestampTZ(long seconds, int nanos) {
+    set(seconds, nanos);
+  }
+
+  public void set(long seconds, int nanos) {
+    Instant instant = Instant.ofEpochSecond(seconds, nanos);
+    setZonedDateTime(ZonedDateTime.ofInstant(instant, UTC));
+  }
+
+  public ZonedDateTime getZonedDateTime() {
+    return zonedDateTime;
+  }
+
+  public void setZonedDateTime(ZonedDateTime zonedDateTime) {
+    this.zonedDateTime = zonedDateTime != null ? zonedDateTime.withZoneSameInstant(UTC) : EPOCH;
+  }
+
+  @Override
+  public String toString() {
+    return zonedDateTime.format(formatter);
+  }
+
+  @Override
+  public int hashCode() {
+    return zonedDateTime.toInstant().hashCode();
+  }
+
+  @Override
+  public boolean equals(Object other) {
+    if (other instanceof TimestampTZ) {
+      return compareTo((TimestampTZ) other) == 0;
+    }
+    return false;
+  }
+
+  @Override
+  public int compareTo(TimestampTZ o) {
+    return zonedDateTime.toInstant().compareTo(o.zonedDateTime.toInstant());
+  }
+
+  public long getEpochSecond() {
+    return zonedDateTime.toInstant().getEpochSecond();
+  }
+
+  public int getNanos() {
+    return zonedDateTime.toInstant().getNano();
+  }
+
+  public static TimestampTZ parse(String s) {
+    // need to handle offsets with a single-digit hour, see JDK-8066806
+    s = handleSingleDigitHourOffset(s);
+    ZonedDateTime zonedDateTime;
+    try {
+      zonedDateTime = ZonedDateTime.parse(s, formatter);
+    } catch (DateTimeParseException e) {
+      // try to be more tolerant
+      // if the input is invalid rather than merely incomplete, we'll hit an exception here again
+      TemporalAccessor accessor = formatter.parse(s);
+      // LocalDate must be present
+      LocalDate localDate = LocalDate.from(accessor);
+      LocalTime localTime;
+      ZoneId zoneId;
+      try {
+        localTime = LocalTime.from(accessor);
+      } catch (DateTimeException e1) {
+        localTime = DEFAULT_LOCAL_TIME;
+      }
+      try {
+        zoneId = ZoneId.from(accessor);
+      } catch (DateTimeException e2) {
+        // TODO: in future this may come from user specified zone (via set time zone command)
+        zoneId = ZoneId.systemDefault();
+      }
+      zonedDateTime = ZonedDateTime.of(localDate, localTime, zoneId);
+    }
+
+    return new TimestampTZ(zonedDateTime);
+  }
+
+  private static String handleSingleDigitHourOffset(String s) {
+    Matcher matcher = SINGLE_DIGIT_PATTERN.matcher(s);
+    if (matcher.find()) {
+      int index = matcher.start() + 1;
+      s = s.substring(0, index) + "0" + s.substring(index, s.length());
+    }
+    return s;
+  }
+
+  public static TimestampTZ parseOrNull(String s) {
+    try {
+      return parse(s);
+    } catch (DateTimeParseException e) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Invalid string " + s + " for TIMESTAMP WITH TIME ZONE", e);
+      }
+      return null;
+    }
+  }
+
+  // Converts Date to TimestampTZ. The conversion is done text-wise since
+  // Date/Timestamp should be treated as a description of a date/time.
+  public static TimestampTZ convert(java.util.Date date) {
+    String s = date instanceof Timestamp ? date.toString() : CONVERT_FORMATTER.get().format(date);
+    // TODO: in future this may come from user specified zone (via set time zone command)
+    return parse(s + " " + ZoneId.systemDefault().getId());
+  }
+}
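
A minimal usage sketch of the new type, built only from the public methods
added above (parse, toString, equals, getEpochSecond, getNanos). The sample
strings come from the class javadoc and the tests that follow; the demo
class itself is illustrative and not part of the commit:

    import org.apache.hadoop.hive.common.type.TimestampTZ;

    public class TimestampTZDemo {
      public static void main(String[] args) {
        // Fully qualified input: date, time and zone. The value is
        // normalized to UTC on construction, retaining the instant.
        TimestampTZ tstz = TimestampTZ.parse("2017-04-14 18:00:00 Asia/Shanghai");
        System.out.println(tstz);                // 2017-04-14 10:00:00.0 Z

        // Time and zone parts are optional; a missing time defaults to
        // 00:00:00.0 and a missing zone falls back to the system zone.
        TimestampTZ dateOnly = TimestampTZ.parse("2017-05-20");
        System.out.println(dateOnly.getNanos()); // 0

        // Single-digit hour offsets such as "-3:00" are zero-padded
        // internally before parsing, to work around JDK-8066806.
        TimestampTZ offset = TimestampTZ.parse("2017-05-08 07:45:00-3:00");

        // The (epoch second, nanos) pair round-trips through the
        // (long, int) constructor.
        TimestampTZ copy = new TimestampTZ(tstz.getEpochSecond(), tstz.getNanos());
        System.out.println(copy.equals(tstz));   // true
      }
    }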

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java b/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
new file mode 100644
index 0000000..739850a
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
@@ -0,0 +1,102 @@
+package org.apache.hadoop.hive.common.type;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.sql.Timestamp;
+import java.time.format.DateTimeParseException;
+import java.util.TimeZone;
+
+public class TestTimestampTZ {
+  @Test
+  public void testConvertToUTC() {
+    String s = "2017-04-14 18:00:00 Asia/Shanghai";
+    TimestampTZ timestampTZ = TimestampTZ.parse(s);
+    Assert.assertEquals("2017-04-14 10:00:00.0 Z", timestampTZ.toString());
+  }
+
+  @Test
+  public void testComparison() {
+    String s1 = "2017-04-14 18:00:00 Asia/Shanghai";
+    String s2 = "2017-04-14 10:00:00.00 GMT";
+    String s3 = "2017-04-14 18:00:00 UTC+08:00";
+    String s4 = "2017-04-14 18:00:00 Europe/London";
+    TimestampTZ tstz1 = TimestampTZ.parse(s1);
+    TimestampTZ tstz2 = TimestampTZ.parse(s2);
+    TimestampTZ tstz3 = TimestampTZ.parse(s3);
+    TimestampTZ tstz4 = TimestampTZ.parse(s4);
+
+    Assert.assertEquals(tstz1, tstz2);
+    Assert.assertEquals(tstz1, tstz3);
+    Assert.assertEquals(tstz1.hashCode(), tstz2.hashCode());
+    Assert.assertEquals(tstz1.hashCode(), tstz3.hashCode());
+    Assert.assertTrue(tstz1.compareTo(tstz4) < 0);
+  }
+
+  @Test
+  public void testDST() {
+    String s1 = "2005-04-03 02:01:00 America/Los_Angeles";
+    String s2 = "2005-04-03 03:01:00 America/Los_Angeles";
+    Assert.assertEquals(TimestampTZ.parse(s1), TimestampTZ.parse(s2));
+  }
+
+  @Test
+  public void testFromToInstant() {
+    String s1 = "2017-04-14 18:00:00 UTC";
+    TimestampTZ tstz = TimestampTZ.parse(s1);
+    long seconds = tstz.getEpochSecond();
+    int nanos = tstz.getNanos();
+    Assert.assertEquals(tstz, new TimestampTZ(seconds, nanos));
+
+    nanos += 123000000;
+    Assert.assertEquals("2017-04-14 18:00:00.123 Z", new TimestampTZ(seconds, nanos).toString());
+
+    seconds -= 3;
+    Assert.assertEquals("2017-04-14 17:59:57.123 Z", new TimestampTZ(seconds, nanos).toString());
+  }
+
+  @Test
+  public void testVariations() {
+    // Omitting zone or time part is allowed
+    TimestampTZ.parse("2017-01-01 13:33:00");
+    TimestampTZ.parse("2017-11-08 Europe/London");
+    TimestampTZ.parse("2017-05-20");
+    TimestampTZ.parse("2017-11-08GMT");
+    TimestampTZ.parse("2017-10-11 GMT+8:00");
+    TimestampTZ.parse("2017-05-08 07:45:00-3:00");
+  }
+
+  @Test
+  public void testInvalidStrings() {
+    // invalid zone
+    try {
+      TimestampTZ.parse("2017-01-01 13:33:00 foo");
+      Assert.fail("Invalid timezone ID should cause exception");
+    } catch (DateTimeParseException e) {
+      // expected
+    }
+    // invalid time part
+    try {
+      TimestampTZ.parse("2017-01-01 13:33:61");
+      Assert.fail("Invalid time should cause exception");
+    } catch (DateTimeParseException e) {
+      // expected
+    }
+  }
+
+  @Test
+  public void testConvertFromTimestamp() {
+    TimeZone defaultZone = TimeZone.getDefault();
+    try {
+      // Use system zone when converting from timestamp to timestamptz
+      String s = "2017-06-12 23:12:56.34";
+      TimeZone.setDefault(TimeZone.getTimeZone("Europe/London"));
+      TimestampTZ tstz1 = TimestampTZ.convert(Timestamp.valueOf(s));
+      TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
+      TimestampTZ tstz2 = TimestampTZ.convert(Timestamp.valueOf(s));
+      Assert.assertTrue(tstz1.compareTo(tstz2) < 0);
+    } finally {
+      TimeZone.setDefault(defaultZone);
+    }
+  }
+}
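
Worth noting from these tests: equality, hashing and ordering all delegate
to the underlying Instant, so values written against different zones (or on
either side of a DST gap) that denote the same instant compare equal and
collide in hash-based structures. A small illustrative sketch, not part of
the commit:

    import java.util.HashSet;
    import java.util.Set;

    import org.apache.hadoop.hive.common.type.TimestampTZ;

    public class TimestampTZEqualityDemo {
      public static void main(String[] args) {
        // Three spellings of the same instant...
        TimestampTZ shanghai = TimestampTZ.parse("2017-04-14 18:00:00 Asia/Shanghai");
        TimestampTZ gmt      = TimestampTZ.parse("2017-04-14 10:00:00.00 GMT");
        TimestampTZ offset   = TimestampTZ.parse("2017-04-14 18:00:00 UTC+08:00");

        // ...collapse to one element, since equals()/hashCode() are
        // instant-based.
        Set<TimestampTZ> set = new HashSet<>();
        set.add(shanghai);
        set.add(gmt);
        set.add(offset);
        System.out.println(set.size()); // 1

        // 2005-04-03 02:01 America/Los_Angeles falls into the DST gap and
        // resolves to 03:01, so the two strings denote the same instant.
        System.out.println(TimestampTZ.parse("2005-04-03 02:01:00 America/Los_Angeles")
            .equals(TimestampTZ.parse("2005-04-03 03:01:00 America/Los_Angeles"))); // true
      }
    }

These semantics are what let the type flow through hash-based operators;
presumably the GroupByOperator and SerializationUtilities changes in this
commit exist for that reason.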

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/contrib/src/test/queries/clientnegative/serde_regex.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientnegative/serde_regex.q b/contrib/src/test/queries/clientnegative/serde_regex.q
index a676338..9d27768 100644
--- a/contrib/src/test/queries/clientnegative/serde_regex.q
+++ b/contrib/src/test/queries/clientnegative/serde_regex.q
@@ -8,7 +8,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status INT,
   size INT,
@@ -25,7 +25,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status INT,
   size INT,
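
The serde_regex, hbase_timestamp, orc_* and rcfile_* q-file changes below
all follow the same pattern as this one: with the grammar changes in this
commit (HiveLexer.g, HiveParser.g and TestSQL11ReservedKeyWordsNegative in
the diffstat above), "time" apparently becomes a keyword and can no longer
be used as a bare identifier, so the test schemas and queries now
backtick-quote it. From a client the quoting looks like this; a
hypothetical JDBC snippet assuming a local HiveServer2 at the default
port, not part of the commit:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class ReservedTimeColumnDemo {
      public static void main(String[] args) throws Exception {
        try (Connection conn =
                 DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement();
             // `time` must be backtick-quoted wherever it appears as an
             // identifier, exactly as in the updated q-files.
             ResultSet rs = stmt.executeQuery(
                 "SELECT `time` FROM serde_regex ORDER BY `time`")) {
          while (rs.next()) {
            System.out.println(rs.getString(1));
          }
        }
      }
    }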

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/contrib/src/test/queries/clientpositive/serde_regex.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/serde_regex.q b/contrib/src/test/queries/clientpositive/serde_regex.q
index d75d607..8aa3eda 100644
--- a/contrib/src/test/queries/clientpositive/serde_regex.q
+++ b/contrib/src/test/queries/clientpositive/serde_regex.q
@@ -6,7 +6,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size STRING,
@@ -23,7 +23,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size STRING,
@@ -39,4 +39,4 @@ STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex;
 LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex;
 
-SELECT * FROM serde_regex ORDER BY time;
\ No newline at end of file
+SELECT * FROM serde_regex ORDER BY `time`;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/contrib/src/test/results/clientnegative/serde_regex.q.out
----------------------------------------------------------------------
diff --git a/contrib/src/test/results/clientnegative/serde_regex.q.out b/contrib/src/test/results/clientnegative/serde_regex.q.out
index 58b1c02..5c5f594 100644
--- a/contrib/src/test/results/clientnegative/serde_regex.q.out
+++ b/contrib/src/test/results/clientnegative/serde_regex.q.out
@@ -9,7 +9,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status INT,
   size INT,
@@ -27,7 +27,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status INT,
   size INT,
@@ -60,7 +60,7 @@ PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status INT,
   size INT,

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/contrib/src/test/results/clientpositive/serde_regex.q.out
----------------------------------------------------------------------
diff --git a/contrib/src/test/results/clientpositive/serde_regex.q.out b/contrib/src/test/results/clientpositive/serde_regex.q.out
index 2984293..1ce89e1 100644
--- a/contrib/src/test/results/clientpositive/serde_regex.q.out
+++ b/contrib/src/test/results/clientpositive/serde_regex.q.out
@@ -3,7 +3,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size STRING,
@@ -21,7 +21,7 @@ CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size STRING,
@@ -54,7 +54,7 @@ PREHOOK: query: CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size STRING,
@@ -73,7 +73,7 @@ POSTHOOK: query: CREATE TABLE serde_regex(
   host STRING,
   identity STRING,
   `user` STRING,
-  time STRING,
+  `time` STRING,
   request STRING,
   status STRING,
   size STRING,
@@ -104,11 +104,11 @@ POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" I
 POSTHOOK: type: LOAD
 #### A masked pattern was here ####
 POSTHOOK: Output: default@serde_regex
-PREHOOK: query: SELECT * FROM serde_regex ORDER BY time
+PREHOOK: query: SELECT * FROM serde_regex ORDER BY `time`
 PREHOOK: type: QUERY
 PREHOOK: Input: default@serde_regex
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM serde_regex ORDER BY time
+POSTHOOK: query: SELECT * FROM serde_regex ORDER BY `time`
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@serde_regex
 #### A masked pattern was here ####

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/hbase-handler/src/test/queries/positive/hbase_timestamp.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_timestamp.q b/hbase-handler/src/test/queries/positive/hbase_timestamp.q
index 0350afe..6ae2c30 100644
--- a/hbase-handler/src/test/queries/positive/hbase_timestamp.q
+++ b/hbase-handler/src/test/queries/positive/hbase_timestamp.q
@@ -1,5 +1,5 @@
 DROP TABLE hbase_table;
-CREATE TABLE hbase_table (key string, value string, time timestamp)
+CREATE TABLE hbase_table (key string, value string, `time` timestamp)
   STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
   WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp");
 DESC extended hbase_table;
@@ -7,14 +7,14 @@ FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, value, "2012-02-23 10:14
 SELECT * FROM hbase_table;
 
 DROP TABLE hbase_table;
-CREATE TABLE hbase_table (key string, value string, time bigint)
+CREATE TABLE hbase_table (key string, value string, `time` bigint)
   STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
   WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp");
 FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, value, 1329959754000 WHERE (key % 17) = 0;
-SELECT key, value, cast(time as timestamp) FROM hbase_table;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table;
 
 DROP TABLE hbase_table;
-CREATE TABLE hbase_table (key string, value string, time bigint)
+CREATE TABLE hbase_table (key string, value string, `time` bigint)
   STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
   WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp");
 insert overwrite table hbase_table select key,value,ts FROM
@@ -25,23 +25,23 @@ insert overwrite table hbase_table select key,value,ts FROM
 ) T;
 
 explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000;
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` < 200000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` < 200000000000;
 
 explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000;
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000;
 
 explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000;
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000;
 
 explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000;
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000;
 
 DROP TABLE hbase_table;
-CREATE TABLE hbase_table(key string, value map<string, string>, time timestamp)
+CREATE TABLE hbase_table(key string, value map<string, string>, `time` timestamp)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:,:timestamp");
 FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, MAP("name", CONCAT(value, " Jr")), "2012-02-23 10:14:52" WHERE (key % 17) = 0;

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
index 3918121..e719b08 100644
--- a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
@@ -2,13 +2,13 @@ PREHOOK: query: DROP TABLE hbase_table
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: DROP TABLE hbase_table
 POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE hbase_table (key string, value string, time timestamp)
+PREHOOK: query: CREATE TABLE hbase_table (key string, value string, `time` timestamp)
   STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
   WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@hbase_table
-POSTHOOK: query: CREATE TABLE hbase_table (key string, value string, time timestamp)
+POSTHOOK: query: CREATE TABLE hbase_table (key string, value string, `time` timestamp)
   STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
   WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp")
 POSTHOOK: type: CREATETABLE
@@ -69,13 +69,13 @@ POSTHOOK: query: DROP TABLE hbase_table
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Input: default@hbase_table
 POSTHOOK: Output: default@hbase_table
-PREHOOK: query: CREATE TABLE hbase_table (key string, value string, time bigint)
+PREHOOK: query: CREATE TABLE hbase_table (key string, value string, `time` bigint)
   STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
   WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@hbase_table
-POSTHOOK: query: CREATE TABLE hbase_table (key string, value string, time bigint)
+POSTHOOK: query: CREATE TABLE hbase_table (key string, value string, `time` bigint)
   STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
   WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp")
 POSTHOOK: type: CREATETABLE
@@ -89,11 +89,11 @@ POSTHOOK: query: FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, value,
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@hbase_table
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table
+PREHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table
+POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
@@ -125,13 +125,13 @@ POSTHOOK: query: DROP TABLE hbase_table
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Input: default@hbase_table
 POSTHOOK: Output: default@hbase_table
-PREHOOK: query: CREATE TABLE hbase_table (key string, value string, time bigint)
+PREHOOK: query: CREATE TABLE hbase_table (key string, value string, `time` bigint)
   STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
   WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@hbase_table
-POSTHOOK: query: CREATE TABLE hbase_table (key string, value string, time bigint)
+POSTHOOK: query: CREATE TABLE hbase_table (key string, value string, `time` bigint)
   STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
   WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp")
 POSTHOOK: type: CREATETABLE
@@ -156,10 +156,10 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@hbase_table
 PREHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` < 200000000000
 PREHOOK: type: QUERY
 POSTHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` < 200000000000
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -193,21 +193,21 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000
+PREHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` < 200000000000
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000
+POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` < 200000000000
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
 165	val_165	1973-03-03 01:46:40
 396	val_396	1973-03-03 01:46:40
 PREHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000
 PREHOOK: type: QUERY
 POSTHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -241,11 +241,11 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000
+PREHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000
+POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
@@ -254,10 +254,10 @@ POSTHOOK: Input: default@hbase_table
 296	val_296	1976-05-03 12:33:20
 333	val_333	1976-05-03 12:33:20
 PREHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000
 PREHOOK: type: QUERY
 POSTHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -291,21 +291,21 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000
+PREHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000
+POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
 165	val_165	1973-03-03 01:46:40
 396	val_396	1973-03-03 01:46:40
 PREHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000
 PREHOOK: type: QUERY
 POSTHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -339,11 +339,11 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000
+PREHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000
+POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
@@ -359,13 +359,13 @@ POSTHOOK: query: DROP TABLE hbase_table
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Input: default@hbase_table
 POSTHOOK: Output: default@hbase_table
-PREHOOK: query: CREATE TABLE hbase_table(key string, value map<string, string>, time timestamp)
+PREHOOK: query: CREATE TABLE hbase_table(key string, value map<string, string>, `time` timestamp)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:,:timestamp")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@hbase_table
-POSTHOOK: query: CREATE TABLE hbase_table(key string, value map<string, string>, time timestamp)
+POSTHOOK: query: CREATE TABLE hbase_table(key string, value map<string, string>, `time` timestamp)
 STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
 WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:,:timestamp")
 POSTHOOK: type: CREATETABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/queries/clientpositive/orc_format_part.q
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/queries/clientpositive/orc_format_part.q b/itests/hive-blobstore/src/test/queries/clientpositive/orc_format_part.q
index 358eccd..20a0f09 100644
--- a/itests/hive-blobstore/src/test/queries/clientpositive/orc_format_part.q
+++ b/itests/hive-blobstore/src/test/queries/clientpositive/orc_format_part.q
@@ -4,7 +4,7 @@ DROP TABLE src_events;
 CREATE TABLE src_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -23,7 +23,7 @@ DROP TABLE orc_events;
 CREATE TABLE orc_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -46,22 +46,22 @@ SELECT COUNT(*) FROM orc_events WHERE run_date=20120921;
 SELECT COUNT(*) FROM orc_events WHERE run_date=20121121;
 
 INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201211, game_id, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211';
 SHOW PARTITIONS orc_events;
 SELECT COUNT(*) FROM orc_events;
 
 INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39;
 SELECT COUNT(*) FROM orc_events;
 
 INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change';
 SELECT COUNT(*) FROM orc_events;
 
 INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change';
 SELECT COUNT(*) FROM orc_events;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/queries/clientpositive/orc_nonstd_partitions_loc.q
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/queries/clientpositive/orc_nonstd_partitions_loc.q b/itests/hive-blobstore/src/test/queries/clientpositive/orc_nonstd_partitions_loc.q
index c462538..7e726fb 100644
--- a/itests/hive-blobstore/src/test/queries/clientpositive/orc_nonstd_partitions_loc.q
+++ b/itests/hive-blobstore/src/test/queries/clientpositive/orc_nonstd_partitions_loc.q
@@ -4,7 +4,7 @@ DROP TABLE src_events;
 CREATE TABLE src_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -23,7 +23,7 @@ DROP TABLE orc_events;
 CREATE TABLE orc_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -47,12 +47,12 @@ SELECT COUNT(*) FROM orc_events;
 ALTER TABLE orc_events ADD PARTITION (run_date=201211, game_id=39, event_name='hq_change')
 LOCATION '${hiveconf:test.blobstore.path.unique}/orc_nonstd_partitions_loc/orc_nonstd_loc/ns-part-1/';
 INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211';
 SHOW PARTITIONS orc_events;
 SELECT COUNT(*) FROM orc_events;
 INSERT INTO TABLE orc_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211';
 SHOW PARTITIONS orc_events;
 SELECT COUNT(*) FROM orc_events;
@@ -63,10 +63,10 @@ SET hive.merge.mapfiles=false;
 ALTER TABLE orc_events ADD PARTITION (run_date=201209, game_id=39, event_name='hq_change')
 LOCATION '${hiveconf:test.blobstore.path.unique}/orc_nonstd_partitions_loc/orc_nonstd_loc/ns-part-2/';
 INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209';
 INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209';
 SHOW PARTITIONS orc_events;
 SELECT COUNT(*) FROM orc_events;
@@ -82,7 +82,7 @@ SET hive.merge.mapfiles=true;
 ALTER TABLE orc_events ADD PARTITION (run_date=201207, game_id=39, event_name='hq_change')
 LOCATION '${hiveconf:test.blobstore.path.unique}/orc_nonstd_partitions_loc/orc_nonstd_loc/ns-part-3/';
 INSERT INTO TABLE orc_events PARTITION (run_date=201207, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209';
 SHOW PARTITIONS orc_events;
 SELECT COUNT(*) FROM orc_events;

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_format_part.q
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_format_part.q b/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_format_part.q
index c563d3a..1aa8c91 100644
--- a/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_format_part.q
+++ b/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_format_part.q
@@ -4,7 +4,7 @@ DROP TABLE src_events;
 CREATE TABLE src_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -23,7 +23,7 @@ DROP TABLE rcfile_events;
 CREATE TABLE rcfile_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -46,22 +46,22 @@ SELECT COUNT(*) FROM rcfile_events WHERE run_date=20120921;
 SELECT COUNT(*) FROM rcfile_events WHERE run_date=20121121;
 
 INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201211, game_id, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211';
 SHOW PARTITIONS rcfile_events;
 SELECT COUNT(*) FROM rcfile_events;
 
 INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39;
 SELECT COUNT(*) FROM rcfile_events;
 
 INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change';
 SELECT COUNT(*) FROM rcfile_events;
 
 INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change';
 SELECT COUNT(*) FROM rcfile_events;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_nonstd_partitions_loc.q
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_nonstd_partitions_loc.q b/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_nonstd_partitions_loc.q
index d17c281..a37317f 100644
--- a/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_nonstd_partitions_loc.q
+++ b/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_nonstd_partitions_loc.q
@@ -4,7 +4,7 @@ DROP TABLE src_events;
 CREATE TABLE src_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -23,7 +23,7 @@ DROP TABLE rcfile_events;
 CREATE TABLE rcfile_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -47,12 +47,12 @@ SELECT COUNT(*) FROM rcfile_events;
 ALTER TABLE rcfile_events ADD PARTITION (run_date=201211, game_id=39, event_name='hq_change')
 LOCATION '${hiveconf:test.blobstore.path.unique}/rcfile_nonstd_partitions_loc/rcfile_nonstd_loc/ns-part-1/';
 INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211';
 SHOW PARTITIONS rcfile_events;
 SELECT COUNT(*) FROM rcfile_events;
 INSERT INTO TABLE rcfile_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211';
 SHOW PARTITIONS rcfile_events;
 SELECT COUNT(*) FROM rcfile_events;
@@ -63,10 +63,10 @@ SET hive.merge.mapfiles=false;
 ALTER TABLE rcfile_events ADD PARTITION (run_date=201209, game_id=39, event_name='hq_change')
 LOCATION '${hiveconf:test.blobstore.path.unique}/rcfile_nonstd_partitions_loc/rcfile_nonstd_loc/ns-part-2/';
 INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209';
 INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209';
 SHOW PARTITIONS rcfile_events;
 SELECT COUNT(*) FROM rcfile_events;
@@ -82,7 +82,7 @@ SET hive.merge.mapfiles=true;
 ALTER TABLE rcfile_events ADD PARTITION (run_date=201207, game_id=39, event_name='hq_change')
 LOCATION '${hiveconf:test.blobstore.path.unique}/rcfile_nonstd_partitions_loc/rcfile_nonstd_loc/ns-part-3/';
 INSERT INTO TABLE rcfile_events PARTITION(run_date=201207,game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209';
 SHOW PARTITIONS rcfile_events;
 SELECT COUNT(*) FROM rcfile_events;

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/results/clientpositive/orc_format_part.q.out
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/results/clientpositive/orc_format_part.q.out b/itests/hive-blobstore/src/test/results/clientpositive/orc_format_part.q.out
index 5d1319f..1ef9810 100644
--- a/itests/hive-blobstore/src/test/results/clientpositive/orc_format_part.q.out
+++ b/itests/hive-blobstore/src/test/results/clientpositive/orc_format_part.q.out
@@ -5,7 +5,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE src_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -25,7 +25,7 @@ PREHOOK: Output: default@src_events
 POSTHOOK: query: CREATE TABLE src_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -57,7 +57,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE orc_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -75,7 +75,7 @@ PREHOOK: Output: default@orc_events
 POSTHOOK: query: CREATE TABLE orc_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -163,13 +163,13 @@ POSTHOOK: Input: default@orc_events
 #### A masked pattern was here ####
 100
 PREHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201211, game_id, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@orc_events@run_date=201211
 POSTHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201211, game_id, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -202,13 +202,13 @@ POSTHOOK: Input: default@orc_events
 #### A masked pattern was here ####
 300
 PREHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@orc_events@run_date=201209/game_id=39
 POSTHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -231,13 +231,13 @@ POSTHOOK: Input: default@orc_events
 #### A masked pattern was here ####
 350
 PREHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@orc_events@run_date=201209/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -252,13 +252,13 @@ POSTHOOK: Input: default@orc_events
 #### A masked pattern was here ####
 400
 PREHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@orc_events@run_date=201209/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/results/clientpositive/orc_nonstd_partitions_loc.q.out
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/results/clientpositive/orc_nonstd_partitions_loc.q.out b/itests/hive-blobstore/src/test/results/clientpositive/orc_nonstd_partitions_loc.q.out
index 70e72f7..9de4190 100644
--- a/itests/hive-blobstore/src/test/results/clientpositive/orc_nonstd_partitions_loc.q.out
+++ b/itests/hive-blobstore/src/test/results/clientpositive/orc_nonstd_partitions_loc.q.out
@@ -5,7 +5,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE src_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -25,7 +25,7 @@ PREHOOK: Output: default@src_events
 POSTHOOK: query: CREATE TABLE src_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -57,7 +57,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE orc_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -75,7 +75,7 @@ PREHOOK: Output: default@orc_events
 POSTHOOK: query: CREATE TABLE orc_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -156,13 +156,13 @@ POSTHOOK: Input: ### test.blobstore.path ###/orc_nonstd_partitions_loc/orc_nonst
 POSTHOOK: Output: default@orc_events
 POSTHOOK: Output: default@orc_events@run_date=201211/game_id=39/event_name=hq_change
 PREHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@orc_events@run_date=201211/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -187,13 +187,13 @@ POSTHOOK: Input: default@orc_events
 #### A masked pattern was here ####
 300
 PREHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@orc_events@run_date=201211/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -229,13 +229,13 @@ POSTHOOK: Input: ### test.blobstore.path ###/orc_nonstd_partitions_loc/orc_nonst
 POSTHOOK: Output: default@orc_events
 POSTHOOK: Output: default@orc_events@run_date=201209/game_id=39/event_name=hq_change
 PREHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@orc_events@run_date=201209/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -249,13 +249,13 @@ POSTHOOK: Lineage: orc_events PARTITION(run_date=201209,game_id=39,event_name=hq
 POSTHOOK: Lineage: orc_events PARTITION(run_date=201209,game_id=39,event_name=hq_change).uid SIMPLE [(src_events)src_events.FieldSchema(name:uid, type:bigint, comment:null), ]
 POSTHOOK: Lineage: orc_events PARTITION(run_date=201209,game_id=39,event_name=hq_change).user_id SIMPLE [(src_events)src_events.FieldSchema(name:user_id, type:bigint, comment:null), ]
 PREHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@orc_events@run_date=201209/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -356,13 +356,13 @@ POSTHOOK: Input: ### test.blobstore.path ###/orc_nonstd_partitions_loc/orc_nonst
 POSTHOOK: Output: default@orc_events
 POSTHOOK: Output: default@orc_events@run_date=201207/game_id=39/event_name=hq_change
 PREHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201207, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@orc_events@run_date=201207/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201207, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/results/clientpositive/rcfile_format_part.q.out
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/results/clientpositive/rcfile_format_part.q.out b/itests/hive-blobstore/src/test/results/clientpositive/rcfile_format_part.q.out
index bed10ab..defca3b 100644
--- a/itests/hive-blobstore/src/test/results/clientpositive/rcfile_format_part.q.out
+++ b/itests/hive-blobstore/src/test/results/clientpositive/rcfile_format_part.q.out
@@ -5,7 +5,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE src_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -25,7 +25,7 @@ PREHOOK: Output: default@src_events
 POSTHOOK: query: CREATE TABLE src_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -57,7 +57,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE rcfile_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -75,7 +75,7 @@ PREHOOK: Output: default@rcfile_events
 POSTHOOK: query: CREATE TABLE rcfile_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -163,13 +163,13 @@ POSTHOOK: Input: default@rcfile_events
 #### A masked pattern was here ####
 100
 PREHOOK: query: INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201211, game_id, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@rcfile_events@run_date=201211
 POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201211, game_id, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -202,13 +202,13 @@ POSTHOOK: Input: default@rcfile_events
 #### A masked pattern was here ####
 300
 PREHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@rcfile_events@run_date=201209/game_id=39
 POSTHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -231,13 +231,13 @@ POSTHOOK: Input: default@rcfile_events
 #### A masked pattern was here ####
 350
 PREHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@rcfile_events@run_date=201209/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -252,13 +252,13 @@ POSTHOOK: Input: default@rcfile_events
 #### A masked pattern was here ####
 400
 PREHOOK: query: INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@rcfile_events@run_date=201209/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/results/clientpositive/rcfile_nonstd_partitions_loc.q.out
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/results/clientpositive/rcfile_nonstd_partitions_loc.q.out b/itests/hive-blobstore/src/test/results/clientpositive/rcfile_nonstd_partitions_loc.q.out
index c6442f9..5db9c7e 100644
--- a/itests/hive-blobstore/src/test/results/clientpositive/rcfile_nonstd_partitions_loc.q.out
+++ b/itests/hive-blobstore/src/test/results/clientpositive/rcfile_nonstd_partitions_loc.q.out
@@ -5,7 +5,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE src_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -25,7 +25,7 @@ PREHOOK: Output: default@src_events
 POSTHOOK: query: CREATE TABLE src_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -57,7 +57,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE rcfile_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -75,7 +75,7 @@ PREHOOK: Output: default@rcfile_events
 POSTHOOK: query: CREATE TABLE rcfile_events
 (
   log_id      BIGINT,
-  time        BIGINT,
+  `time`        BIGINT,
   uid         BIGINT,
   user_id     BIGINT,
   type        INT,
@@ -156,13 +156,13 @@ POSTHOOK: Input: ### test.blobstore.path ###/rcfile_nonstd_partitions_loc/rcfile
 POSTHOOK: Output: default@rcfile_events
 POSTHOOK: Output: default@rcfile_events@run_date=201211/game_id=39/event_name=hq_change
 PREHOOK: query: INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@rcfile_events@run_date=201211/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -187,13 +187,13 @@ POSTHOOK: Input: default@rcfile_events
 #### A masked pattern was here ####
 300
 PREHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@rcfile_events@run_date=201211/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201211'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -229,13 +229,13 @@ POSTHOOK: Input: ### test.blobstore.path ###/rcfile_nonstd_partitions_loc/rcfile
 POSTHOOK: Output: default@rcfile_events
 POSTHOOK: Output: default@rcfile_events@run_date=201209/game_id=39/event_name=hq_change
 PREHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@rcfile_events@run_date=201209/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -249,13 +249,13 @@ POSTHOOK: Lineage: rcfile_events PARTITION(run_date=201209,game_id=39,event_name
 POSTHOOK: Lineage: rcfile_events PARTITION(run_date=201209,game_id=39,event_name=hq_change).uid SIMPLE [(src_events)src_events.FieldSchema(name:uid, type:bigint, comment:null), ]
 POSTHOOK: Lineage: rcfile_events PARTITION(run_date=201209,game_id=39,event_name=hq_change).user_id SIMPLE [(src_events)src_events.FieldSchema(name:user_id, type:bigint, comment:null), ]
 PREHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@rcfile_events@run_date=201209/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events
@@ -376,13 +376,13 @@ POSTHOOK: Input: ### test.blobstore.path ###/rcfile_nonstd_partitions_loc/rcfile
 POSTHOOK: Output: default@rcfile_events
 POSTHOOK: Output: default@rcfile_events@run_date=201207/game_id=39/event_name=hq_change
 PREHOOK: query: INSERT INTO TABLE rcfile_events PARTITION(run_date=201207,game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_events
 PREHOOK: Output: default@rcfile_events@run_date=201207/game_id=39/event_name=hq_change
 POSTHOOK: query: INSERT INTO TABLE rcfile_events PARTITION(run_date=201207,game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
 WHERE SUBSTR(run_date,1,6)='201209'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_events

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
----------------------------------------------------------------------
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
index ade1900..6742423 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
@@ -47,6 +47,7 @@ import java.util.Map;
 
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.serde2.thrift.Type;
 import org.apache.hive.service.cli.TableSchema;
 
@@ -442,6 +443,8 @@ public abstract class HiveBaseResultSet implements ResultSet {
         return value;
       case TIMESTAMP_TYPE:
         return Timestamp.valueOf((String) value);
+      case TIMESTAMPTZ_TYPE:
+        return TimestampTZ.parse((String) value);
       case DECIMAL_TYPE:
         return new BigDecimal((String)value);
       case DATE_TYPE:

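With the branch above in place, getObject() materializes TIMESTAMPTZ_TYPE
columns by running TimestampTZ.parse on the string that comes over the wire.
A hedged client-side sketch (the connection URL and the literal are
assumptions; the cast syntax comes from the HiveParser.g change later in this
commit):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class ReadTimestampTZ {
      public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                 "jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(
                 "SELECT cast('2017-05-11 14:46:57 GMT' AS timestamp with time zone)")) {
          while (rs.next()) {
            // The patched result set parses the raw value into a TimestampTZ.
            Object v = rs.getObject(1);
            System.out.println(v.getClass().getName() + " -> " + v);
          }
        }
      }
    }
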
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
----------------------------------------------------------------------
diff --git a/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java b/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
index 38918f0..bf42f0d 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
@@ -26,6 +26,7 @@ import java.sql.Types;
 
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.thrift.Type;
 
@@ -94,6 +95,8 @@ public class JdbcColumn {
         return Double.class.getName();
       case  Types.TIMESTAMP:
         return Timestamp.class.getName();
+      case Types.TIMESTAMP_WITH_TIMEZONE:
+        return TimestampTZ.class.getName();
       case Types.DECIMAL:
         return BigInteger.class.getName();
       case Types.BINARY:
@@ -142,6 +145,8 @@ public class JdbcColumn {
       return Type.DATE_TYPE;
     } else if ("timestamp".equalsIgnoreCase(type)) {
       return Type.TIMESTAMP_TYPE;
+    } else if (serdeConstants.TIMESTAMPTZ_TYPE_NAME.equalsIgnoreCase(type)) {
+      return Type.TIMESTAMPTZ_TYPE;
     } else if ("interval_year_month".equalsIgnoreCase(type)) {
       return Type.INTERVAL_YEAR_MONTH_TYPE;
     } else if ("interval_day_time".equalsIgnoreCase(type)) {
@@ -195,6 +200,8 @@ public class JdbcColumn {
       return serdeConstants.BIGINT_TYPE_NAME;
     } else if ("timestamp".equalsIgnoreCase(type)) {
       return serdeConstants.TIMESTAMP_TYPE_NAME;
+    } else if (serdeConstants.TIMESTAMPTZ_TYPE_NAME.equalsIgnoreCase(type)) {
+      return serdeConstants.TIMESTAMPTZ_TYPE_NAME;
     } else if ("date".equalsIgnoreCase(type)) {
       return serdeConstants.DATE_TYPE_NAME;
     } else if ("interval_year_month".equalsIgnoreCase(type)) {
@@ -240,6 +247,7 @@ public class JdbcColumn {
     case Types.DATE:
       return 10;
     case Types.TIMESTAMP:
+    case Types.TIMESTAMP_WITH_TIMEZONE:
       return columnPrecision(hiveType, columnAttributes);
 
     // see http://download.oracle.com/javase/6/docs/api/constant-values.html#java.lang.Float.MAX_EXPONENT
@@ -294,6 +302,8 @@ public class JdbcColumn {
       return 10;
     case Types.TIMESTAMP:
       return 29;
+    case Types.TIMESTAMP_WITH_TIMEZONE:
+      return 31;
     case Types.DECIMAL:
       return columnAttributes.precision;
     case Types.OTHER:
@@ -338,6 +348,7 @@ public class JdbcColumn {
     case Types.DOUBLE:
       return 15;
     case  Types.TIMESTAMP:
+    case Types.TIMESTAMP_WITH_TIMEZONE:
       return 9;
     case Types.DECIMAL:
       return columnAttributes.scale;

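Taken together, the JdbcColumn changes surface the new type through standard
JDBC metadata: java.sql.Types.TIMESTAMP_WITH_TIMEZONE as the SQL type, a
display size of 31, nanosecond (scale 9) precision, and TimestampTZ as the
column class. A small sketch of what a client should observe for such a
column (column index 1 is assumed):

    import java.sql.ResultSetMetaData;
    import java.sql.SQLException;
    import java.sql.Types;

    public class DescribeTzColumn {
      static void describe(ResultSetMetaData md) throws SQLException {
        System.out.println(md.getColumnType(1) == Types.TIMESTAMP_WITH_TIMEZONE);
        System.out.println(md.getColumnDisplaySize(1)); // 31, per columnDisplaySize
        System.out.println(md.getScale(1));             // 9 fractional-second digits
        System.out.println(md.getColumnClassName(1));   // ...common.type.TimestampTZ
      }
    }
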
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index bf18a8d..9795f3e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -402,6 +402,7 @@ public final class FunctionRegistry {
 
     system.registerGenericUDF(serdeConstants.DATE_TYPE_NAME, GenericUDFToDate.class);
     system.registerGenericUDF(serdeConstants.TIMESTAMP_TYPE_NAME, GenericUDFTimestamp.class);
+    system.registerGenericUDF(serdeConstants.TIMESTAMPTZ_TYPE_NAME, GenericUDFToTimestampTZ.class);
     system.registerGenericUDF(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME, GenericUDFToIntervalYearMonth.class);
     system.registerGenericUDF(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME, GenericUDFToIntervalDayTime.class);
     system.registerGenericUDF(serdeConstants.BINARY_TYPE_NAME, GenericUDFToBinary.class);
@@ -1536,7 +1537,8 @@ public final class FunctionRegistry {
         udfClass == UDFToShort.class || udfClass == UDFToString.class ||
         udfClass == GenericUDFToVarchar.class || udfClass == GenericUDFToChar.class ||
         udfClass == GenericUDFTimestamp.class || udfClass == GenericUDFToBinary.class ||
-        udfClass == GenericUDFToDate.class  || udfClass == GenericUDFToDecimal.class;
+        udfClass == GenericUDFToDate.class || udfClass == GenericUDFToDecimal.class ||
+        udfClass == GenericUDFToTimestampTZ.class;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
index f8b55da..af5e90f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
@@ -32,6 +32,7 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.llap.LlapDaemonInfo;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
@@ -471,6 +472,7 @@ public class GroupByOperator extends Operator<GroupByDesc> {
       keyPositionsSize.add(new Integer(pos));
       return javaObjectOverHead;
     case TIMESTAMP:
+    case TIMESTAMPTZ:
       return javaObjectOverHead + javaSizePrimitiveType;
     default:
       return javaSizeUnknownType;
@@ -503,7 +505,7 @@ public class GroupByOperator extends Operator<GroupByDesc> {
       return javaSizePrimitiveType;
     }
 
-    if (c.isInstance(new Timestamp(0))){
+    if (c.isInstance(new Timestamp(0)) || c.isInstance(new TimestampTZ())) {
       return javaObjectOverHead + javaSizePrimitiveType;
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
index 01a652d..a29dd85 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
@@ -38,6 +38,7 @@ import java.util.Properties;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.common.CopyOnFirstWriteProperties;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.vector.VectorFileSinkOperator;
@@ -223,6 +224,7 @@ public class SerializationUtilities {
       KryoWithHooks kryo = new KryoWithHooks();
       kryo.register(java.sql.Date.class, new SqlDateSerializer());
       kryo.register(java.sql.Timestamp.class, new TimestampSerializer());
+      kryo.register(TimestampTZ.class, new TimestampTZSerializer());
       kryo.register(Path.class, new PathSerializer());
       kryo.register(Arrays.asList("").getClass(), new ArraysAsListSerializer());
       kryo.register(CopyOnFirstWriteProperties.class, new CopyOnFirstWritePropertiesSerializer());
@@ -307,6 +309,22 @@ public class SerializationUtilities {
     }
   }
 
+  private static class TimestampTZSerializer extends com.esotericsoftware.kryo.Serializer<TimestampTZ> {
+
+    @Override
+    public void write(Kryo kryo, Output output, TimestampTZ object) {
+      output.writeLong(object.getEpochSecond());
+      output.writeInt(object.getNanos());
+    }
+
+    @Override
+    public TimestampTZ read(Kryo kryo, Input input, Class<TimestampTZ> type) {
+      long seconds = input.readLong();
+      int nanos = input.readInt();
+      return new TimestampTZ(seconds, nanos);
+    }
+  }
+
   /**
    * Custom Kryo serializer for sql date, otherwise Kryo gets confused between
    * java.sql.Date and java.util.Date while deserializing

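TimestampTZSerializer encodes a value as its epoch-second/nanosecond pair and
rebuilds it through the (long, int) constructor. A standalone round-trip of
that same encoding, using only the accessors the hunk itself relies on (Hive
wires the serializer up internally via the registration above; the sample
value is arbitrary):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;

    import com.esotericsoftware.kryo.io.Input;
    import com.esotericsoftware.kryo.io.Output;
    import org.apache.hadoop.hive.common.type.TimestampTZ;

    public class TimestampTZRoundTrip {
      public static void main(String[] args) {
        TimestampTZ src = new TimestampTZ(1494485217L, 123456789);

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        Output out = new Output(bos);
        out.writeLong(src.getEpochSecond());  // same fields the serializer writes
        out.writeInt(src.getNanos());
        out.close();

        Input in = new Input(new ByteArrayInputStream(bos.toByteArray()));
        TimestampTZ copy = new TimestampTZ(in.readLong(), in.readInt());
        in.close();

        // Expected to print true, assuming TimestampTZ defines value equality.
        System.out.println(src.equals(copy));
      }
    }
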
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
index 38308c9..2df7588 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
@@ -200,6 +200,9 @@ public class TypeConverter {
     case TIMESTAMP:
       convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP);
       break;
+    case TIMESTAMPTZ:
+      convertedType = dtFactory.createSqlType(SqlTypeName.OTHER);
+      break;
     case INTERVAL_YEAR_MONTH:
       convertedType = dtFactory.createSqlIntervalType(
           new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1,1)));

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 0cf9205..77bc12c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -195,6 +195,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     TokenToTypeName.put(HiveParser.TOK_DATE, serdeConstants.DATE_TYPE_NAME);
     TokenToTypeName.put(HiveParser.TOK_DATETIME, serdeConstants.DATETIME_TYPE_NAME);
     TokenToTypeName.put(HiveParser.TOK_TIMESTAMP, serdeConstants.TIMESTAMP_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_TIMESTAMPTZ, serdeConstants.TIMESTAMPTZ_TYPE_NAME);
     TokenToTypeName.put(HiveParser.TOK_INTERVAL_YEAR_MONTH, serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
     TokenToTypeName.put(HiveParser.TOK_INTERVAL_DAY_TIME, serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
     TokenToTypeName.put(HiveParser.TOK_DECIMAL, serdeConstants.DECIMAL_TYPE_NAME);

http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index 190b66b..cebe441 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -130,6 +130,9 @@ KW_PRECISION: 'PRECISION';
 KW_DATE: 'DATE';
 KW_DATETIME: 'DATETIME';
 KW_TIMESTAMP: 'TIMESTAMP';
+KW_TIMESTAMPTZ: 'TIMESTAMPTZ';
+KW_TIME: 'TIME';
+KW_ZONE: 'ZONE';
 KW_INTERVAL: 'INTERVAL';
 KW_DECIMAL: 'DECIMAL' | 'DEC';
 KW_STRING: 'STRING';

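These three tokens, combined with the primitiveType alternatives in the
HiveParser.g hunk below, allow the new type to be written either as the
shorthand TIMESTAMPTZ or as the SQL-standard TIMESTAMP WITH TIME ZONE; both
spellings reduce to the same TOK_TIMESTAMPTZ node. A hedged DDL sketch (the
table name is illustrative):

    import java.sql.SQLException;
    import java.sql.Statement;

    public class TzTypeSpellings {
      static void createDemo(Statement stmt) throws SQLException {
        // Both columns end up with the same underlying type.
        stmt.execute("CREATE TABLE tz_demo ("
            + "t1 TIMESTAMPTZ, "
            + "t2 TIMESTAMP WITH TIME ZONE)");
      }
    }
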
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 3136c93..218fa8a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -123,6 +123,7 @@ TOK_DATELITERAL;
 TOK_DATETIME;
 TOK_TIMESTAMP;
 TOK_TIMESTAMPLITERAL;
+TOK_TIMESTAMPTZ;
 TOK_INTERVAL_YEAR_MONTH;
 TOK_INTERVAL_YEAR_MONTH_LITERAL;
 TOK_INTERVAL_DAY_TIME;
@@ -490,6 +491,9 @@ import org.apache.hadoop.hive.conf.HiveConf;
     xlateMap.put("KW_DATE", "DATE");
     xlateMap.put("KW_DATETIME", "DATETIME");
     xlateMap.put("KW_TIMESTAMP", "TIMESTAMP");
+    xlateMap.put("KW_TIMESTAMPTZ", "TIMESTAMPTZ");
+    xlateMap.put("KW_TIME", "TIME");
+    xlateMap.put("KW_ZONE", "ZONE");
     xlateMap.put("KW_STRING", "STRING");
     xlateMap.put("KW_BINARY", "BINARY");
     xlateMap.put("KW_ARRAY", "ARRAY");
@@ -2356,6 +2360,8 @@ primitiveType
     | KW_DATE          ->    TOK_DATE
     | KW_DATETIME      ->    TOK_DATETIME
     | KW_TIMESTAMP     ->    TOK_TIMESTAMP
+    | KW_TIMESTAMPTZ   ->    TOK_TIMESTAMPTZ
+    | KW_TIMESTAMP KW_WITH KW_TIME KW_ZONE -> TOK_TIMESTAMPTZ
     // Uncomment to allow intervals as table column types
     //| KW_INTERVAL KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH
     //| KW_INTERVAL KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME