Posted to commits@hive.apache.org by li...@apache.org on 2017/05/11 06:47:22 UTC
[1/3] hive git commit: HIVE-14412: Add timestamp with time zone (Rui Li, reviewed by Xuefu Zhang, Pengcheng Xiong, Carter Shanklin, Ashutosh Chauhan)
Repository: hive
Updated Branches:
refs/heads/master ee91b8ec5 -> 6b6a00ffb
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestampTZ.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestampTZ.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestampTZ.java
new file mode 100644
index 0000000..df5c586
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestampTZ.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazy;
+
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampTZObjectInspector;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.UnsupportedEncodingException;
+import java.time.format.DateTimeParseException;
+
+/**
+ * LazyPrimitive for TimestampTZ. Similar to LazyTimestamp.
+ */
+public class LazyTimestampTZ extends
+ LazyPrimitive<LazyTimestampTZObjectInspector, TimestampTZWritable> {
+
+ private static final Logger LOG = LoggerFactory.getLogger(LazyTimestampTZ.class);
+
+ public LazyTimestampTZ(LazyTimestampTZObjectInspector lazyTimestampTZObjectInspector) {
+ super(lazyTimestampTZObjectInspector);
+ data = new TimestampTZWritable();
+ }
+
+ public LazyTimestampTZ(LazyTimestampTZ copy) {
+ super(copy);
+ data = new TimestampTZWritable(copy.data);
+ }
+
+ @Override
+ public void init(ByteArrayRef bytes, int start, int length) {
+ String s = null;
+ if (!LazyUtils.isDateMaybe(bytes.getData(), start, length)) {
+ isNull = true;
+ return;
+ }
+
+ TimestampTZ t = null;
+ try {
+ s = new String(bytes.getData(), start, length, "US-ASCII");
+ if (s.equals("NULL")) {
+ isNull = true;
+ logExceptionMessage(bytes, start, length,
+ serdeConstants.TIMESTAMPTZ_TYPE_NAME.toUpperCase());
+ } else {
+ t = TimestampTZ.parse(s);
+ isNull = false;
+ }
+ } catch (UnsupportedEncodingException e) {
+ isNull = true;
+ LOG.error("Unsupported encoding found ", e);
+ } catch (DateTimeParseException e) {
+ isNull = true;
+ logExceptionMessage(bytes, start, length, serdeConstants.TIMESTAMPTZ_TYPE_NAME.toUpperCase());
+ }
+ data.set(t);
+ }
+
+ @Override
+ public TimestampTZWritable getWritableObject() {
+ return data;
+ }
+
+ public static void writeUTF8(OutputStream out, TimestampTZWritable i) throws IOException {
+ if (i == null) {
+ out.write(TimestampTZWritable.nullBytes);
+ } else {
+ out.write(i.toString().getBytes("US-ASCII"));
+ }
+ }
+}
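A usage sketch (illustration only, not part of the commit) of how the new lazy wrapper is driven. It assumes the classes added in this patch are on the classpath and uses the "yyyy-MM-dd HH:mm:ss[.ff] <zone>" text form exercised by the tests further down:

  import java.nio.charset.StandardCharsets;
  import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
  import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
  import org.apache.hadoop.hive.serde2.lazy.LazyTimestampTZ;
  import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;

  public class LazyTimestampTZDemo {
    public static void main(String[] args) {
      byte[] field = "2017-04-14 18:00:00 Asia/Shanghai".getBytes(StandardCharsets.US_ASCII);
      ByteArrayRef ref = new ByteArrayRef();
      ref.setData(field);
      LazyTimestampTZ lazy = new LazyTimestampTZ(
          LazyPrimitiveObjectInspectorFactory.LAZY_TIMESTAMPTZ_OBJECT_INSPECTOR);
      lazy.init(ref, 0, field.length);   // parses the bytes; malformed input sets isNull
      TimestampTZWritable w = lazy.getWritableObject();
      System.out.println(w);             // prints the normalized value
    }
  }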
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
index 73c72e1..c811753 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
@@ -47,6 +47,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspect
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampTZObjectInspector;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
@@ -229,7 +230,8 @@ public final class LazyUtils {
PrimitiveObjectInspector oi, boolean escaped, byte escapeChar,
boolean[] needsEscape) throws IOException {
- switch (oi.getPrimitiveCategory()) {
+ PrimitiveObjectInspector.PrimitiveCategory category = oi.getPrimitiveCategory();
+ switch (category) {
case BOOLEAN: {
boolean b = ((BooleanObjectInspector) oi).get(o);
if (b) {
@@ -305,6 +307,11 @@ public final class LazyUtils {
((TimestampObjectInspector) oi).getPrimitiveWritableObject(o));
break;
}
+ case TIMESTAMPTZ: {
+ LazyTimestampTZ.writeUTF8(out, ((TimestampTZObjectInspector) oi).
+ getPrimitiveWritableObject(o));
+ break;
+ }
case INTERVAL_YEAR_MONTH: {
LazyHiveIntervalYearMonth.writeUTF8(out,
((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveWritableObject(o));
@@ -322,7 +329,7 @@ public final class LazyUtils {
break;
}
default: {
- throw new RuntimeException("Hive internal error.");
+ throw new RuntimeException("Unknown primitive type: " + category);
}
}
}
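The new TIMESTAMPTZ case delegates to LazyTimestampTZ.writeUTF8. A minimal sketch of that helper in isolation (hypothetical driver code, again assuming the patch's classes on the classpath):

  import java.io.ByteArrayOutputStream;
  import org.apache.hadoop.hive.common.type.TimestampTZ;
  import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
  import org.apache.hadoop.hive.serde2.lazy.LazyTimestampTZ;

  public class WriteUTF8Demo {
    public static void main(String[] args) throws Exception {
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      TimestampTZWritable w =
          new TimestampTZWritable(TimestampTZ.parse("2017-04-14 10:00:00.00 GMT"));
      LazyTimestampTZ.writeUTF8(out, w);     // writes w.toString() in US-ASCII
      LazyTimestampTZ.writeUTF8(out, null);  // writes the shared null marker
      System.out.println(out.toString("US-ASCII"));
    }
  }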
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
index 5601734..6d1ee1e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
@@ -71,6 +71,8 @@ public final class LazyPrimitiveObjectInspectorFactory {
new LazyDateObjectInspector();
public static final LazyTimestampObjectInspector LAZY_TIMESTAMP_OBJECT_INSPECTOR =
new LazyTimestampObjectInspector();
+ public static final LazyTimestampTZObjectInspector LAZY_TIMESTAMPTZ_OBJECT_INSPECTOR =
+ new LazyTimestampTZObjectInspector();
public static final LazyHiveIntervalYearMonthObjectInspector LAZY_INTERVAL_YEAR_MONTH_OBJECT_INSPECTOR =
new LazyHiveIntervalYearMonthObjectInspector();
public static final LazyHiveIntervalDayTimeObjectInspector LAZY_INTERVAL_DAY_TIME_OBJECT_INSPECTOR =
@@ -111,6 +113,8 @@ public final class LazyPrimitiveObjectInspectorFactory {
LAZY_DATE_OBJECT_INSPECTOR);
cachedPrimitiveLazyObjectInspectors.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME),
LAZY_TIMESTAMP_OBJECT_INSPECTOR);
+ cachedPrimitiveLazyObjectInspectors.put(TypeInfoFactory.getPrimitiveTypeInfo(
+ serdeConstants.TIMESTAMPTZ_TYPE_NAME), LAZY_TIMESTAMPTZ_OBJECT_INSPECTOR);
cachedPrimitiveLazyObjectInspectors.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME),
LAZY_INTERVAL_YEAR_MONTH_OBJECT_INSPECTOR);
cachedPrimitiveLazyObjectInspectors.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME),
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampTZObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampTZObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampTZObjectInspector.java
new file mode 100644
index 0000000..7336385
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampTZObjectInspector.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
+import org.apache.hadoop.hive.serde2.lazy.LazyTimestampTZ;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampTZObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+public class LazyTimestampTZObjectInspector
+ extends AbstractPrimitiveLazyObjectInspector<TimestampTZWritable>
+ implements TimestampTZObjectInspector {
+
+ public LazyTimestampTZObjectInspector() {
+ super(TypeInfoFactory.timestampTZTypeInfo);
+ }
+
+ @Override
+ public TimestampTZ getPrimitiveJavaObject(Object o) {
+ return o == null ? null : ((LazyTimestampTZ) o).getWritableObject().getTimestampTZ();
+ }
+
+ @Override
+ public Object copyObject(Object o) {
+ return o == null ? null : new LazyTimestampTZ((LazyTimestampTZ) o);
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
index 52f3527..16609bb 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObje
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampTZObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableVoidObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -86,6 +87,8 @@ public final class LazyBinaryFactory {
return new LazyBinaryDate((WritableDateObjectInspector) oi);
case TIMESTAMP:
return new LazyBinaryTimestamp((WritableTimestampObjectInspector) oi);
+ case TIMESTAMPTZ:
+ return new LazyBinaryTimestampTZ((WritableTimestampTZObjectInspector) oi);
case INTERVAL_YEAR_MONTH:
return new LazyBinaryHiveIntervalYearMonth((WritableHiveIntervalYearMonthObjectInspector) oi);
case INTERVAL_DAY_TIME:
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
index 56b4ca3..7cdedd6 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
@@ -24,6 +24,8 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampTZObjectInspector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@@ -513,6 +515,11 @@ public class LazyBinarySerDe extends AbstractSerDe {
t.writeToByteStream(byteStream);
return;
}
+ case TIMESTAMPTZ: {
+ TimestampTZWritable t = ((TimestampTZObjectInspector) poi).getPrimitiveWritableObject(obj);
+ t.writeToByteStream(byteStream);
+ return;
+ }
case INTERVAL_YEAR_MONTH: {
HiveIntervalYearMonthWritable intervalYearMonth =
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestampTZ.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestampTZ.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestampTZ.java
new file mode 100644
index 0000000..6d9ca6e
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryTimestampTZ.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.lazybinary;
+
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampTZObjectInspector;
+
+public class LazyBinaryTimestampTZ extends
+ LazyBinaryPrimitive<WritableTimestampTZObjectInspector, TimestampTZWritable> {
+
+ public LazyBinaryTimestampTZ(WritableTimestampTZObjectInspector oi) {
+ super(oi);
+ data = new TimestampTZWritable();
+ }
+
+ @Override
+ public void init(ByteArrayRef bytes, int start, int length) {
+ data.set(bytes.getData(), start);
+ }
+}
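The binary path round-trips through TimestampTZWritable's byte form. A fragment mirroring verifyConversion in the test added at the end of this message (assumes the relevant serde2 imports in scope):

  TimestampTZWritable src =
      new TimestampTZWritable(TimestampTZ.parse("2017-04-14 18:00:00 UTC+08:00"));
  byte[] bytes = src.getBytes();                 // serialized representation
  TimestampTZWritable dest = new TimestampTZWritable(bytes, 0);
  assert src.equals(dest);                       // same instant, same nanos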
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
index 8237b64..5666516 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
@@ -22,6 +22,7 @@ import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -204,6 +205,10 @@ public final class LazyBinaryUtils {
recordInfo.elementOffset = 0;
recordInfo.elementSize = TimestampWritable.getTotalLength(bytes, offset);
break;
+ case TIMESTAMPTZ:
+ recordInfo.elementOffset = 0;
+ recordInfo.elementSize = TimestampTZWritable.getTotalLength(bytes, offset);
+ break;
case INTERVAL_YEAR_MONTH:
recordInfo.elementOffset = 0;
recordInfo.elementSize = WritableUtils.decodeVIntSize(bytes[offset]);
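Because the writable uses a variable-length encoding (the test below speaks of 1 vs. 2 VInts for the seconds field), the element size can only be determined by decoding. A sketch, assuming getBytes() yields exactly one serialized value:

  byte[] bytes = new TimestampTZWritable(
      TimestampTZ.parse("2017-04-14 18:00:00 Asia/Shanghai")).getBytes();
  int len = TimestampTZWritable.getTotalLength(bytes, 0);
  // for a single serialized value, len should equal bytes.length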
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
index 24b3d4e..ca96e33 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObject
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampTZObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -123,6 +124,9 @@ public final class ObjectInspectorConverters {
return new PrimitiveObjectInspectorConverter.TimestampConverter(
inputOI,
(SettableTimestampObjectInspector) outputOI);
+ case TIMESTAMPTZ:
+ return new PrimitiveObjectInspectorConverter.TimestampTZConverter(inputOI,
+ (SettableTimestampTZObjectInspector) outputOI);
case INTERVAL_YEAR_MONTH:
return new PrimitiveObjectInspectorConverter.HiveIntervalYearMonthConverter(
inputOI,
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
index ba44bae..697d22e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
@@ -27,6 +27,9 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampTZObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampTZObjectInspector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.serde.serdeConstants;
@@ -412,6 +415,10 @@ public final class ObjectInspectorUtils {
result = loi.getPrimitiveJavaObject(o);
if (loi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP) {
result = PrimitiveObjectInspectorFactory.javaTimestampObjectInspector.copyObject(result);
+ } else if (loi.getPrimitiveCategory() ==
+ PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMPTZ) {
+ result = PrimitiveObjectInspectorFactory.javaTimestampTZObjectInspector.
+ copyObject(result);
}
break;
case WRITABLE:
@@ -694,6 +701,9 @@ public final class ObjectInspectorUtils {
TimestampWritable t = ((TimestampObjectInspector) poi)
.getPrimitiveWritableObject(o);
return t.hashCode();
+ case TIMESTAMPTZ:
+ TimestampTZWritable tstz = ((TimestampTZObjectInspector) poi).getPrimitiveWritableObject(o);
+ return tstz.hashCode();
case INTERVAL_YEAR_MONTH:
HiveIntervalYearMonthWritable intervalYearMonth = ((HiveIntervalYearMonthObjectInspector) poi)
.getPrimitiveWritableObject(o);
@@ -955,6 +965,13 @@ public final class ObjectInspectorUtils {
.getPrimitiveWritableObject(o2);
return t1.compareTo(t2);
}
+ case TIMESTAMPTZ: {
+ TimestampTZWritable tstz1 = ((TimestampTZObjectInspector) poi1).
+ getPrimitiveWritableObject(o1);
+ TimestampTZWritable tstz2 = ((TimestampTZObjectInspector) poi2).
+ getPrimitiveWritableObject(o2);
+ return tstz1.compareTo(tstz2);
+ }
case INTERVAL_YEAR_MONTH: {
HiveIntervalYearMonthWritable i1 = ((HiveIntervalYearMonthObjectInspector) poi1)
.getPrimitiveWritableObject(o1);
@@ -1322,6 +1339,8 @@ public final class ObjectInspectorUtils {
return oi instanceof SettableDateObjectInspector;
case TIMESTAMP:
return oi instanceof SettableTimestampObjectInspector;
+ case TIMESTAMPTZ:
+ return oi instanceof SettableTimestampTZObjectInspector;
case INTERVAL_YEAR_MONTH:
return oi instanceof SettableHiveIntervalYearMonthObjectInspector;
case INTERVAL_DAY_TIME:
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
index 70633f3..b037540 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
@@ -31,8 +31,8 @@ public interface PrimitiveObjectInspector extends ObjectInspector {
*/
public static enum PrimitiveCategory {
VOID, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING,
- DATE, TIMESTAMP, BINARY, DECIMAL, VARCHAR, CHAR, INTERVAL_YEAR_MONTH, INTERVAL_DAY_TIME,
- UNKNOWN
+ DATE, TIMESTAMP, TIMESTAMPTZ, BINARY, DECIMAL, VARCHAR, CHAR,
+ INTERVAL_YEAR_MONTH, INTERVAL_DAY_TIME, UNKNOWN
};
public PrimitiveTypeInfo getTypeInfo();
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampTZObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampTZObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampTZObjectInspector.java
new file mode 100644
index 0000000..32b9c69
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampTZObjectInspector.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+public class JavaTimestampTZObjectInspector
+ extends AbstractPrimitiveJavaObjectInspector implements SettableTimestampTZObjectInspector {
+
+ JavaTimestampTZObjectInspector() {
+ super(TypeInfoFactory.timestampTZTypeInfo);
+ }
+
+ @Override
+ public Object set(Object o, byte[] bytes, int offset) {
+ TimestampTZWritable.setTimestampTZ((TimestampTZ) o, bytes, offset);
+ return o;
+ }
+
+ @Override
+ public Object set(Object o, TimestampTZ t) {
+ if (t == null) {
+ return null;
+ }
+ ((TimestampTZ) o).set(t.getEpochSecond(), t.getNanos());
+ return o;
+ }
+
+ @Override
+ public Object set(Object o, TimestampTZWritable t) {
+ if (t == null) {
+ return null;
+ }
+ ((TimestampTZ) o).set(t.getSeconds(), t.getNanos());
+ return o;
+ }
+
+ @Override
+ public Object create(byte[] bytes, int offset) {
+ TimestampTZ t = new TimestampTZ();
+ TimestampTZWritable.setTimestampTZ(t, bytes, offset);
+ return t;
+ }
+
+ @Override
+ public Object create(TimestampTZ t) {
+ return new TimestampTZ(t.getZonedDateTime());
+ }
+
+ @Override
+ public TimestampTZWritable getPrimitiveWritableObject(Object o) {
+ return o == null ? null : new TimestampTZWritable((TimestampTZ) o);
+ }
+
+ @Override
+ public TimestampTZ getPrimitiveJavaObject(Object o) {
+ return o == null ? null : (TimestampTZ) o;
+ }
+}
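Note the constructor is package-private; callers obtain the singleton via PrimitiveObjectInspectorFactory.javaTimestampTZObjectInspector (added below). A hypothetical fragment:

  JavaTimestampTZObjectInspector oi =
      PrimitiveObjectInspectorFactory.javaTimestampTZObjectInspector;
  Object o = oi.create(TimestampTZ.parse("2017-04-14 18:00:00 Asia/Shanghai"));
  oi.set(o, new TimestampTZ(0, 0));   // set() mutates the TimestampTZ in place
  TimestampTZWritable w = oi.getPrimitiveWritableObject(o);  // fresh writable copy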
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
index e08ad43..d4b7a32 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
@@ -292,6 +293,28 @@ public class PrimitiveObjectInspectorConverter {
}
}
+ public static class TimestampTZConverter implements Converter {
+ final PrimitiveObjectInspector inputOI;
+ final SettableTimestampTZObjectInspector outputOI;
+ final Object r;
+
+ public TimestampTZConverter(PrimitiveObjectInspector inputOI,
+ SettableTimestampTZObjectInspector outputOI) {
+ this.inputOI = inputOI;
+ this.outputOI = outputOI;
+ r = outputOI.create(new TimestampTZ());
+ }
+
+ @Override
+ public Object convert(Object input) {
+ if (input == null) {
+ return null;
+ }
+
+ return outputOI.set(r, PrimitiveObjectInspectorUtils.getTimestampTZ(input, inputOI));
+ }
+ }
+
public static class HiveIntervalYearMonthConverter implements Converter {
PrimitiveObjectInspector inputOI;
SettableHiveIntervalYearMonthObjectInspector outputOI;
@@ -466,6 +489,9 @@ public class PrimitiveObjectInspectorConverter {
t.set(((TimestampObjectInspector) inputOI)
.getPrimitiveWritableObject(input).toString());
return t;
+ case TIMESTAMPTZ:
+ t.set(((TimestampTZObjectInspector) inputOI).getPrimitiveWritableObject(input).toString());
+ return t;
case INTERVAL_YEAR_MONTH:
t.set(((HiveIntervalYearMonthObjectInspector) inputOI)
.getPrimitiveWritableObject(input).toString());
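TimestampTZConverter funnels everything through PrimitiveObjectInspectorUtils.getTimestampTZ and reuses a single output object (r), so callers must copy results they intend to keep. A hypothetical fragment:

  PrimitiveObjectInspectorConverter.TimestampTZConverter conv =
      new PrimitiveObjectInspectorConverter.TimestampTZConverter(
          PrimitiveObjectInspectorFactory.javaStringObjectInspector,
          PrimitiveObjectInspectorFactory.writableTimestampTZObjectInspector);
  Object a = conv.convert("2017-04-14 10:00:00.00 GMT");
  Object b = conv.convert("2017-04-14 18:00:00 Asia/Shanghai");
  // a == b: both point at the converter's one reusable writable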
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
index 2ed0843..9ea6609 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -87,6 +88,8 @@ public final class PrimitiveObjectInspectorFactory {
new WritableDateObjectInspector();
public static final WritableTimestampObjectInspector writableTimestampObjectInspector =
new WritableTimestampObjectInspector();
+ public static final WritableTimestampTZObjectInspector writableTimestampTZObjectInspector =
+ new WritableTimestampTZObjectInspector();
public static final WritableHiveIntervalYearMonthObjectInspector writableHiveIntervalYearMonthObjectInspector =
new WritableHiveIntervalYearMonthObjectInspector();
public static final WritableHiveIntervalDayTimeObjectInspector writableHiveIntervalDayTimeObjectInspector =
@@ -124,6 +127,8 @@ public final class PrimitiveObjectInspectorFactory {
writableDateObjectInspector);
cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME),
writableTimestampObjectInspector);
+ cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(
+ serdeConstants.TIMESTAMPTZ_TYPE_NAME), writableTimestampTZObjectInspector);
cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME),
writableHiveIntervalYearMonthObjectInspector);
cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME),
@@ -149,6 +154,8 @@ public final class PrimitiveObjectInspectorFactory {
primitiveCategoryToWritableOI.put(PrimitiveCategory.VOID, writableVoidObjectInspector);
primitiveCategoryToWritableOI.put(PrimitiveCategory.DATE, writableDateObjectInspector);
primitiveCategoryToWritableOI.put(PrimitiveCategory.TIMESTAMP, writableTimestampObjectInspector);
+ primitiveCategoryToWritableOI.put(PrimitiveCategory.TIMESTAMPTZ,
+ writableTimestampTZObjectInspector);
primitiveCategoryToWritableOI.put(PrimitiveCategory.INTERVAL_YEAR_MONTH, writableHiveIntervalYearMonthObjectInspector);
primitiveCategoryToWritableOI.put(PrimitiveCategory.INTERVAL_DAY_TIME, writableHiveIntervalDayTimeObjectInspector);
primitiveCategoryToWritableOI.put(PrimitiveCategory.BINARY, writableBinaryObjectInspector);
@@ -181,6 +188,8 @@ public final class PrimitiveObjectInspectorFactory {
new JavaDateObjectInspector();
public static final JavaTimestampObjectInspector javaTimestampObjectInspector =
new JavaTimestampObjectInspector();
+ public static final JavaTimestampTZObjectInspector javaTimestampTZObjectInspector =
+ new JavaTimestampTZObjectInspector();
public static final JavaHiveIntervalYearMonthObjectInspector javaHiveIntervalYearMonthObjectInspector =
new JavaHiveIntervalYearMonthObjectInspector();
public static final JavaHiveIntervalDayTimeObjectInspector javaHiveIntervalDayTimeObjectInspector =
@@ -218,6 +227,8 @@ public final class PrimitiveObjectInspectorFactory {
javaDateObjectInspector);
cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME),
javaTimestampObjectInspector);
+ cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.timestampTZTypeInfo,
+ javaTimestampTZObjectInspector);
cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME),
javaHiveIntervalYearMonthObjectInspector);
cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME),
@@ -243,6 +254,7 @@ public final class PrimitiveObjectInspectorFactory {
primitiveCategoryToJavaOI.put(PrimitiveCategory.VOID, javaVoidObjectInspector);
primitiveCategoryToJavaOI.put(PrimitiveCategory.DATE, javaDateObjectInspector);
primitiveCategoryToJavaOI.put(PrimitiveCategory.TIMESTAMP, javaTimestampObjectInspector);
+ primitiveCategoryToJavaOI.put(PrimitiveCategory.TIMESTAMPTZ, javaTimestampTZObjectInspector);
primitiveCategoryToJavaOI.put(PrimitiveCategory.INTERVAL_YEAR_MONTH, javaHiveIntervalYearMonthObjectInspector);
primitiveCategoryToJavaOI.put(PrimitiveCategory.INTERVAL_DAY_TIME, javaHiveIntervalDayTimeObjectInspector);
primitiveCategoryToJavaOI.put(PrimitiveCategory.BINARY, javaByteArrayObjectInspector);
@@ -336,6 +348,8 @@ public final class PrimitiveObjectInspectorFactory {
return new WritableConstantDateObjectInspector((DateWritable)value);
case TIMESTAMP:
return new WritableConstantTimestampObjectInspector((TimestampWritable)value);
+ case TIMESTAMPTZ:
+ return new WritableConstantTimestampTZObjectInspector((TimestampTZWritable) value);
case INTERVAL_YEAR_MONTH:
return new WritableConstantHiveIntervalYearMonthObjectInspector((HiveIntervalYearMonthWritable) value);
case INTERVAL_DAY_TIME:
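With the caches and maps above populated, the factory's usual lookup methods (names per the existing API, so treat this fragment as an assumption) resolve the new type to the same singletons:

  PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
      PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMPTZ);  // writable-side OI
  PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
      TypeInfoFactory.timestampTZTypeInfo);                     // Java-side OI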
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
index 9642a7e..a6cda4e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
@@ -27,7 +27,9 @@ import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.ql.util.TimestampUtils;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.common.type.HiveChar;
@@ -226,6 +228,9 @@ public final class PrimitiveObjectInspectorUtils {
public static final PrimitiveTypeEntry timestampTypeEntry = new PrimitiveTypeEntry(
PrimitiveCategory.TIMESTAMP, serdeConstants.TIMESTAMP_TYPE_NAME, null,
Timestamp.class, TimestampWritable.class);
+ public static final PrimitiveTypeEntry timestampTZTypeEntry = new PrimitiveTypeEntry(
+ PrimitiveCategory.TIMESTAMPTZ, serdeConstants.TIMESTAMPTZ_TYPE_NAME, null,
+ TimestampTZ.class, TimestampTZWritable.class);
public static final PrimitiveTypeEntry intervalYearMonthTypeEntry = new PrimitiveTypeEntry(
PrimitiveCategory.INTERVAL_YEAR_MONTH, serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME, null,
HiveIntervalYearMonth.class, HiveIntervalYearMonthWritable.class);
@@ -261,6 +266,7 @@ public final class PrimitiveObjectInspectorUtils {
registerType(shortTypeEntry);
registerType(dateTypeEntry);
registerType(timestampTypeEntry);
+ registerType(timestampTZTypeEntry);
registerType(intervalYearMonthTypeEntry);
registerType(intervalDayTimeTypeEntry);
registerType(decimalTypeEntry);
@@ -439,6 +445,10 @@ public final class PrimitiveObjectInspectorUtils {
return ((TimestampObjectInspector) oi1).getPrimitiveWritableObject(o1)
.equals(((TimestampObjectInspector) oi2).getPrimitiveWritableObject(o2));
}
+ case TIMESTAMPTZ: {
+ return ((TimestampTZObjectInspector) oi1).getPrimitiveWritableObject(o1).equals(
+ ((TimestampTZObjectInspector) oi2).getPrimitiveWritableObject(o2));
+ }
case INTERVAL_YEAR_MONTH: {
return ((HiveIntervalYearMonthObjectInspector) oi1).getPrimitiveWritableObject(o1)
.equals(((HiveIntervalYearMonthObjectInspector) oi2).getPrimitiveWritableObject(o2));
@@ -461,39 +471,6 @@ public final class PrimitiveObjectInspectorUtils {
}
/**
- * Convert a primitive object to double.
- */
- public static double convertPrimitiveToDouble(Object o, PrimitiveObjectInspector oi) {
- switch (oi.getPrimitiveCategory()) {
- case BOOLEAN:
- return ((BooleanObjectInspector) oi).get(o) ? 1 : 0;
- case BYTE:
- return ((ByteObjectInspector) oi).get(o);
- case SHORT:
- return ((ShortObjectInspector) oi).get(o);
- case INT:
- return ((IntObjectInspector) oi).get(o);
- case LONG:
- return ((LongObjectInspector) oi).get(o);
- case FLOAT:
- return ((FloatObjectInspector) oi).get(o);
- case DOUBLE:
- return ((DoubleObjectInspector) oi).get(o);
- case STRING:
- return Double.valueOf(((StringObjectInspector) oi).getPrimitiveJavaObject(o));
- case TIMESTAMP:
- return ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o)
- .getDouble();
- case DECIMAL:
- // TODO: lossy conversion!
- return ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o).doubleValue();
- case DATE: // unsupported conversion
- default:
- throw new NumberFormatException();
- }
- }
-
- /**
* Compare 2 Primitive Objects with their Object Inspector, conversions
* allowed. Note that NULL does not equal to NULL according to SQL standard.
*/
@@ -509,8 +486,7 @@ public final class PrimitiveObjectInspectorUtils {
// If not equal, convert all to double and compare
try {
- return convertPrimitiveToDouble(o1, oi1) == convertPrimitiveToDouble(o2,
- oi2);
+ return getDouble(o1, oi1) == getDouble(o2, oi2);
} catch (NumberFormatException e) {
return false;
}
@@ -904,6 +880,9 @@ public final class PrimitiveObjectInspectorUtils {
case TIMESTAMP:
result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).toString();
break;
+ case TIMESTAMPTZ:
+ result = ((TimestampTZObjectInspector) oi).getPrimitiveWritableObject(o).toString();
+ break;
case INTERVAL_YEAR_MONTH:
result = ((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveWritableObject(o).toString();
break;
@@ -1092,6 +1071,14 @@ public final class PrimitiveObjectInspectorUtils {
result = DateWritable.timeToDate(
((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getSeconds());
break;
+ case TIMESTAMPTZ:
+ String tstz = oi.getPrimitiveWritableObject(o).toString();
+ int divSpace = tstz.indexOf(' ');
+ if (divSpace == -1) {
+ return null;
+ }
+ result = Date.valueOf(tstz.substring(0, divSpace));
+ break;
default:
throw new RuntimeException("Cannot convert to Date from: "
+ oi.getTypeName());
@@ -1161,6 +1148,15 @@ public final class PrimitiveObjectInspectorUtils {
case TIMESTAMP:
result = ((TimestampObjectInspector) inputOI).getPrimitiveWritableObject(o).getTimestamp();
break;
+ case TIMESTAMPTZ:
+ String tstz = inputOI.getPrimitiveWritableObject(o).toString();
+ int index = tstz.indexOf(" ");
+ index = tstz.indexOf(" ", index + 1);
+ if (index == -1) {
+ return null;
+ }
+ result = Timestamp.valueOf(tstz.substring(0, index));
+ break;
default:
throw new RuntimeException("Hive 2 Internal error: unknown type: "
+ inputOI.getTypeName());
@@ -1168,6 +1164,43 @@ public final class PrimitiveObjectInspectorUtils {
return result;
}
+ public static TimestampTZ getTimestampTZ(Object o, PrimitiveObjectInspector oi) {
+ if (o == null) {
+ return null;
+ }
+ switch (oi.getPrimitiveCategory()) {
+ case VOID: {
+ return null;
+ }
+ // The resulting timestamp with time zone will have TZ in UTC
+ // instead of the original TZ in the string representation.
+ case STRING: {
+ StringObjectInspector soi = (StringObjectInspector) oi;
+ String s = soi.getPrimitiveJavaObject(o).trim();
+ return TimestampTZ.parseOrNull(s);
+ }
+ case CHAR:
+ case VARCHAR: {
+ String s = getString(o, oi).trim();
+ return TimestampTZ.parseOrNull(s);
+ }
+ case DATE: {
+ Date date = ((DateObjectInspector) oi).getPrimitiveWritableObject(o).get();
+ return TimestampTZ.convert(date);
+ }
+ case TIMESTAMP: {
+ Timestamp ts = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getTimestamp();
+ return TimestampTZ.convert(ts);
+ }
+ case TIMESTAMPTZ: {
+ return ((TimestampTZObjectInspector) oi).getPrimitiveWritableObject(o).getTimestampTZ();
+ }
+ default:
+ throw new RuntimeException("Cannot convert to " + serdeConstants.TIMESTAMPTZ_TYPE_NAME +
+ " from: " + oi.getTypeName());
+ }
+ }
+
static Timestamp getTimestampFromString(String s) {
Timestamp result;
s = s.trim();
@@ -1298,6 +1331,7 @@ public final class PrimitiveObjectInspectorUtils {
return PrimitiveGrouping.BOOLEAN_GROUP;
case TIMESTAMP:
case DATE:
+ case TIMESTAMPTZ:
return PrimitiveGrouping.DATE_GROUP;
case INTERVAL_YEAR_MONTH:
case INTERVAL_DAY_TIME:
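A fragment showing the new coercion entry point; as the in-code comment in getTimestampTZ warns, string input is normalized to UTC rather than keeping the zone as written:

  TimestampTZ tstz = PrimitiveObjectInspectorUtils.getTimestampTZ(
      "2017-04-14 18:00:00 Asia/Shanghai",
      PrimitiveObjectInspectorFactory.javaStringObjectInspector);
  // equivalent instant, rendered with the UTC zone in its string form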
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampTZObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampTZObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampTZObjectInspector.java
new file mode 100644
index 0000000..032bc38
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampTZObjectInspector.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
+
+public interface SettableTimestampTZObjectInspector extends TimestampTZObjectInspector {
+
+ Object set(Object o, byte[] bytes, int offset);
+
+ Object set(Object o, TimestampTZ t);
+
+ Object set(Object o, TimestampTZWritable t);
+
+ Object create(byte[] bytes, int offset);
+
+ Object create(TimestampTZ t);
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampTZObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampTZObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampTZObjectInspector.java
new file mode 100644
index 0000000..d142d38
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampTZObjectInspector.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+
+public interface TimestampTZObjectInspector extends PrimitiveObjectInspector {
+
+ TimestampTZWritable getPrimitiveWritableObject(Object o);
+
+ TimestampTZ getPrimitiveJavaObject(Object o);
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantTimestampTZObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantTimestampTZObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantTimestampTZObjectInspector.java
new file mode 100644
index 0000000..5805ce8
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantTimestampTZObjectInspector.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+
+public class WritableConstantTimestampTZObjectInspector
+ extends WritableTimestampTZObjectInspector implements ConstantObjectInspector {
+
+ private TimestampTZWritable value;
+
+ public WritableConstantTimestampTZObjectInspector(TimestampTZWritable value) {
+ this.value = value;
+ }
+
+ @Override
+ public Object getWritableConstantValue() {
+ return value;
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampTZObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampTZObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampTZObjectInspector.java
new file mode 100644
index 0000000..0b622c1
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampTZObjectInspector.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+public class WritableTimestampTZObjectInspector extends
+ AbstractPrimitiveWritableObjectInspector implements SettableTimestampTZObjectInspector {
+
+ public WritableTimestampTZObjectInspector() {
+ super(TypeInfoFactory.timestampTZTypeInfo);
+ }
+
+ @Override
+ public TimestampTZWritable getPrimitiveWritableObject(Object o) {
+ return o == null ? null : (TimestampTZWritable) o;
+ }
+
+ @Override
+ public Object set(Object o, byte[] bytes, int offset) {
+ ((TimestampTZWritable) o).set(bytes, offset);
+ return o;
+ }
+
+ @Override
+ public Object set(Object o, TimestampTZ t) {
+ if (t == null) {
+ return null;
+ }
+ ((TimestampTZWritable) o).set(t);
+ return o;
+ }
+
+ @Override
+ public Object set(Object o, TimestampTZWritable t) {
+ if (t == null) {
+ return null;
+ }
+ ((TimestampTZWritable) o).set(t);
+ return o;
+ }
+
+ @Override
+ public Object create(byte[] bytes, int offset) {
+ return new TimestampTZWritable(bytes, offset);
+ }
+
+ @Override
+ public Object create(TimestampTZ t) {
+ return new TimestampTZWritable(t);
+ }
+
+ @Override
+ public TimestampTZ getPrimitiveJavaObject(Object o) {
+ return o == null ? null : ((TimestampTZWritable) o).getTimestampTZ();
+ }
+
+ @Override
+ public Object copyObject(Object o) {
+ return o == null ? null : new TimestampTZWritable((TimestampTZWritable) o);
+ }
+}
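A hypothetical fragment for the writable-side inspector (note that set() on a null input returns null instead of mutating the target):

  WritableTimestampTZObjectInspector oi =
      PrimitiveObjectInspectorFactory.writableTimestampTZObjectInspector;
  TimestampTZWritable w = (TimestampTZWritable) oi.create(new TimestampTZ());
  oi.set(w, TimestampTZ.parse("2017-04-14 10:00:00.00 GMT"));
  TimestampTZWritable copy = (TimestampTZWritable) oi.copyObject(w);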
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java
index 0ad8c02..809f45b 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java
@@ -19,8 +19,10 @@
package org.apache.hadoop.hive.serde2.thrift;
import java.sql.DatabaseMetaData;
+import java.sql.Types;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hive.service.rpc.thrift.TTypeId;
@@ -70,6 +72,9 @@ public enum Type {
TIMESTAMP_TYPE("TIMESTAMP",
java.sql.Types.TIMESTAMP,
TTypeId.TIMESTAMP_TYPE),
+ TIMESTAMPTZ_TYPE(serdeConstants.TIMESTAMPTZ_TYPE_NAME.toUpperCase(),
+ Types.TIMESTAMP_WITH_TIMEZONE,
+ TTypeId.TIMESTAMPTZ_TYPE),
INTERVAL_YEAR_MONTH_TYPE("INTERVAL_YEAR_MONTH",
java.sql.Types.OTHER,
TTypeId.INTERVAL_YEAR_MONTH_TYPE),
@@ -225,6 +230,9 @@ public enum Type {
case TIMESTAMP: {
return Type.TIMESTAMP_TYPE;
}
+ case TIMESTAMPTZ: {
+ return Type.TIMESTAMPTZ_TYPE;
+ }
case INTERVAL_YEAR_MONTH: {
return Type.INTERVAL_YEAR_MONTH_TYPE;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
index 43c4819..b0e0bf2 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
@@ -55,6 +55,8 @@ public final class TypeInfoFactory {
public static final PrimitiveTypeInfo shortTypeInfo = new PrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME);
public static final PrimitiveTypeInfo dateTypeInfo = new PrimitiveTypeInfo(serdeConstants.DATE_TYPE_NAME);
public static final PrimitiveTypeInfo timestampTypeInfo = new PrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME);
+ public static final PrimitiveTypeInfo timestampTZTypeInfo =
+ new PrimitiveTypeInfo(serdeConstants.TIMESTAMPTZ_TYPE_NAME);
public static final PrimitiveTypeInfo intervalYearMonthTypeInfo = new PrimitiveTypeInfo(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
public static final PrimitiveTypeInfo intervalDayTimeTypeInfo = new PrimitiveTypeInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
public static final PrimitiveTypeInfo binaryTypeInfo = new PrimitiveTypeInfo(serdeConstants.BINARY_TYPE_NAME);
@@ -85,6 +87,7 @@ public final class TypeInfoFactory {
cachedPrimitiveTypeInfo.put(serdeConstants.SMALLINT_TYPE_NAME, shortTypeInfo);
cachedPrimitiveTypeInfo.put(serdeConstants.DATE_TYPE_NAME, dateTypeInfo);
cachedPrimitiveTypeInfo.put(serdeConstants.TIMESTAMP_TYPE_NAME, timestampTypeInfo);
+ cachedPrimitiveTypeInfo.put(serdeConstants.TIMESTAMPTZ_TYPE_NAME, timestampTZTypeInfo);
cachedPrimitiveTypeInfo.put(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME, intervalYearMonthTypeInfo);
cachedPrimitiveTypeInfo.put(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME, intervalDayTimeTypeInfo);
cachedPrimitiveTypeInfo.put(serdeConstants.BINARY_TYPE_NAME, binaryTypeInfo);
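After registration, the name-keyed cache makes the standard lookup return the shared instance (fragment, assuming serdeConstants in scope):

  PrimitiveTypeInfo info = TypeInfoFactory.getPrimitiveTypeInfo(
      serdeConstants.TIMESTAMPTZ_TYPE_NAME);
  // info == TypeInfoFactory.timestampTZTypeInfo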
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampTZWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampTZWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampTZWritable.java
new file mode 100644
index 0000000..73b81b9
--- /dev/null
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampTZWritable.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.io;
+
+import com.google.code.tempusfugit.concurrency.RepeatingRule;
+import com.google.code.tempusfugit.concurrency.annotations.Repeating;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.io.WritableComparator;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+
+import java.util.concurrent.ThreadLocalRandom;
+
+public class TestTimestampTZWritable {
+
+ @Rule
+ public RepeatingRule repeatingRule = new RepeatingRule();
+
+ @Test
+ @Repeating(repetition = 10)
+ public void testSeconds() {
+ // just 1 VInt
+ long seconds = ThreadLocalRandom.current().nextLong(Integer.MAX_VALUE);
+ TimestampTZ tstz = new TimestampTZ(seconds, 0);
+ verifyConversion(tstz);
+
+ // 2 VInt
+ seconds = ThreadLocalRandom.current().nextLong(Integer.MAX_VALUE) + Integer.MAX_VALUE + 1;
+ if (ThreadLocalRandom.current().nextBoolean()) {
+ seconds = -seconds;
+ }
+ tstz.set(seconds, 0);
+ verifyConversion(tstz);
+ }
+
+ @Test
+ @Repeating(repetition = 10)
+ public void testSecondsWithNanos() {
+ long seconds = ThreadLocalRandom.current().nextLong(31556889864403199L);
+ if (ThreadLocalRandom.current().nextBoolean()) {
+ seconds = -seconds;
+ }
+
+ int nanos = ThreadLocalRandom.current().nextInt(999999999) + 1;
+
+ TimestampTZ tstz = new TimestampTZ(seconds, nanos);
+ verifyConversion(tstz);
+ }
+
+ @Test
+ public void testComparison() {
+ String s1 = "2017-04-14 18:00:00 Asia/Shanghai";
+ String s2 = "2017-04-14 10:00:00.00 GMT";
+ String s3 = "2017-04-14 18:00:00 UTC+08:00";
+ String s4 = "2017-04-14 18:00:00 Europe/London";
+ TimestampTZWritable writable1 = new TimestampTZWritable(TimestampTZ.parse(s1));
+ TimestampTZWritable writable2 = new TimestampTZWritable(TimestampTZ.parse(s2));
+ TimestampTZWritable writable3 = new TimestampTZWritable(TimestampTZ.parse(s3));
+ TimestampTZWritable writable4 = new TimestampTZWritable(TimestampTZ.parse(s4));
+
+ Assert.assertEquals(writable1, writable2);
+ Assert.assertEquals(writable1, writable3);
+ Assert.assertEquals(writable1.hashCode(), writable2.hashCode());
+ Assert.assertEquals(writable1.hashCode(), writable3.hashCode());
+ Assert.assertTrue(writable1.compareTo(writable4) < 0);
+
+ byte[] bs1 = writable1.toBinarySortable();
+ byte[] bs2 = writable2.toBinarySortable();
+ byte[] bs3 = writable3.toBinarySortable();
+ byte[] bs4 = writable4.toBinarySortable();
+ Assert.assertTrue(WritableComparator.compareBytes(bs1, 0, bs1.length, bs2, 0, bs2.length) == 0);
+ Assert.assertTrue(WritableComparator.compareBytes(bs1, 0, bs1.length, bs3, 0, bs3.length) == 0);
+ Assert.assertTrue(WritableComparator.compareBytes(bs1, 0, bs1.length, bs4, 0, bs4.length) < 0);
+ }
+
+ private static void verifyConversion(TimestampTZ srcTstz) {
+ TimestampTZWritable src = new TimestampTZWritable(srcTstz);
+ byte[] bytes = src.getBytes();
+ TimestampTZWritable dest = new TimestampTZWritable(bytes, 0);
+ TimestampTZ destTstz = dest.getTimestampTZ();
+ String errMsg = "Src tstz with seconds " + srcTstz.getEpochSecond() + ", nanos " +
+ srcTstz.getNanos() + ". Dest tstz with seconds " + destTstz.getEpochSecond() +
+ ", nanos " + destTstz.getNanos();
+ Assert.assertEquals(errMsg, srcTstz, destTstz);
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service-rpc/if/TCLIService.thrift
----------------------------------------------------------------------
diff --git a/service-rpc/if/TCLIService.thrift b/service-rpc/if/TCLIService.thrift
index 824b049..1609669 100644
--- a/service-rpc/if/TCLIService.thrift
+++ b/service-rpc/if/TCLIService.thrift
@@ -66,6 +66,9 @@ enum TProtocolVersion {
// V10 adds support for in place updates via GetOperationStatus
HIVE_CLI_SERVICE_PROTOCOL_V10
+
+ // V11 adds timestamp with time zone type
+ HIVE_CLI_SERVICE_PROTOCOL_V11
}
enum TTypeId {
@@ -90,7 +93,8 @@ enum TTypeId {
VARCHAR_TYPE,
CHAR_TYPE,
INTERVAL_YEAR_MONTH_TYPE,
- INTERVAL_DAY_TIME_TYPE
+ INTERVAL_DAY_TIME_TYPE,
+ TIMESTAMPTZ_TYPE
}
const set<TTypeId> PRIMITIVE_TYPES = [
@@ -110,7 +114,8 @@ const set<TTypeId> PRIMITIVE_TYPES = [
TTypeId.VARCHAR_TYPE,
TTypeId.CHAR_TYPE,
TTypeId.INTERVAL_YEAR_MONTH_TYPE,
- TTypeId.INTERVAL_DAY_TIME_TYPE
+ TTypeId.INTERVAL_DAY_TIME_TYPE,
+ TTypeId.TIMESTAMPTZ_TYPE
]
const set<TTypeId> COMPLEX_TYPES = [
@@ -148,6 +153,7 @@ const map<TTypeId,string> TYPE_NAMES = {
TTypeId.CHAR_TYPE: "CHAR"
TTypeId.INTERVAL_YEAR_MONTH_TYPE: "INTERVAL_YEAR_MONTH"
TTypeId.INTERVAL_DAY_TIME_TYPE: "INTERVAL_DAY_TIME"
+ TTypeId.TIMESTAMPTZ_TYPE: "TIMESTAMP WITH TIME ZONE"
}
// Thrift does not support recursively defined types or forward declarations,
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service-rpc/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_constants.cpp b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
index 991cb2e..5ec3426 100644
--- a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
+++ b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
@@ -28,6 +28,7 @@ TCLIServiceConstants::TCLIServiceConstants() {
PRIMITIVE_TYPES.insert((TTypeId::type)19);
PRIMITIVE_TYPES.insert((TTypeId::type)20);
PRIMITIVE_TYPES.insert((TTypeId::type)21);
+ PRIMITIVE_TYPES.insert((TTypeId::type)22);
COMPLEX_TYPES.insert((TTypeId::type)10);
COMPLEX_TYPES.insert((TTypeId::type)11);
@@ -59,6 +60,7 @@ TCLIServiceConstants::TCLIServiceConstants() {
TYPE_NAMES.insert(std::make_pair((TTypeId::type)19, "CHAR"));
TYPE_NAMES.insert(std::make_pair((TTypeId::type)20, "INTERVAL_YEAR_MONTH"));
TYPE_NAMES.insert(std::make_pair((TTypeId::type)21, "INTERVAL_DAY_TIME"));
+ TYPE_NAMES.insert(std::make_pair((TTypeId::type)22, "TIMESTAMP WITH TIME ZONE"));
CHARACTER_MAXIMUM_LENGTH = "characterMaximumLength";
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp
index b6995c4..61933e8 100644
--- a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp
+++ b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp
@@ -23,7 +23,8 @@ int _kTProtocolVersionValues[] = {
TProtocolVersion::HIVE_CLI_SERVICE_PROTOCOL_V7,
TProtocolVersion::HIVE_CLI_SERVICE_PROTOCOL_V8,
TProtocolVersion::HIVE_CLI_SERVICE_PROTOCOL_V9,
- TProtocolVersion::HIVE_CLI_SERVICE_PROTOCOL_V10
+ TProtocolVersion::HIVE_CLI_SERVICE_PROTOCOL_V10,
+ TProtocolVersion::HIVE_CLI_SERVICE_PROTOCOL_V11
};
const char* _kTProtocolVersionNames[] = {
"HIVE_CLI_SERVICE_PROTOCOL_V1",
@@ -35,9 +36,10 @@ const char* _kTProtocolVersionNames[] = {
"HIVE_CLI_SERVICE_PROTOCOL_V7",
"HIVE_CLI_SERVICE_PROTOCOL_V8",
"HIVE_CLI_SERVICE_PROTOCOL_V9",
- "HIVE_CLI_SERVICE_PROTOCOL_V10"
+ "HIVE_CLI_SERVICE_PROTOCOL_V10",
+ "HIVE_CLI_SERVICE_PROTOCOL_V11"
};
-const std::map<int, const char*> _TProtocolVersion_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(10, _kTProtocolVersionValues, _kTProtocolVersionNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+const std::map<int, const char*> _TProtocolVersion_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(11, _kTProtocolVersionValues, _kTProtocolVersionNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
int _kTTypeIdValues[] = {
TTypeId::BOOLEAN_TYPE,
@@ -61,7 +63,8 @@ int _kTTypeIdValues[] = {
TTypeId::VARCHAR_TYPE,
TTypeId::CHAR_TYPE,
TTypeId::INTERVAL_YEAR_MONTH_TYPE,
- TTypeId::INTERVAL_DAY_TIME_TYPE
+ TTypeId::INTERVAL_DAY_TIME_TYPE,
+ TTypeId::TIMESTAMPTZ_TYPE
};
const char* _kTTypeIdNames[] = {
"BOOLEAN_TYPE",
@@ -85,9 +88,10 @@ const char* _kTTypeIdNames[] = {
"VARCHAR_TYPE",
"CHAR_TYPE",
"INTERVAL_YEAR_MONTH_TYPE",
- "INTERVAL_DAY_TIME_TYPE"
+ "INTERVAL_DAY_TIME_TYPE",
+ "TIMESTAMPTZ_TYPE"
};
-const std::map<int, const char*> _TTypeId_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(22, _kTTypeIdValues, _kTTypeIdNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+const std::map<int, const char*> _TTypeId_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(23, _kTTypeIdValues, _kTTypeIdNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
int _kTStatusCodeValues[] = {
TStatusCode::SUCCESS_STATUS,
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h
index 8accf66..9483a2a 100644
--- a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h
+++ b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h
@@ -30,7 +30,8 @@ struct TProtocolVersion {
HIVE_CLI_SERVICE_PROTOCOL_V7 = 6,
HIVE_CLI_SERVICE_PROTOCOL_V8 = 7,
HIVE_CLI_SERVICE_PROTOCOL_V9 = 8,
- HIVE_CLI_SERVICE_PROTOCOL_V10 = 9
+ HIVE_CLI_SERVICE_PROTOCOL_V10 = 9,
+ HIVE_CLI_SERVICE_PROTOCOL_V11 = 10
};
};
@@ -59,7 +60,8 @@ struct TTypeId {
VARCHAR_TYPE = 18,
CHAR_TYPE = 19,
INTERVAL_YEAR_MONTH_TYPE = 20,
- INTERVAL_DAY_TIME_TYPE = 21
+ INTERVAL_DAY_TIME_TYPE = 21,
+ TIMESTAMPTZ_TYPE = 22
};
};
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TCLIServiceConstants.java
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TCLIServiceConstants.java b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TCLIServiceConstants.java
index 930bed7..3d15147 100644
--- a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TCLIServiceConstants.java
+++ b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TCLIServiceConstants.java
@@ -55,6 +55,7 @@ public class TCLIServiceConstants {
PRIMITIVE_TYPES.add(org.apache.hive.service.rpc.thrift.TTypeId.CHAR_TYPE);
PRIMITIVE_TYPES.add(org.apache.hive.service.rpc.thrift.TTypeId.INTERVAL_YEAR_MONTH_TYPE);
PRIMITIVE_TYPES.add(org.apache.hive.service.rpc.thrift.TTypeId.INTERVAL_DAY_TIME_TYPE);
+ PRIMITIVE_TYPES.add(org.apache.hive.service.rpc.thrift.TTypeId.TIMESTAMPTZ_TYPE);
}
public static final Set<TTypeId> COMPLEX_TYPES = new HashSet<TTypeId>();
@@ -95,6 +96,7 @@ public class TCLIServiceConstants {
TYPE_NAMES.put(org.apache.hive.service.rpc.thrift.TTypeId.CHAR_TYPE, "CHAR");
TYPE_NAMES.put(org.apache.hive.service.rpc.thrift.TTypeId.INTERVAL_YEAR_MONTH_TYPE, "INTERVAL_YEAR_MONTH");
TYPE_NAMES.put(org.apache.hive.service.rpc.thrift.TTypeId.INTERVAL_DAY_TIME_TYPE, "INTERVAL_DAY_TIME");
+ TYPE_NAMES.put(org.apache.hive.service.rpc.thrift.TTypeId.TIMESTAMPTZ_TYPE, "TIMESTAMP WITH TIME ZONE");
}
public static final String CHARACTER_MAXIMUM_LENGTH = "characterMaximumLength";
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TProtocolVersion.java
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TProtocolVersion.java b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TProtocolVersion.java
index 18a7825..18e3873 100644
--- a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TProtocolVersion.java
+++ b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TProtocolVersion.java
@@ -21,7 +21,8 @@ public enum TProtocolVersion implements org.apache.thrift.TEnum {
HIVE_CLI_SERVICE_PROTOCOL_V7(6),
HIVE_CLI_SERVICE_PROTOCOL_V8(7),
HIVE_CLI_SERVICE_PROTOCOL_V9(8),
- HIVE_CLI_SERVICE_PROTOCOL_V10(9);
+ HIVE_CLI_SERVICE_PROTOCOL_V10(9),
+ HIVE_CLI_SERVICE_PROTOCOL_V11(10);
private final int value;
@@ -62,6 +63,8 @@ public enum TProtocolVersion implements org.apache.thrift.TEnum {
return HIVE_CLI_SERVICE_PROTOCOL_V9;
case 9:
return HIVE_CLI_SERVICE_PROTOCOL_V10;
+ case 10:
+ return HIVE_CLI_SERVICE_PROTOCOL_V11;
default:
return null;
}
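Downstream clients can gate use of the new type on the version negotiated with the server. A minimal sketch, assuming the caller already holds the negotiated TProtocolVersion (only the enum constants come from this patch; the helper class itself is hypothetical):

import org.apache.hive.service.rpc.thrift.TProtocolVersion;

public final class TimestampTZSupport {

  private TimestampTZSupport() {
  }

  // TIMESTAMP WITH TIME ZONE is only advertised from protocol V11 onwards.
  public static boolean supportsTimestampTZ(TProtocolVersion negotiated) {
    return negotiated.getValue()
        >= TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V11.getValue();
  }
}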
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TTypeId.java
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TTypeId.java b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TTypeId.java
index a3735eb..1b062b7 100644
--- a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TTypeId.java
+++ b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TTypeId.java
@@ -33,7 +33,8 @@ public enum TTypeId implements org.apache.thrift.TEnum {
VARCHAR_TYPE(18),
CHAR_TYPE(19),
INTERVAL_YEAR_MONTH_TYPE(20),
- INTERVAL_DAY_TIME_TYPE(21);
+ INTERVAL_DAY_TIME_TYPE(21),
+ TIMESTAMPTZ_TYPE(22);
private final int value;
@@ -98,6 +99,8 @@ public enum TTypeId implements org.apache.thrift.TEnum {
return INTERVAL_YEAR_MONTH_TYPE;
case 21:
return INTERVAL_DAY_TIME_TYPE;
+ case 22:
+ return TIMESTAMPTZ_TYPE;
default:
return null;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service-rpc/src/gen/thrift/gen-php/Types.php
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-php/Types.php b/service-rpc/src/gen/thrift/gen-php/Types.php
index ee5acd2..263b22c 100644
--- a/service-rpc/src/gen/thrift/gen-php/Types.php
+++ b/service-rpc/src/gen/thrift/gen-php/Types.php
@@ -26,6 +26,7 @@ final class TProtocolVersion {
const HIVE_CLI_SERVICE_PROTOCOL_V8 = 7;
const HIVE_CLI_SERVICE_PROTOCOL_V9 = 8;
const HIVE_CLI_SERVICE_PROTOCOL_V10 = 9;
+ const HIVE_CLI_SERVICE_PROTOCOL_V11 = 10;
static public $__names = array(
0 => 'HIVE_CLI_SERVICE_PROTOCOL_V1',
1 => 'HIVE_CLI_SERVICE_PROTOCOL_V2',
@@ -37,6 +38,7 @@ final class TProtocolVersion {
7 => 'HIVE_CLI_SERVICE_PROTOCOL_V8',
8 => 'HIVE_CLI_SERVICE_PROTOCOL_V9',
9 => 'HIVE_CLI_SERVICE_PROTOCOL_V10',
+ 10 => 'HIVE_CLI_SERVICE_PROTOCOL_V11',
);
}
@@ -63,6 +65,7 @@ final class TTypeId {
const CHAR_TYPE = 19;
const INTERVAL_YEAR_MONTH_TYPE = 20;
const INTERVAL_DAY_TIME_TYPE = 21;
+ const TIMESTAMPTZ_TYPE = 22;
static public $__names = array(
0 => 'BOOLEAN_TYPE',
1 => 'TINYINT_TYPE',
@@ -86,6 +89,7 @@ final class TTypeId {
19 => 'CHAR_TYPE',
20 => 'INTERVAL_YEAR_MONTH_TYPE',
21 => 'INTERVAL_DAY_TIME_TYPE',
+ 22 => 'TIMESTAMPTZ_TYPE',
);
}
@@ -10053,6 +10057,7 @@ final class Constant extends \Thrift\Type\TConstant {
19 => true,
20 => true,
21 => true,
+ 22 => true,
);
}
@@ -10096,6 +10101,7 @@ final class Constant extends \Thrift\Type\TConstant {
19 => "CHAR",
20 => "INTERVAL_YEAR_MONTH",
21 => "INTERVAL_DAY_TIME",
+ 22 => "TIMESTAMP WITH TIME ZONE",
);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service-rpc/src/gen/thrift/gen-py/TCLIService/constants.py
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-py/TCLIService/constants.py b/service-rpc/src/gen/thrift/gen-py/TCLIService/constants.py
index c8d4f8f..edd1edd 100644
--- a/service-rpc/src/gen/thrift/gen-py/TCLIService/constants.py
+++ b/service-rpc/src/gen/thrift/gen-py/TCLIService/constants.py
@@ -27,6 +27,7 @@ PRIMITIVE_TYPES = set([
19,
20,
21,
+ 22,
])
COMPLEX_TYPES = set([
10,
@@ -61,6 +62,7 @@ TYPE_NAMES = {
19 : "CHAR",
20 : "INTERVAL_YEAR_MONTH",
21 : "INTERVAL_DAY_TIME",
+ 22 : "TIMESTAMP WITH TIME ZONE",
}
CHARACTER_MAXIMUM_LENGTH = "characterMaximumLength"
PRECISION = "precision"
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py b/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py
index e9faa2a..95bd9cc 100644
--- a/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py
+++ b/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py
@@ -27,6 +27,7 @@ class TProtocolVersion:
HIVE_CLI_SERVICE_PROTOCOL_V8 = 7
HIVE_CLI_SERVICE_PROTOCOL_V9 = 8
HIVE_CLI_SERVICE_PROTOCOL_V10 = 9
+ HIVE_CLI_SERVICE_PROTOCOL_V11 = 10
_VALUES_TO_NAMES = {
0: "HIVE_CLI_SERVICE_PROTOCOL_V1",
@@ -39,6 +40,7 @@ class TProtocolVersion:
7: "HIVE_CLI_SERVICE_PROTOCOL_V8",
8: "HIVE_CLI_SERVICE_PROTOCOL_V9",
9: "HIVE_CLI_SERVICE_PROTOCOL_V10",
+ 10: "HIVE_CLI_SERVICE_PROTOCOL_V11",
}
_NAMES_TO_VALUES = {
@@ -52,6 +54,7 @@ class TProtocolVersion:
"HIVE_CLI_SERVICE_PROTOCOL_V8": 7,
"HIVE_CLI_SERVICE_PROTOCOL_V9": 8,
"HIVE_CLI_SERVICE_PROTOCOL_V10": 9,
+ "HIVE_CLI_SERVICE_PROTOCOL_V11": 10,
}
class TTypeId:
@@ -77,6 +80,7 @@ class TTypeId:
CHAR_TYPE = 19
INTERVAL_YEAR_MONTH_TYPE = 20
INTERVAL_DAY_TIME_TYPE = 21
+ TIMESTAMPTZ_TYPE = 22
_VALUES_TO_NAMES = {
0: "BOOLEAN_TYPE",
@@ -101,6 +105,7 @@ class TTypeId:
19: "CHAR_TYPE",
20: "INTERVAL_YEAR_MONTH_TYPE",
21: "INTERVAL_DAY_TIME_TYPE",
+ 22: "TIMESTAMPTZ_TYPE",
}
_NAMES_TO_VALUES = {
@@ -126,6 +131,7 @@ class TTypeId:
"CHAR_TYPE": 19,
"INTERVAL_YEAR_MONTH_TYPE": 20,
"INTERVAL_DAY_TIME_TYPE": 21,
+ "TIMESTAMPTZ_TYPE": 22,
}
class TStatusCode:
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb b/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
index 25adbb4..1f5b604 100644
--- a/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
+++ b/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
@@ -25,6 +25,7 @@ PRIMITIVE_TYPES = Set.new([
19,
20,
21,
+ 22,
])
COMPLEX_TYPES = Set.new([
@@ -62,6 +63,7 @@ TYPE_NAMES = {
19 => %q"CHAR",
20 => %q"INTERVAL_YEAR_MONTH",
21 => %q"INTERVAL_DAY_TIME",
+ 22 => %q"TIMESTAMP WITH TIME ZONE",
}
CHARACTER_MAXIMUM_LENGTH = %q"characterMaximumLength"
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb b/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
index 714309c..aa4940a 100644
--- a/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
+++ b/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
@@ -17,8 +17,9 @@ module TProtocolVersion
HIVE_CLI_SERVICE_PROTOCOL_V8 = 7
HIVE_CLI_SERVICE_PROTOCOL_V9 = 8
HIVE_CLI_SERVICE_PROTOCOL_V10 = 9
- VALUE_MAP = {0 => "HIVE_CLI_SERVICE_PROTOCOL_V1", 1 => "HIVE_CLI_SERVICE_PROTOCOL_V2", 2 => "HIVE_CLI_SERVICE_PROTOCOL_V3", 3 => "HIVE_CLI_SERVICE_PROTOCOL_V4", 4 => "HIVE_CLI_SERVICE_PROTOCOL_V5", 5 => "HIVE_CLI_SERVICE_PROTOCOL_V6", 6 => "HIVE_CLI_SERVICE_PROTOCOL_V7", 7 => "HIVE_CLI_SERVICE_PROTOCOL_V8", 8 => "HIVE_CLI_SERVICE_PROTOCOL_V9", 9 => "HIVE_CLI_SERVICE_PROTOCOL_V10"}
- VALID_VALUES = Set.new([HIVE_CLI_SERVICE_PROTOCOL_V1, HIVE_CLI_SERVICE_PROTOCOL_V2, HIVE_CLI_SERVICE_PROTOCOL_V3, HIVE_CLI_SERVICE_PROTOCOL_V4, HIVE_CLI_SERVICE_PROTOCOL_V5, HIVE_CLI_SERVICE_PROTOCOL_V6, HIVE_CLI_SERVICE_PROTOCOL_V7, HIVE_CLI_SERVICE_PROTOCOL_V8, HIVE_CLI_SERVICE_PROTOCOL_V9, HIVE_CLI_SERVICE_PROTOCOL_V10]).freeze
+ HIVE_CLI_SERVICE_PROTOCOL_V11 = 10
+ VALUE_MAP = {0 => "HIVE_CLI_SERVICE_PROTOCOL_V1", 1 => "HIVE_CLI_SERVICE_PROTOCOL_V2", 2 => "HIVE_CLI_SERVICE_PROTOCOL_V3", 3 => "HIVE_CLI_SERVICE_PROTOCOL_V4", 4 => "HIVE_CLI_SERVICE_PROTOCOL_V5", 5 => "HIVE_CLI_SERVICE_PROTOCOL_V6", 6 => "HIVE_CLI_SERVICE_PROTOCOL_V7", 7 => "HIVE_CLI_SERVICE_PROTOCOL_V8", 8 => "HIVE_CLI_SERVICE_PROTOCOL_V9", 9 => "HIVE_CLI_SERVICE_PROTOCOL_V10", 10 => "HIVE_CLI_SERVICE_PROTOCOL_V11"}
+ VALID_VALUES = Set.new([HIVE_CLI_SERVICE_PROTOCOL_V1, HIVE_CLI_SERVICE_PROTOCOL_V2, HIVE_CLI_SERVICE_PROTOCOL_V3, HIVE_CLI_SERVICE_PROTOCOL_V4, HIVE_CLI_SERVICE_PROTOCOL_V5, HIVE_CLI_SERVICE_PROTOCOL_V6, HIVE_CLI_SERVICE_PROTOCOL_V7, HIVE_CLI_SERVICE_PROTOCOL_V8, HIVE_CLI_SERVICE_PROTOCOL_V9, HIVE_CLI_SERVICE_PROTOCOL_V10, HIVE_CLI_SERVICE_PROTOCOL_V11]).freeze
end
module TTypeId
@@ -44,8 +45,9 @@ module TTypeId
CHAR_TYPE = 19
INTERVAL_YEAR_MONTH_TYPE = 20
INTERVAL_DAY_TIME_TYPE = 21
- VALUE_MAP = {0 => "BOOLEAN_TYPE", 1 => "TINYINT_TYPE", 2 => "SMALLINT_TYPE", 3 => "INT_TYPE", 4 => "BIGINT_TYPE", 5 => "FLOAT_TYPE", 6 => "DOUBLE_TYPE", 7 => "STRING_TYPE", 8 => "TIMESTAMP_TYPE", 9 => "BINARY_TYPE", 10 => "ARRAY_TYPE", 11 => "MAP_TYPE", 12 => "STRUCT_TYPE", 13 => "UNION_TYPE", 14 => "USER_DEFINED_TYPE", 15 => "DECIMAL_TYPE", 16 => "NULL_TYPE", 17 => "DATE_TYPE", 18 => "VARCHAR_TYPE", 19 => "CHAR_TYPE", 20 => "INTERVAL_YEAR_MONTH_TYPE", 21 => "INTERVAL_DAY_TIME_TYPE"}
- VALID_VALUES = Set.new([BOOLEAN_TYPE, TINYINT_TYPE, SMALLINT_TYPE, INT_TYPE, BIGINT_TYPE, FLOAT_TYPE, DOUBLE_TYPE, STRING_TYPE, TIMESTAMP_TYPE, BINARY_TYPE, ARRAY_TYPE, MAP_TYPE, STRUCT_TYPE, UNION_TYPE, USER_DEFINED_TYPE, DECIMAL_TYPE, NULL_TYPE, DATE_TYPE, VARCHAR_TYPE, CHAR_TYPE, INTERVAL_YEAR_MONTH_TYPE, INTERVAL_DAY_TIME_TYPE]).freeze
+ TIMESTAMPTZ_TYPE = 22
+ VALUE_MAP = {0 => "BOOLEAN_TYPE", 1 => "TINYINT_TYPE", 2 => "SMALLINT_TYPE", 3 => "INT_TYPE", 4 => "BIGINT_TYPE", 5 => "FLOAT_TYPE", 6 => "DOUBLE_TYPE", 7 => "STRING_TYPE", 8 => "TIMESTAMP_TYPE", 9 => "BINARY_TYPE", 10 => "ARRAY_TYPE", 11 => "MAP_TYPE", 12 => "STRUCT_TYPE", 13 => "UNION_TYPE", 14 => "USER_DEFINED_TYPE", 15 => "DECIMAL_TYPE", 16 => "NULL_TYPE", 17 => "DATE_TYPE", 18 => "VARCHAR_TYPE", 19 => "CHAR_TYPE", 20 => "INTERVAL_YEAR_MONTH_TYPE", 21 => "INTERVAL_DAY_TIME_TYPE", 22 => "TIMESTAMPTZ_TYPE"}
+ VALID_VALUES = Set.new([BOOLEAN_TYPE, TINYINT_TYPE, SMALLINT_TYPE, INT_TYPE, BIGINT_TYPE, FLOAT_TYPE, DOUBLE_TYPE, STRING_TYPE, TIMESTAMP_TYPE, BINARY_TYPE, ARRAY_TYPE, MAP_TYPE, STRUCT_TYPE, UNION_TYPE, USER_DEFINED_TYPE, DECIMAL_TYPE, NULL_TYPE, DATE_TYPE, VARCHAR_TYPE, CHAR_TYPE, INTERVAL_YEAR_MONTH_TYPE, INTERVAL_DAY_TIME_TYPE, TIMESTAMPTZ_TYPE]).freeze
end
module TStatusCode
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service/src/java/org/apache/hive/service/cli/ColumnValue.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/ColumnValue.java b/service/src/java/org/apache/hive/service/cli/ColumnValue.java
index 76e8c03..732bc9d 100644
--- a/service/src/java/org/apache/hive/service/cli/ColumnValue.java
+++ b/service/src/java/org/apache/hive/service/cli/ColumnValue.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.serde2.thrift.Type;
import org.apache.hive.service.rpc.thrift.TBoolValue;
import org.apache.hive.service.rpc.thrift.TByteValue;
@@ -139,6 +140,14 @@ public class ColumnValue {
return TColumnValue.stringVal(tStringValue);
}
+ private static TColumnValue timestampTZValue(TimestampTZ value) {
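+ // TColumnValue has no dedicated timestamp variant, so the value is sent as a string (its normalized UTC form)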
+ TStringValue tStringValue = new TStringValue();
+ if (value != null) {
+ tStringValue.setValue(value.toString());
+ }
+ return TColumnValue.stringVal(tStringValue);
+ }
+
private static TColumnValue stringValue(HiveDecimal value, TypeDescriptor typeDescriptor) {
TStringValue tStrValue = new TStringValue();
if (value != null) {
@@ -192,6 +201,8 @@ public class ColumnValue {
return dateValue((Date)value);
case TIMESTAMP_TYPE:
return timestampValue((Timestamp)value);
+ case TIMESTAMPTZ_TYPE:
+ return timestampTZValue((TimestampTZ) value);
case INTERVAL_YEAR_MONTH_TYPE:
return stringValue((HiveIntervalYearMonth) value);
case INTERVAL_DAY_TIME_TYPE:
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java b/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
index d634bef..ed903bc 100644
--- a/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
+++ b/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
@@ -116,6 +116,8 @@ public class TypeDescriptor {
return 10;
case TIMESTAMP_TYPE:
return 29;
+ case TIMESTAMPTZ_TYPE:
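+ // presumably 29 chars for the timestamp part (as for TIMESTAMP above) plus 2 for the trailing " Z" of the normalized UTC form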
+ return 31;
default:
return null;
}
[3/3] hive git commit: HIVE-14412: Add timestamp with time zone (Rui Li reviewed by Xuefu Zhang, Pengcheng Xiong, Carter Shanklin, Ashutosh Chauhan)
Posted by li...@apache.org.
HIVE-14412: Add timestamp with time zone (Rui Li reviewed by Xuefu Zhang, Pengcheng Xiong, Carter Shanklin, Ashutosh Chauhan)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6b6a00ff
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6b6a00ff
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6b6a00ff
Branch: refs/heads/master
Commit: 6b6a00ffb0dae651ef407a99bab00d5e74f0d6aa
Parents: ee91b8e
Author: Rui Li <li...@apache.org>
Authored: Thu May 11 14:46:57 2017 +0800
Committer: Rui Li <li...@apache.org>
Committed: Thu May 11 14:46:57 2017 +0800
----------------------------------------------------------------------
.../hadoop/hive/common/type/TimestampTZ.java | 197 +++++++++
.../hive/common/type/TestTimestampTZ.java | 102 +++++
.../test/queries/clientnegative/serde_regex.q | 4 +-
.../test/queries/clientpositive/serde_regex.q | 6 +-
.../results/clientnegative/serde_regex.q.out | 6 +-
.../results/clientpositive/serde_regex.q.out | 12 +-
.../src/test/queries/positive/hbase_timestamp.q | 26 +-
.../test/results/positive/hbase_timestamp.q.out | 52 +--
.../queries/clientpositive/orc_format_part.q | 12 +-
.../clientpositive/orc_nonstd_partitions_loc.q | 14 +-
.../queries/clientpositive/rcfile_format_part.q | 12 +-
.../rcfile_nonstd_partitions_loc.q | 14 +-
.../clientpositive/orc_format_part.q.out | 24 +-
.../orc_nonstd_partitions_loc.q.out | 28 +-
.../clientpositive/rcfile_format_part.q.out | 24 +-
.../rcfile_nonstd_partitions_loc.q.out | 28 +-
.../org/apache/hive/jdbc/HiveBaseResultSet.java | 3 +
.../java/org/apache/hive/jdbc/JdbcColumn.java | 11 +
.../hadoop/hive/ql/exec/FunctionRegistry.java | 4 +-
.../hadoop/hive/ql/exec/GroupByOperator.java | 4 +-
.../hive/ql/exec/SerializationUtilities.java | 18 +
.../calcite/translator/TypeConverter.java | 3 +
.../hive/ql/parse/DDLSemanticAnalyzer.java | 1 +
.../org/apache/hadoop/hive/ql/parse/HiveLexer.g | 3 +
.../apache/hadoop/hive/ql/parse/HiveParser.g | 6 +
.../hadoop/hive/ql/parse/IdentifiersParser.g | 2 +
.../hive/ql/parse/TypeCheckProcFactory.java | 2 +
.../apache/hadoop/hive/ql/stats/StatsUtils.java | 16 +-
.../apache/hadoop/hive/ql/udf/UDFToBoolean.java | 1 +
.../apache/hadoop/hive/ql/udf/UDFToString.java | 10 +
.../hadoop/hive/ql/udf/generic/GenericUDF.java | 3 +
.../hive/ql/udf/generic/GenericUDFDate.java | 2 +
.../ql/udf/generic/GenericUDFToTimestampTZ.java | 89 ++++
.../TestSQL11ReservedKeyWordsNegative.java | 17 +-
.../test/queries/clientnegative/serde_regex.q | 2 +-
.../test/queries/clientnegative/serde_regex2.q | 4 +-
.../test/queries/clientnegative/serde_regex3.q | 2 +-
.../test/queries/clientpositive/create_like.q | 2 +-
ql/src/test/queries/clientpositive/join43.q | 38 +-
.../test/queries/clientpositive/serde_regex.q | 8 +-
.../test/queries/clientpositive/timestamptz.q | 11 +
.../test/queries/clientpositive/timestamptz_1.q | 25 ++
.../test/queries/clientpositive/timestamptz_2.q | 19 +
.../results/clientnegative/serde_regex.q.out | 2 +-
.../results/clientnegative/serde_regex2.q.out | 6 +-
.../results/clientnegative/serde_regex3.q.out | 2 +-
.../results/clientpositive/create_like.q.out | 4 +-
ql/src/test/results/clientpositive/join43.q.out | 76 ++--
.../results/clientpositive/serde_regex.q.out | 16 +-
.../results/clientpositive/timestamptz.q.out | 124 ++++++
.../results/clientpositive/timestamptz_1.q.out | 156 +++++++
.../results/clientpositive/timestamptz_2.q.out | 78 ++++
serde/if/serde.thrift | 2 +
.../src/gen/thrift/gen-cpp/serde_constants.cpp | 3 +
serde/src/gen/thrift/gen-cpp/serde_constants.h | 1 +
.../hadoop/hive/serde/serdeConstants.java | 3 +
.../org/apache/hadoop/hive/serde/Types.php | 6 +
.../org_apache_hadoop_hive_serde/constants.py | 2 +
serde/src/gen/thrift/gen-rb/serde_constants.rb | 3 +
.../apache/hadoop/hive/serde2/SerDeUtils.java | 7 +
.../binarysortable/BinarySortableSerDe.java | 26 +-
.../hive/serde2/io/TimestampTZWritable.java | 427 +++++++++++++++++++
.../hadoop/hive/serde2/lazy/LazyFactory.java | 3 +
.../hive/serde2/lazy/LazyTimestampTZ.java | 91 ++++
.../hadoop/hive/serde2/lazy/LazyUtils.java | 11 +-
.../LazyPrimitiveObjectInspectorFactory.java | 4 +
.../LazyTimestampTZObjectInspector.java | 43 ++
.../serde2/lazybinary/LazyBinaryFactory.java | 3 +
.../hive/serde2/lazybinary/LazyBinarySerDe.java | 7 +
.../lazybinary/LazyBinaryTimestampTZ.java | 36 ++
.../hive/serde2/lazybinary/LazyBinaryUtils.java | 5 +
.../ObjectInspectorConverters.java | 4 +
.../objectinspector/ObjectInspectorUtils.java | 19 +
.../PrimitiveObjectInspector.java | 4 +-
.../JavaTimestampTZObjectInspector.java | 76 ++++
.../PrimitiveObjectInspectorConverter.java | 26 ++
.../PrimitiveObjectInspectorFactory.java | 14 +
.../PrimitiveObjectInspectorUtils.java | 104 +++--
.../SettableTimestampTZObjectInspector.java | 34 ++
.../primitive/TimestampTZObjectInspector.java | 29 ++
...tableConstantTimestampTZObjectInspector.java | 36 ++
.../WritableTimestampTZObjectInspector.java | 79 ++++
.../apache/hadoop/hive/serde2/thrift/Type.java | 8 +
.../hive/serde2/typeinfo/TypeInfoFactory.java | 3 +
.../hive/serde2/io/TestTimestampTZWritable.java | 102 +++++
service-rpc/if/TCLIService.thrift | 10 +-
.../thrift/gen-cpp/TCLIService_constants.cpp | 2 +
.../gen/thrift/gen-cpp/TCLIService_types.cpp | 16 +-
.../src/gen/thrift/gen-cpp/TCLIService_types.h | 6 +-
.../rpc/thrift/TCLIServiceConstants.java | 2 +
.../service/rpc/thrift/TProtocolVersion.java | 5 +-
.../apache/hive/service/rpc/thrift/TTypeId.java | 5 +-
service-rpc/src/gen/thrift/gen-php/Types.php | 6 +
.../gen/thrift/gen-py/TCLIService/constants.py | 2 +
.../src/gen/thrift/gen-py/TCLIService/ttypes.py | 6 +
.../thrift/gen-rb/t_c_l_i_service_constants.rb | 2 +
.../gen/thrift/gen-rb/t_c_l_i_service_types.rb | 10 +-
.../apache/hive/service/cli/ColumnValue.java | 11 +
.../apache/hive/service/cli/TypeDescriptor.java | 2 +
99 files changed, 2360 insertions(+), 277 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZ.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZ.java b/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZ.java
new file mode 100644
index 0000000..ed83871
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZ.java
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.Timestamp;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.time.DateTimeException;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalTime;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeFormatterBuilder;
+import java.time.format.DateTimeParseException;
+import java.time.format.TextStyle;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAccessor;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * This is the internal type for Timestamp with time zone.
+ * A wrapper of ZonedDateTime which automatically converts the time zone to UTC.
+ * The fully qualified input format of Timestamp with time zone is
+ * "yyyy-MM-dd HH:mm:ss[.SSS...] zoneid/zoneoffset", where the time and zone parts are optional.
+ * If the time part is absent, a default of '00:00:00.0' is used.
+ * If the zone part is absent, the system time zone is used.
+ * All timestamps with time zone are converted to and stored as UTC, retaining the instant.
+ * E.g. "2017-04-14 18:00:00 Asia/Shanghai" will be converted to
+ * "2017-04-14 10:00:00.0 Z".
+ */
+public class TimestampTZ implements Comparable<TimestampTZ> {
+
+ private static final DateTimeFormatter formatter;
+ private static final ZoneId UTC = ZoneOffset.UTC;
+ private static final ZonedDateTime EPOCH = ZonedDateTime.ofInstant(Instant.EPOCH, UTC);
+ private static final LocalTime DEFAULT_LOCAL_TIME = LocalTime.of(0, 0);
+ private static final Pattern SINGLE_DIGIT_PATTERN = Pattern.compile("[\\+-]\\d:\\d\\d");
+ private static final Logger LOG = LoggerFactory.getLogger(TimestampTZ.class);
+
+ private static final ThreadLocal<DateFormat> CONVERT_FORMATTER =
+ ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
+
+ static {
+ DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
+ // Date part
+ builder.append(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
+ // Time part
+ builder.optionalStart().appendLiteral(" ").append(DateTimeFormatter.ofPattern("HH:mm:ss")).
+ optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 1, 9, true).
+ optionalEnd().optionalEnd();
+
+ // Zone part
+ builder.optionalStart().appendLiteral(" ").optionalEnd();
+ builder.optionalStart().appendZoneText(TextStyle.NARROW).optionalEnd();
+
+ formatter = builder.toFormatter();
+ }
+
+ private ZonedDateTime zonedDateTime;
+
+ public TimestampTZ() {
+ this(EPOCH);
+ }
+
+ public TimestampTZ(ZonedDateTime zonedDateTime) {
+ setZonedDateTime(zonedDateTime);
+ }
+
+ public TimestampTZ(long seconds, int nanos) {
+ set(seconds, nanos);
+ }
+
+ public void set(long seconds, int nanos) {
+ Instant instant = Instant.ofEpochSecond(seconds, nanos);
+ setZonedDateTime(ZonedDateTime.ofInstant(instant, UTC));
+ }
+
+ public ZonedDateTime getZonedDateTime() {
+ return zonedDateTime;
+ }
+
+ public void setZonedDateTime(ZonedDateTime zonedDateTime) {
+ this.zonedDateTime = zonedDateTime != null ? zonedDateTime.withZoneSameInstant(UTC) : EPOCH;
+ }
+
+ @Override
+ public String toString() {
+ return zonedDateTime.format(formatter);
+ }
+
+ @Override
+ public int hashCode() {
+ return zonedDateTime.toInstant().hashCode();
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (other instanceof TimestampTZ) {
+ return compareTo((TimestampTZ) other) == 0;
+ }
+ return false;
+ }
+
+ @Override
+ public int compareTo(TimestampTZ o) {
+ return zonedDateTime.toInstant().compareTo(o.zonedDateTime.toInstant());
+ }
+
+ public long getEpochSecond() {
+ return zonedDateTime.toInstant().getEpochSecond();
+ }
+
+ public int getNanos() {
+ return zonedDateTime.toInstant().getNano();
+ }
+
+ public static TimestampTZ parse(String s) {
+ // need to handle offsets with a single-digit hour, see JDK-8066806
+ s = handleSingleDigitHourOffset(s);
+ ZonedDateTime zonedDateTime;
+ try {
+ zonedDateTime = ZonedDateTime.parse(s, formatter);
+ } catch (DateTimeParseException e) {
+ // try to be more tolerant
+ // if the input is invalid rather than merely incomplete, we'll hit the exception here again
+ TemporalAccessor accessor = formatter.parse(s);
+ // LocalDate must be present
+ LocalDate localDate = LocalDate.from(accessor);
+ LocalTime localTime;
+ ZoneId zoneId;
+ try {
+ localTime = LocalTime.from(accessor);
+ } catch (DateTimeException e1) {
+ localTime = DEFAULT_LOCAL_TIME;
+ }
+ try {
+ zoneId = ZoneId.from(accessor);
+ } catch (DateTimeException e2) {
+ // TODO: in the future this may come from a user-specified zone (via the set time zone command)
+ zoneId = ZoneId.systemDefault();
+ }
+ zonedDateTime = ZonedDateTime.of(localDate, localTime, zoneId);
+ }
+
+ return new TimestampTZ(zonedDateTime);
+ }
+
+ private static String handleSingleDigitHourOffset(String s) {
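+ // e.g. "GMT+8:00" becomes "GMT+08:00"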
+ Matcher matcher = SINGLE_DIGIT_PATTERN.matcher(s);
+ if (matcher.find()) {
+ int index = matcher.start() + 1;
+ s = s.substring(0, index) + "0" + s.substring(index, s.length());
+ }
+ return s;
+ }
+
+ public static TimestampTZ parseOrNull(String s) {
+ try {
+ return parse(s);
+ } catch (DateTimeParseException e) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Invalid string " + s + " for TIMESTAMP WITH TIME ZONE", e);
+ }
+ return null;
+ }
+ }
+
+ // Converts a Date to a TimestampTZ. The conversion is done text-wise, since
+ // a Date/Timestamp should be treated as a description of a date/time.
+ public static TimestampTZ convert(java.util.Date date) {
+ String s = date instanceof Timestamp ? date.toString() : CONVERT_FORMATTER.get().format(date);
+ // TODO: in the future this may come from a user-specified zone (via the set time zone command)
+ return parse(s + " " + ZoneId.systemDefault().getId());
+ }
+}
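A quick illustration of the parse-and-normalize behavior documented in the class javadoc above (this sketch is not part of the patch; it only assumes the TimestampTZ class added by this commit, and the expected outputs follow TestTimestampTZ below):

import org.apache.hadoop.hive.common.type.TimestampTZ;

public class TimestampTZDemo {
  public static void main(String[] args) {
    // Date, time and zone all present: the instant is kept but re-expressed in UTC.
    TimestampTZ t1 = TimestampTZ.parse("2017-04-14 18:00:00 Asia/Shanghai");
    System.out.println(t1); // 2017-04-14 10:00:00.0 Z

    // Time part omitted: defaults to 00:00:00.0 in the given zone.
    TimestampTZ t2 = TimestampTZ.parse("2017-11-08 Europe/London");
    System.out.println(t2); // 2017-11-08 00:00:00.0 Z

    // Zone part omitted: the system time zone is assumed.
    TimestampTZ t3 = TimestampTZ.parse("2017-01-01 13:33:00");
    System.out.println(t3); // depends on the system time zone

    // Values written in different zones compare by instant.
    System.out.println(t1.equals(TimestampTZ.parse("2017-04-14 10:00:00.00 GMT"))); // true
  }
}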
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
----------------------------------------------------------------------
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java b/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
new file mode 100644
index 0000000..739850a
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
@@ -0,0 +1,102 @@
+package org.apache.hadoop.hive.common.type;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.sql.Timestamp;
+import java.time.format.DateTimeParseException;
+import java.util.TimeZone;
+
+public class TestTimestampTZ {
+ @Test
+ public void testConvertToUTC() {
+ String s = "2017-04-14 18:00:00 Asia/Shanghai";
+ TimestampTZ timestampTZ = TimestampTZ.parse(s);
+ Assert.assertEquals("2017-04-14 10:00:00.0 Z", timestampTZ.toString());
+ }
+
+ @Test
+ public void testComparison() {
+ String s1 = "2017-04-14 18:00:00 Asia/Shanghai";
+ String s2 = "2017-04-14 10:00:00.00 GMT";
+ String s3 = "2017-04-14 18:00:00 UTC+08:00";
+ String s4 = "2017-04-14 18:00:00 Europe/London";
+ TimestampTZ tstz1 = TimestampTZ.parse(s1);
+ TimestampTZ tstz2 = TimestampTZ.parse(s2);
+ TimestampTZ tstz3 = TimestampTZ.parse(s3);
+ TimestampTZ tstz4 = TimestampTZ.parse(s4);
+
+ Assert.assertEquals(tstz1, tstz2);
+ Assert.assertEquals(tstz1, tstz3);
+ Assert.assertEquals(tstz1.hashCode(), tstz2.hashCode());
+ Assert.assertEquals(tstz1.hashCode(), tstz3.hashCode());
+ Assert.assertTrue(tstz1.compareTo(tstz4) < 0);
+ }
+
+ @Test
+ public void testDST() {
+ String s1 = "2005-04-03 02:01:00 America/Los_Angeles";
+ String s2 = "2005-04-03 03:01:00 America/Los_Angeles";
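+ // 02:01 local time falls in the spring-forward gap (clocks jump 02:00 -> 03:00 on 2005-04-03), so it resolves to 03:01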
+ Assert.assertEquals(TimestampTZ.parse(s1), TimestampTZ.parse(s2));
+ }
+
+ @Test
+ public void testFromToInstant() {
+ String s1 = "2017-04-14 18:00:00 UTC";
+ TimestampTZ tstz = TimestampTZ.parse(s1);
+ long seconds = tstz.getEpochSecond();
+ int nanos = tstz.getNanos();
+ Assert.assertEquals(tstz, new TimestampTZ(seconds, nanos));
+
+ nanos += 123000000;
+ Assert.assertEquals("2017-04-14 18:00:00.123 Z", new TimestampTZ(seconds, nanos).toString());
+
+ seconds -= 3;
+ Assert.assertEquals("2017-04-14 17:59:57.123 Z", new TimestampTZ(seconds, nanos).toString());
+ }
+
+ @Test
+ public void testVariations() {
+ // Omitting zone or time part is allowed
+ TimestampTZ.parse("2017-01-01 13:33:00");
+ TimestampTZ.parse("2017-11-08 Europe/London");
+ TimestampTZ.parse("2017-05-20");
+ TimestampTZ.parse("2017-11-08GMT");
+ TimestampTZ.parse("2017-10-11 GMT+8:00");
+ TimestampTZ.parse("2017-05-08 07:45:00-3:00");
+ }
+
+ @Test
+ public void testInvalidStrings() {
+ // invalid zone
+ try {
+ TimestampTZ.parse("2017-01-01 13:33:00 foo");
+ Assert.fail("Invalid timezone ID should cause exception");
+ } catch (DateTimeParseException e) {
+ // expected
+ }
+ // invalid time part
+ try {
+ TimestampTZ.parse("2017-01-01 13:33:61");
+ Assert.fail("Invalid time should cause exception");
+ } catch (DateTimeParseException e) {
+ // expected
+ }
+ }
+
+ @Test
+ public void testConvertFromTimestamp() {
+ TimeZone defaultZone = TimeZone.getDefault();
+ try {
+ // Use system zone when converting from timestamp to timestamptz
+ String s = "2017-06-12 23:12:56.34";
+ TimeZone.setDefault(TimeZone.getTimeZone("Europe/London"));
+ TimestampTZ tstz1 = TimestampTZ.convert(Timestamp.valueOf(s));
+ TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
+ TimestampTZ tstz2 = TimestampTZ.convert(Timestamp.valueOf(s));
+ Assert.assertTrue(tstz1.compareTo(tstz2) < 0);
+ } finally {
+ TimeZone.setDefault(defaultZone);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/contrib/src/test/queries/clientnegative/serde_regex.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientnegative/serde_regex.q b/contrib/src/test/queries/clientnegative/serde_regex.q
index a676338..9d27768 100644
--- a/contrib/src/test/queries/clientnegative/serde_regex.q
+++ b/contrib/src/test/queries/clientnegative/serde_regex.q
@@ -8,7 +8,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status INT,
size INT,
@@ -25,7 +25,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status INT,
size INT,
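(A note on the q-file churn from here on: the column name `time` is now consistently quoted with backticks. This is most likely because the patch reserves TIME and ZONE as keywords for the new TIMESTAMP WITH TIME ZONE syntax; see the HiveLexer.g, IdentifiersParser.g and TestSQL11ReservedKeyWordsNegative.java entries in the diffstat above.)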
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/contrib/src/test/queries/clientpositive/serde_regex.q
----------------------------------------------------------------------
diff --git a/contrib/src/test/queries/clientpositive/serde_regex.q b/contrib/src/test/queries/clientpositive/serde_regex.q
index d75d607..8aa3eda 100644
--- a/contrib/src/test/queries/clientpositive/serde_regex.q
+++ b/contrib/src/test/queries/clientpositive/serde_regex.q
@@ -6,7 +6,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size STRING,
@@ -23,7 +23,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size STRING,
@@ -39,4 +39,4 @@ STORED AS TEXTFILE;
LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex;
LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex;
-SELECT * FROM serde_regex ORDER BY time;
\ No newline at end of file
+SELECT * FROM serde_regex ORDER BY `time`;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/contrib/src/test/results/clientnegative/serde_regex.q.out
----------------------------------------------------------------------
diff --git a/contrib/src/test/results/clientnegative/serde_regex.q.out b/contrib/src/test/results/clientnegative/serde_regex.q.out
index 58b1c02..5c5f594 100644
--- a/contrib/src/test/results/clientnegative/serde_regex.q.out
+++ b/contrib/src/test/results/clientnegative/serde_regex.q.out
@@ -9,7 +9,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status INT,
size INT,
@@ -27,7 +27,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status INT,
size INT,
@@ -60,7 +60,7 @@ PREHOOK: query: CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status INT,
size INT,
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/contrib/src/test/results/clientpositive/serde_regex.q.out
----------------------------------------------------------------------
diff --git a/contrib/src/test/results/clientpositive/serde_regex.q.out b/contrib/src/test/results/clientpositive/serde_regex.q.out
index 2984293..1ce89e1 100644
--- a/contrib/src/test/results/clientpositive/serde_regex.q.out
+++ b/contrib/src/test/results/clientpositive/serde_regex.q.out
@@ -3,7 +3,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size STRING,
@@ -21,7 +21,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size STRING,
@@ -54,7 +54,7 @@ PREHOOK: query: CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size STRING,
@@ -73,7 +73,7 @@ POSTHOOK: query: CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size STRING,
@@ -104,11 +104,11 @@ POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" I
POSTHOOK: type: LOAD
#### A masked pattern was here ####
POSTHOOK: Output: default@serde_regex
-PREHOOK: query: SELECT * FROM serde_regex ORDER BY time
+PREHOOK: query: SELECT * FROM serde_regex ORDER BY `time`
PREHOOK: type: QUERY
PREHOOK: Input: default@serde_regex
#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM serde_regex ORDER BY time
+POSTHOOK: query: SELECT * FROM serde_regex ORDER BY `time`
POSTHOOK: type: QUERY
POSTHOOK: Input: default@serde_regex
#### A masked pattern was here ####
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/hbase-handler/src/test/queries/positive/hbase_timestamp.q
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/queries/positive/hbase_timestamp.q b/hbase-handler/src/test/queries/positive/hbase_timestamp.q
index 0350afe..6ae2c30 100644
--- a/hbase-handler/src/test/queries/positive/hbase_timestamp.q
+++ b/hbase-handler/src/test/queries/positive/hbase_timestamp.q
@@ -1,5 +1,5 @@
DROP TABLE hbase_table;
-CREATE TABLE hbase_table (key string, value string, time timestamp)
+CREATE TABLE hbase_table (key string, value string, `time` timestamp)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp");
DESC extended hbase_table;
@@ -7,14 +7,14 @@ FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, value, "2012-02-23 10:14
SELECT * FROM hbase_table;
DROP TABLE hbase_table;
-CREATE TABLE hbase_table (key string, value string, time bigint)
+CREATE TABLE hbase_table (key string, value string, `time` bigint)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp");
FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, value, 1329959754000 WHERE (key % 17) = 0;
-SELECT key, value, cast(time as timestamp) FROM hbase_table;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table;
DROP TABLE hbase_table;
-CREATE TABLE hbase_table (key string, value string, time bigint)
+CREATE TABLE hbase_table (key string, value string, `time` bigint)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp");
insert overwrite table hbase_table select key,value,ts FROM
@@ -25,23 +25,23 @@ insert overwrite table hbase_table select key,value,ts FROM
) T;
explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000;
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` < 200000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` < 200000000000;
explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000;
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000;
explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000;
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000;
explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000;
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000;
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000;
DROP TABLE hbase_table;
-CREATE TABLE hbase_table(key string, value map<string, string>, time timestamp)
+CREATE TABLE hbase_table(key string, value map<string, string>, `time` timestamp)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:,:timestamp");
FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, MAP("name", CONCAT(value, " Jr")), "2012-02-23 10:14:52" WHERE (key % 17) = 0;
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
index 3918121..e719b08 100644
--- a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
@@ -2,13 +2,13 @@ PREHOOK: query: DROP TABLE hbase_table
PREHOOK: type: DROPTABLE
POSTHOOK: query: DROP TABLE hbase_table
POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE hbase_table (key string, value string, time timestamp)
+PREHOOK: query: CREATE TABLE hbase_table (key string, value string, `time` timestamp)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp")
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@hbase_table
-POSTHOOK: query: CREATE TABLE hbase_table (key string, value string, time timestamp)
+POSTHOOK: query: CREATE TABLE hbase_table (key string, value string, `time` timestamp)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp")
POSTHOOK: type: CREATETABLE
@@ -69,13 +69,13 @@ POSTHOOK: query: DROP TABLE hbase_table
POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@hbase_table
POSTHOOK: Output: default@hbase_table
-PREHOOK: query: CREATE TABLE hbase_table (key string, value string, time bigint)
+PREHOOK: query: CREATE TABLE hbase_table (key string, value string, `time` bigint)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp")
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@hbase_table
-POSTHOOK: query: CREATE TABLE hbase_table (key string, value string, time bigint)
+POSTHOOK: query: CREATE TABLE hbase_table (key string, value string, `time` bigint)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp")
POSTHOOK: type: CREATETABLE
@@ -89,11 +89,11 @@ POSTHOOK: query: FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, value,
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@hbase_table
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table
+PREHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table
PREHOOK: type: QUERY
PREHOOK: Input: default@hbase_table
#### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table
+POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@hbase_table
#### A masked pattern was here ####
@@ -125,13 +125,13 @@ POSTHOOK: query: DROP TABLE hbase_table
POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@hbase_table
POSTHOOK: Output: default@hbase_table
-PREHOOK: query: CREATE TABLE hbase_table (key string, value string, time bigint)
+PREHOOK: query: CREATE TABLE hbase_table (key string, value string, `time` bigint)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp")
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@hbase_table
-POSTHOOK: query: CREATE TABLE hbase_table (key string, value string, time bigint)
+POSTHOOK: query: CREATE TABLE hbase_table (key string, value string, `time` bigint)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp")
POSTHOOK: type: CREATETABLE
@@ -156,10 +156,10 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@hbase_table
PREHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` < 200000000000
PREHOOK: type: QUERY
POSTHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` < 200000000000
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
Stage-1 is a root stage
@@ -193,21 +193,21 @@ STAGE PLANS:
Processor Tree:
ListSink
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000
+PREHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` < 200000000000
PREHOOK: type: QUERY
PREHOOK: Input: default@hbase_table
#### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000
+POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` < 200000000000
POSTHOOK: type: QUERY
POSTHOOK: Input: default@hbase_table
#### A masked pattern was here ####
165 val_165 1973-03-03 01:46:40
396 val_396 1973-03-03 01:46:40
PREHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000
PREHOOK: type: QUERY
POSTHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
Stage-1 is a root stage
@@ -241,11 +241,11 @@ STAGE PLANS:
Processor Tree:
ListSink
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000
+PREHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000
PREHOOK: type: QUERY
PREHOOK: Input: default@hbase_table
#### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000
+POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` > 100000000000
POSTHOOK: type: QUERY
POSTHOOK: Input: default@hbase_table
#### A masked pattern was here ####
@@ -254,10 +254,10 @@ POSTHOOK: Input: default@hbase_table
296 val_296 1976-05-03 12:33:20
333 val_333 1976-05-03 12:33:20
PREHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000
PREHOOK: type: QUERY
POSTHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
Stage-1 is a root stage
@@ -291,21 +291,21 @@ STAGE PLANS:
Processor Tree:
ListSink
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000
+PREHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000
PREHOOK: type: QUERY
PREHOOK: Input: default@hbase_table
#### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000
+POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` <= 100000000000
POSTHOOK: type: QUERY
POSTHOOK: Input: default@hbase_table
#### A masked pattern was here ####
165 val_165 1973-03-03 01:46:40
396 val_396 1973-03-03 01:46:40
PREHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000
PREHOOK: type: QUERY
POSTHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000
+SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
Stage-1 is a root stage
@@ -339,11 +339,11 @@ STAGE PLANS:
Processor Tree:
ListSink
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000
+PREHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000
PREHOOK: type: QUERY
PREHOOK: Input: default@hbase_table
#### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000
+POSTHOOK: query: SELECT key, value, cast(`time` as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND `time` >= 200000000000
POSTHOOK: type: QUERY
POSTHOOK: Input: default@hbase_table
#### A masked pattern was here ####
@@ -359,13 +359,13 @@ POSTHOOK: query: DROP TABLE hbase_table
POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@hbase_table
POSTHOOK: Output: default@hbase_table
-PREHOOK: query: CREATE TABLE hbase_table(key string, value map<string, string>, time timestamp)
+PREHOOK: query: CREATE TABLE hbase_table(key string, value map<string, string>, `time` timestamp)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:,:timestamp")
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@hbase_table
-POSTHOOK: query: CREATE TABLE hbase_table(key string, value map<string, string>, time timestamp)
+POSTHOOK: query: CREATE TABLE hbase_table(key string, value map<string, string>, `time` timestamp)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:,:timestamp")
POSTHOOK: type: CREATETABLE
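
The hunks above backquote every reference to the `time` column: this patch turns TIME into a keyword (see the HiveLexer.g change further down), so pre-existing identifiers named time must now be quoted. A minimal helper along these lines -- purely illustrative, not part of the patch -- captures the rule:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class HiveIdent {
  // Keywords introduced by this patch. TIME is not made nonReserved, so
  // identifiers spelled "time" need backticks from now on; ZONE and
  // TIMESTAMPTZ stay nonReserved, so quoting them is optional but harmless.
  private static final Set<String> NEW_KEYWORDS =
      new HashSet<>(Arrays.asList("TIME", "ZONE", "TIMESTAMPTZ"));

  static String quote(String ident) {
    return NEW_KEYWORDS.contains(ident.toUpperCase()) ? "`" + ident + "`" : ident;
  }

  public static void main(String[] args) {
    // Prints: SELECT key, value, `time` FROM hbase_table
    System.out.println("SELECT key, value, " + quote("time") + " FROM hbase_table");
  }
}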
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/queries/clientpositive/orc_format_part.q
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/queries/clientpositive/orc_format_part.q b/itests/hive-blobstore/src/test/queries/clientpositive/orc_format_part.q
index 358eccd..20a0f09 100644
--- a/itests/hive-blobstore/src/test/queries/clientpositive/orc_format_part.q
+++ b/itests/hive-blobstore/src/test/queries/clientpositive/orc_format_part.q
@@ -4,7 +4,7 @@ DROP TABLE src_events;
CREATE TABLE src_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -23,7 +23,7 @@ DROP TABLE orc_events;
CREATE TABLE orc_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -46,22 +46,22 @@ SELECT COUNT(*) FROM orc_events WHERE run_date=20120921;
SELECT COUNT(*) FROM orc_events WHERE run_date=20121121;
INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201211, game_id, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
WHERE SUBSTR(run_date,1,6)='201211';
SHOW PARTITIONS orc_events;
SELECT COUNT(*) FROM orc_events;
INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39;
SELECT COUNT(*) FROM orc_events;
INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change';
SELECT COUNT(*) FROM orc_events;
INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change';
SELECT COUNT(*) FROM orc_events;
\ No newline at end of file
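
These blobstore .q files mix static and dynamic partition columns in one INSERT: run_date is fixed while game_id and event_name are resolved per row from the tail of the SELECT list. A sketch of driving the same pattern over Hive JDBC, assuming a local HiveServer2 and the Hive JDBC driver on the classpath (URL and settings are illustrative, not taken from the tests):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class DynamicPartitionInsert {
  public static void main(String[] args) throws Exception {
    try (Connection conn =
             DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
         Statement stmt = conn.createStatement()) {
      // Enable dynamic partitions; nonstrict also allows all-dynamic inserts.
      stmt.execute("SET hive.exec.dynamic.partition=true");
      stmt.execute("SET hive.exec.dynamic.partition.mode=nonstrict");
      // run_date is static; game_id and event_name come from the last two
      // SELECT columns, creating one partition per distinct value pair.
      stmt.execute("INSERT OVERWRITE TABLE orc_events"
          + " PARTITION (run_date=201211, game_id, event_name)"
          + " SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name"
          + " FROM src_events WHERE SUBSTR(run_date,1,6)='201211'");
    }
  }
}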
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/queries/clientpositive/orc_nonstd_partitions_loc.q
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/queries/clientpositive/orc_nonstd_partitions_loc.q b/itests/hive-blobstore/src/test/queries/clientpositive/orc_nonstd_partitions_loc.q
index c462538..7e726fb 100644
--- a/itests/hive-blobstore/src/test/queries/clientpositive/orc_nonstd_partitions_loc.q
+++ b/itests/hive-blobstore/src/test/queries/clientpositive/orc_nonstd_partitions_loc.q
@@ -4,7 +4,7 @@ DROP TABLE src_events;
CREATE TABLE src_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -23,7 +23,7 @@ DROP TABLE orc_events;
CREATE TABLE orc_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -47,12 +47,12 @@ SELECT COUNT(*) FROM orc_events;
ALTER TABLE orc_events ADD PARTITION (run_date=201211, game_id=39, event_name='hq_change')
LOCATION '${hiveconf:test.blobstore.path.unique}/orc_nonstd_partitions_loc/orc_nonstd_loc/ns-part-1/';
INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201211';
SHOW PARTITIONS orc_events;
SELECT COUNT(*) FROM orc_events;
INSERT INTO TABLE orc_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201211';
SHOW PARTITIONS orc_events;
SELECT COUNT(*) FROM orc_events;
@@ -63,10 +63,10 @@ SET hive.merge.mapfiles=false;
ALTER TABLE orc_events ADD PARTITION (run_date=201209, game_id=39, event_name='hq_change')
LOCATION '${hiveconf:test.blobstore.path.unique}/orc_nonstd_partitions_loc/orc_nonstd_loc/ns-part-2/';
INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209';
INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209';
SHOW PARTITIONS orc_events;
SELECT COUNT(*) FROM orc_events;
@@ -82,7 +82,7 @@ SET hive.merge.mapfiles=true;
ALTER TABLE orc_events ADD PARTITION (run_date=201207, game_id=39, event_name='hq_change')
LOCATION '${hiveconf:test.blobstore.path.unique}/orc_nonstd_partitions_loc/orc_nonstd_loc/ns-part-3/';
INSERT INTO TABLE orc_events PARTITION (run_date=201207, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209';
SHOW PARTITIONS orc_events;
SELECT COUNT(*) FROM orc_events;
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_format_part.q
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_format_part.q b/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_format_part.q
index c563d3a..1aa8c91 100644
--- a/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_format_part.q
+++ b/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_format_part.q
@@ -4,7 +4,7 @@ DROP TABLE src_events;
CREATE TABLE src_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -23,7 +23,7 @@ DROP TABLE rcfile_events;
CREATE TABLE rcfile_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -46,22 +46,22 @@ SELECT COUNT(*) FROM rcfile_events WHERE run_date=20120921;
SELECT COUNT(*) FROM rcfile_events WHERE run_date=20121121;
INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201211, game_id, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
WHERE SUBSTR(run_date,1,6)='201211';
SHOW PARTITIONS rcfile_events;
SELECT COUNT(*) FROM rcfile_events;
INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39;
SELECT COUNT(*) FROM rcfile_events;
INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change';
SELECT COUNT(*) FROM rcfile_events;
INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change';
SELECT COUNT(*) FROM rcfile_events;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_nonstd_partitions_loc.q
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_nonstd_partitions_loc.q b/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_nonstd_partitions_loc.q
index d17c281..a37317f 100644
--- a/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_nonstd_partitions_loc.q
+++ b/itests/hive-blobstore/src/test/queries/clientpositive/rcfile_nonstd_partitions_loc.q
@@ -4,7 +4,7 @@ DROP TABLE src_events;
CREATE TABLE src_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -23,7 +23,7 @@ DROP TABLE rcfile_events;
CREATE TABLE rcfile_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -47,12 +47,12 @@ SELECT COUNT(*) FROM rcfile_events;
ALTER TABLE rcfile_events ADD PARTITION (run_date=201211, game_id=39, event_name='hq_change')
LOCATION '${hiveconf:test.blobstore.path.unique}/rcfile_nonstd_partitions_loc/rcfile_nonstd_loc/ns-part-1/';
INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201211';
SHOW PARTITIONS rcfile_events;
SELECT COUNT(*) FROM rcfile_events;
INSERT INTO TABLE rcfile_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201211';
SHOW PARTITIONS rcfile_events;
SELECT COUNT(*) FROM rcfile_events;
@@ -63,10 +63,10 @@ SET hive.merge.mapfiles=false;
ALTER TABLE rcfile_events ADD PARTITION (run_date=201209, game_id=39, event_name='hq_change')
LOCATION '${hiveconf:test.blobstore.path.unique}/rcfile_nonstd_partitions_loc/rcfile_nonstd_loc/ns-part-2/';
INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209';
INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209';
SHOW PARTITIONS rcfile_events;
SELECT COUNT(*) FROM rcfile_events;
@@ -82,7 +82,7 @@ SET hive.merge.mapfiles=true;
ALTER TABLE rcfile_events ADD PARTITION (run_date=201207, game_id=39, event_name='hq_change')
LOCATION '${hiveconf:test.blobstore.path.unique}/rcfile_nonstd_partitions_loc/rcfile_nonstd_loc/ns-part-3/';
INSERT INTO TABLE rcfile_events PARTITION(run_date=201207,game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209';
SHOW PARTITIONS rcfile_events;
SELECT COUNT(*) FROM rcfile_events;
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/results/clientpositive/orc_format_part.q.out
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/results/clientpositive/orc_format_part.q.out b/itests/hive-blobstore/src/test/results/clientpositive/orc_format_part.q.out
index 5d1319f..1ef9810 100644
--- a/itests/hive-blobstore/src/test/results/clientpositive/orc_format_part.q.out
+++ b/itests/hive-blobstore/src/test/results/clientpositive/orc_format_part.q.out
@@ -5,7 +5,7 @@ POSTHOOK: type: DROPTABLE
PREHOOK: query: CREATE TABLE src_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -25,7 +25,7 @@ PREHOOK: Output: default@src_events
POSTHOOK: query: CREATE TABLE src_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -57,7 +57,7 @@ POSTHOOK: type: DROPTABLE
PREHOOK: query: CREATE TABLE orc_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -75,7 +75,7 @@ PREHOOK: Output: default@orc_events
POSTHOOK: query: CREATE TABLE orc_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -163,13 +163,13 @@ POSTHOOK: Input: default@orc_events
#### A masked pattern was here ####
100
PREHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201211, game_id, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
WHERE SUBSTR(run_date,1,6)='201211'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@orc_events@run_date=201211
POSTHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201211, game_id, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
WHERE SUBSTR(run_date,1,6)='201211'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -202,13 +202,13 @@ POSTHOOK: Input: default@orc_events
#### A masked pattern was here ####
300
PREHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@orc_events@run_date=201209/game_id=39
POSTHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -231,13 +231,13 @@ POSTHOOK: Input: default@orc_events
#### A masked pattern was here ####
350
PREHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@orc_events@run_date=201209/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -252,13 +252,13 @@ POSTHOOK: Input: default@orc_events
#### A masked pattern was here ####
400
PREHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@orc_events@run_date=201209/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/results/clientpositive/orc_nonstd_partitions_loc.q.out
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/results/clientpositive/orc_nonstd_partitions_loc.q.out b/itests/hive-blobstore/src/test/results/clientpositive/orc_nonstd_partitions_loc.q.out
index 70e72f7..9de4190 100644
--- a/itests/hive-blobstore/src/test/results/clientpositive/orc_nonstd_partitions_loc.q.out
+++ b/itests/hive-blobstore/src/test/results/clientpositive/orc_nonstd_partitions_loc.q.out
@@ -5,7 +5,7 @@ POSTHOOK: type: DROPTABLE
PREHOOK: query: CREATE TABLE src_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -25,7 +25,7 @@ PREHOOK: Output: default@src_events
POSTHOOK: query: CREATE TABLE src_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -57,7 +57,7 @@ POSTHOOK: type: DROPTABLE
PREHOOK: query: CREATE TABLE orc_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -75,7 +75,7 @@ PREHOOK: Output: default@orc_events
POSTHOOK: query: CREATE TABLE orc_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -156,13 +156,13 @@ POSTHOOK: Input: ### test.blobstore.path ###/orc_nonstd_partitions_loc/orc_nonst
POSTHOOK: Output: default@orc_events
POSTHOOK: Output: default@orc_events@run_date=201211/game_id=39/event_name=hq_change
PREHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201211'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@orc_events@run_date=201211/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201211'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -187,13 +187,13 @@ POSTHOOK: Input: default@orc_events
#### A masked pattern was here ####
300
PREHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201211'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@orc_events@run_date=201211/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201211'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -229,13 +229,13 @@ POSTHOOK: Input: ### test.blobstore.path ###/orc_nonstd_partitions_loc/orc_nonst
POSTHOOK: Output: default@orc_events
POSTHOOK: Output: default@orc_events@run_date=201209/game_id=39/event_name=hq_change
PREHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@orc_events@run_date=201209/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT OVERWRITE TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -249,13 +249,13 @@ POSTHOOK: Lineage: orc_events PARTITION(run_date=201209,game_id=39,event_name=hq
POSTHOOK: Lineage: orc_events PARTITION(run_date=201209,game_id=39,event_name=hq_change).uid SIMPLE [(src_events)src_events.FieldSchema(name:uid, type:bigint, comment:null), ]
POSTHOOK: Lineage: orc_events PARTITION(run_date=201209,game_id=39,event_name=hq_change).user_id SIMPLE [(src_events)src_events.FieldSchema(name:user_id, type:bigint, comment:null), ]
PREHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@orc_events@run_date=201209/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -356,13 +356,13 @@ POSTHOOK: Input: ### test.blobstore.path ###/orc_nonstd_partitions_loc/orc_nonst
POSTHOOK: Output: default@orc_events
POSTHOOK: Output: default@orc_events@run_date=201207/game_id=39/event_name=hq_change
PREHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201207, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@orc_events@run_date=201207/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT INTO TABLE orc_events PARTITION (run_date=201207, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/results/clientpositive/rcfile_format_part.q.out
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/results/clientpositive/rcfile_format_part.q.out b/itests/hive-blobstore/src/test/results/clientpositive/rcfile_format_part.q.out
index bed10ab..defca3b 100644
--- a/itests/hive-blobstore/src/test/results/clientpositive/rcfile_format_part.q.out
+++ b/itests/hive-blobstore/src/test/results/clientpositive/rcfile_format_part.q.out
@@ -5,7 +5,7 @@ POSTHOOK: type: DROPTABLE
PREHOOK: query: CREATE TABLE src_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -25,7 +25,7 @@ PREHOOK: Output: default@src_events
POSTHOOK: query: CREATE TABLE src_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -57,7 +57,7 @@ POSTHOOK: type: DROPTABLE
PREHOOK: query: CREATE TABLE rcfile_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -75,7 +75,7 @@ PREHOOK: Output: default@rcfile_events
POSTHOOK: query: CREATE TABLE rcfile_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -163,13 +163,13 @@ POSTHOOK: Input: default@rcfile_events
#### A masked pattern was here ####
100
PREHOOK: query: INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201211, game_id, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
WHERE SUBSTR(run_date,1,6)='201211'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@rcfile_events@run_date=201211
POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201211, game_id, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,game_id,event_name FROM src_events
WHERE SUBSTR(run_date,1,6)='201211'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -202,13 +202,13 @@ POSTHOOK: Input: default@rcfile_events
#### A masked pattern was here ####
300
PREHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@rcfile_events@run_date=201209/game_id=39
POSTHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name)
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid,event_name FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -231,13 +231,13 @@ POSTHOOK: Input: default@rcfile_events
#### A masked pattern was here ####
350
PREHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@rcfile_events@run_date=201209/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -252,13 +252,13 @@ POSTHOOK: Input: default@rcfile_events
#### A masked pattern was here ####
400
PREHOOK: query: INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@rcfile_events@run_date=201209/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209' AND game_id=39 AND event_name='hq_change'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/itests/hive-blobstore/src/test/results/clientpositive/rcfile_nonstd_partitions_loc.q.out
----------------------------------------------------------------------
diff --git a/itests/hive-blobstore/src/test/results/clientpositive/rcfile_nonstd_partitions_loc.q.out b/itests/hive-blobstore/src/test/results/clientpositive/rcfile_nonstd_partitions_loc.q.out
index c6442f9..5db9c7e 100644
--- a/itests/hive-blobstore/src/test/results/clientpositive/rcfile_nonstd_partitions_loc.q.out
+++ b/itests/hive-blobstore/src/test/results/clientpositive/rcfile_nonstd_partitions_loc.q.out
@@ -5,7 +5,7 @@ POSTHOOK: type: DROPTABLE
PREHOOK: query: CREATE TABLE src_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -25,7 +25,7 @@ PREHOOK: Output: default@src_events
POSTHOOK: query: CREATE TABLE src_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -57,7 +57,7 @@ POSTHOOK: type: DROPTABLE
PREHOOK: query: CREATE TABLE rcfile_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -75,7 +75,7 @@ PREHOOK: Output: default@rcfile_events
POSTHOOK: query: CREATE TABLE rcfile_events
(
log_id BIGINT,
- time BIGINT,
+ `time` BIGINT,
uid BIGINT,
user_id BIGINT,
type INT,
@@ -156,13 +156,13 @@ POSTHOOK: Input: ### test.blobstore.path ###/rcfile_nonstd_partitions_loc/rcfile
POSTHOOK: Output: default@rcfile_events
POSTHOOK: Output: default@rcfile_events@run_date=201211/game_id=39/event_name=hq_change
PREHOOK: query: INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201211'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@rcfile_events@run_date=201211/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT OVERWRITE TABLE rcfile_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201211'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -187,13 +187,13 @@ POSTHOOK: Input: default@rcfile_events
#### A masked pattern was here ####
300
PREHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201211'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@rcfile_events@run_date=201211/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201211, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201211'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -229,13 +229,13 @@ POSTHOOK: Input: ### test.blobstore.path ###/rcfile_nonstd_partitions_loc/rcfile
POSTHOOK: Output: default@rcfile_events
POSTHOOK: Output: default@rcfile_events@run_date=201209/game_id=39/event_name=hq_change
PREHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@rcfile_events@run_date=201209/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -249,13 +249,13 @@ POSTHOOK: Lineage: rcfile_events PARTITION(run_date=201209,game_id=39,event_name
POSTHOOK: Lineage: rcfile_events PARTITION(run_date=201209,game_id=39,event_name=hq_change).uid SIMPLE [(src_events)src_events.FieldSchema(name:uid, type:bigint, comment:null), ]
POSTHOOK: Lineage: rcfile_events PARTITION(run_date=201209,game_id=39,event_name=hq_change).user_id SIMPLE [(src_events)src_events.FieldSchema(name:user_id, type:bigint, comment:null), ]
PREHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@rcfile_events@run_date=201209/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT INTO TABLE rcfile_events PARTITION (run_date=201209, game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
@@ -376,13 +376,13 @@ POSTHOOK: Input: ### test.blobstore.path ###/rcfile_nonstd_partitions_loc/rcfile
POSTHOOK: Output: default@rcfile_events
POSTHOOK: Output: default@rcfile_events@run_date=201207/game_id=39/event_name=hq_change
PREHOOK: query: INSERT INTO TABLE rcfile_events PARTITION(run_date=201207,game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209'
PREHOOK: type: QUERY
PREHOOK: Input: default@src_events
PREHOOK: Output: default@rcfile_events@run_date=201207/game_id=39/event_name=hq_change
POSTHOOK: query: INSERT INTO TABLE rcfile_events PARTITION(run_date=201207,game_id=39, event_name='hq_change')
-SELECT log_id,time,uid,user_id,type,event_data,session_id,full_uid FROM src_events
+SELECT log_id,`time`,uid,user_id,type,event_data,session_id,full_uid FROM src_events
WHERE SUBSTR(run_date,1,6)='201209'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src_events
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
----------------------------------------------------------------------
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
index ade1900..6742423 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
@@ -47,6 +47,7 @@ import java.util.Map;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.serde2.thrift.Type;
import org.apache.hive.service.cli.TableSchema;
@@ -442,6 +443,8 @@ public abstract class HiveBaseResultSet implements ResultSet {
return value;
case TIMESTAMP_TYPE:
return Timestamp.valueOf((String) value);
+ case TIMESTAMPTZ_TYPE:
+ return TimestampTZ.parse((String) value);
case DECIMAL_TYPE:
return new BigDecimal((String)value);
case DATE_TYPE:
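
In the getObject path above, a TIMESTAMPTZ column arrives from the server as a string and is parsed into Hive's TimestampTZ, which this patch builds on java.time (the serde changes import DateTimeParseException and expose epoch seconds/nanos). A self-contained analogue using plain ZonedDateTime; the literal format here is illustrative, not necessarily the one TimestampTZ.parse mandates:

import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

public class ParseTimestampTZ {
  public static void main(String[] args) {
    // Pattern with an explicit zone-id field (VV); a stand-in for the
    // parsing TimestampTZ.parse performs on the driver side.
    DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss VV");
    ZonedDateTime zdt = ZonedDateTime.parse("2017-05-11 06:47:22 UTC", fmt);
    System.out.println(zdt.toInstant()); // 2017-05-11T06:47:22Z
  }
}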
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
----------------------------------------------------------------------
diff --git a/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java b/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
index 38918f0..bf42f0d 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
@@ -26,6 +26,7 @@ import java.sql.Types;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.thrift.Type;
@@ -94,6 +95,8 @@ public class JdbcColumn {
return Double.class.getName();
case Types.TIMESTAMP:
return Timestamp.class.getName();
+ case Types.TIMESTAMP_WITH_TIMEZONE:
+ return TimestampTZ.class.getName();
case Types.DECIMAL:
return BigInteger.class.getName();
case Types.BINARY:
@@ -142,6 +145,8 @@ public class JdbcColumn {
return Type.DATE_TYPE;
} else if ("timestamp".equalsIgnoreCase(type)) {
return Type.TIMESTAMP_TYPE;
+ } else if (serdeConstants.TIMESTAMPTZ_TYPE_NAME.equalsIgnoreCase(type)) {
+ return Type.TIMESTAMPTZ_TYPE;
} else if ("interval_year_month".equalsIgnoreCase(type)) {
return Type.INTERVAL_YEAR_MONTH_TYPE;
} else if ("interval_day_time".equalsIgnoreCase(type)) {
@@ -195,6 +200,8 @@ public class JdbcColumn {
return serdeConstants.BIGINT_TYPE_NAME;
} else if ("timestamp".equalsIgnoreCase(type)) {
return serdeConstants.TIMESTAMP_TYPE_NAME;
+ } else if (serdeConstants.TIMESTAMPTZ_TYPE_NAME.equalsIgnoreCase(type)) {
+ return serdeConstants.TIMESTAMPTZ_TYPE_NAME;
} else if ("date".equalsIgnoreCase(type)) {
return serdeConstants.DATE_TYPE_NAME;
} else if ("interval_year_month".equalsIgnoreCase(type)) {
@@ -240,6 +247,7 @@ public class JdbcColumn {
case Types.DATE:
return 10;
case Types.TIMESTAMP:
+ case Types.TIMESTAMP_WITH_TIMEZONE:
return columnPrecision(hiveType, columnAttributes);
// see http://download.oracle.com/javase/6/docs/api/constant-values.html#java.lang.Float.MAX_EXPONENT
@@ -294,6 +302,8 @@ public class JdbcColumn {
return 10;
case Types.TIMESTAMP:
return 29;
+ case Types.TIMESTAMP_WITH_TIMEZONE:
+ return 31;
case Types.DECIMAL:
return columnAttributes.precision;
case Types.OTHER:
@@ -338,6 +348,7 @@ public class JdbcColumn {
case Types.DOUBLE:
return 15;
case Types.TIMESTAMP:
+ case Types.TIMESTAMP_WITH_TIMEZONE:
return 9;
case Types.DECIMAL:
return columnAttributes.scale;
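
The JdbcColumn changes give TIMESTAMP WITH TIME ZONE a display size and precision of 31 (versus 29 for plain TIMESTAMP) and the same scale of 9, i.e. nanosecond fractional digits. Restated as a tiny standalone mapping for reference; java.sql.Types.TIMESTAMP_WITH_TIMEZONE has existed since JDBC 4.2:

import java.sql.Types;

public class TimestampTZMeta {
  static int precision(int sqlType) {
    switch (sqlType) {
      case Types.TIMESTAMP:               return 29; // yyyy-mm-dd hh:mm:ss.fffffffff
      case Types.TIMESTAMP_WITH_TIMEZONE: return 31;
      default: throw new IllegalArgumentException("unhandled type: " + sqlType);
    }
  }

  static int scale(int sqlType) {
    // Both timestamp flavors carry up to 9 fractional-second digits.
    return (sqlType == Types.TIMESTAMP
        || sqlType == Types.TIMESTAMP_WITH_TIMEZONE) ? 9 : 0;
  }

  public static void main(String[] args) {
    System.out.println(precision(Types.TIMESTAMP_WITH_TIMEZONE)); // 31
    System.out.println(scale(Types.TIMESTAMP_WITH_TIMEZONE));     // 9
  }
}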
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index bf18a8d..9795f3e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -402,6 +402,7 @@ public final class FunctionRegistry {
system.registerGenericUDF(serdeConstants.DATE_TYPE_NAME, GenericUDFToDate.class);
system.registerGenericUDF(serdeConstants.TIMESTAMP_TYPE_NAME, GenericUDFTimestamp.class);
+ system.registerGenericUDF(serdeConstants.TIMESTAMPTZ_TYPE_NAME, GenericUDFToTimestampTZ.class);
system.registerGenericUDF(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME, GenericUDFToIntervalYearMonth.class);
system.registerGenericUDF(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME, GenericUDFToIntervalDayTime.class);
system.registerGenericUDF(serdeConstants.BINARY_TYPE_NAME, GenericUDFToBinary.class);
@@ -1536,7 +1537,8 @@ public final class FunctionRegistry {
udfClass == UDFToShort.class || udfClass == UDFToString.class ||
udfClass == GenericUDFToVarchar.class || udfClass == GenericUDFToChar.class ||
udfClass == GenericUDFTimestamp.class || udfClass == GenericUDFToBinary.class ||
- udfClass == GenericUDFToDate.class || udfClass == GenericUDFToDecimal.class;
+ udfClass == GenericUDFToDate.class || udfClass == GenericUDFToDecimal.class ||
+ udfClass == GenericUDFToTimestampTZ.class;
}
/**
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
index f8b55da..af5e90f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
@@ -32,6 +32,7 @@ import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.llap.LlapDaemonInfo;
import org.apache.hadoop.hive.ql.CompilationOpContext;
@@ -471,6 +472,7 @@ public class GroupByOperator extends Operator<GroupByDesc> {
keyPositionsSize.add(new Integer(pos));
return javaObjectOverHead;
case TIMESTAMP:
+ case TIMESTAMPTZ:
return javaObjectOverHead + javaSizePrimitiveType;
default:
return javaSizeUnknownType;
@@ -503,7 +505,7 @@ public class GroupByOperator extends Operator<GroupByDesc> {
return javaSizePrimitiveType;
}
- if (c.isInstance(new Timestamp(0))){
+ if (c.isInstance(new Timestamp(0)) || c.isInstance(new TimestampTZ())) {
return javaObjectOverHead + javaSizePrimitiveType;
}
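
The GroupByOperator hunks extend hash-aggregation memory accounting: a TimestampTZ key, like Timestamp, is treated as a fixed-size object costing one object header plus one primitive slot. A stripped-down sketch of that estimate; the constants are placeholders, since Hive reads the real values from JavaDataModel:

import java.sql.Timestamp;
import java.time.ZonedDateTime;

public class KeySizeEstimator {
  // Placeholder constants; Hive derives these from JavaDataModel.
  static final int JAVA_OBJECT_OVERHEAD = 16;
  static final int JAVA_SIZE_PRIMITIVE_TYPE = 8;
  static final int JAVA_SIZE_UNKNOWN_TYPE = 64;

  static int estimate(Object key) {
    // ZonedDateTime stands in for Hive's TimestampTZ here.
    if (key instanceof Timestamp || key instanceof ZonedDateTime) {
      return JAVA_OBJECT_OVERHEAD + JAVA_SIZE_PRIMITIVE_TYPE;
    }
    return JAVA_SIZE_UNKNOWN_TYPE;
  }

  public static void main(String[] args) {
    System.out.println(estimate(new Timestamp(0)));      // 24
    System.out.println(estimate(ZonedDateTime.now()));   // 24
    System.out.println(estimate("variable-length key")); // 64
  }
}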
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
index 01a652d..a29dd85 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SerializationUtilities.java
@@ -38,6 +38,7 @@ import java.util.Properties;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.common.CopyOnFirstWriteProperties;
import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.exec.vector.VectorFileSinkOperator;
@@ -223,6 +224,7 @@ public class SerializationUtilities {
KryoWithHooks kryo = new KryoWithHooks();
kryo.register(java.sql.Date.class, new SqlDateSerializer());
kryo.register(java.sql.Timestamp.class, new TimestampSerializer());
+ kryo.register(TimestampTZ.class, new TimestampTZSerializer());
kryo.register(Path.class, new PathSerializer());
kryo.register(Arrays.asList("").getClass(), new ArraysAsListSerializer());
kryo.register(CopyOnFirstWriteProperties.class, new CopyOnFirstWritePropertiesSerializer());
@@ -307,6 +309,22 @@ public class SerializationUtilities {
}
}
+ private static class TimestampTZSerializer extends com.esotericsoftware.kryo.Serializer<TimestampTZ> {
+
+ @Override
+ public void write(Kryo kryo, Output output, TimestampTZ object) {
+ output.writeLong(object.getEpochSecond());
+ output.writeInt(object.getNanos());
+ }
+
+ @Override
+ public TimestampTZ read(Kryo kryo, Input input, Class<TimestampTZ> type) {
+ long seconds = input.readLong();
+ int nanos = input.readInt();
+ return new TimestampTZ(seconds, nanos);
+ }
+ }
+
/**
* Custom Kryo serializer for sql date, otherwise Kryo gets confused between
* java.sql.Date and java.util.Date while deserializing
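
The TimestampTZSerializer above persists a value as its (epochSecond, nanos) pair. A runnable round trip through the same pre-Kryo-5 Serializer API the patch uses, with a stand-in EpochNanos class in place of Hive's TimestampTZ:

import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;

public class KryoRoundTrip {
  static class EpochNanos {
    final long epochSecond;
    final int nanos;
    EpochNanos(long epochSecond, int nanos) {
      this.epochSecond = epochSecond;
      this.nanos = nanos;
    }
  }

  static class EpochNanosSerializer extends Serializer<EpochNanos> {
    @Override
    public void write(Kryo kryo, Output output, EpochNanos object) {
      // Same wire shape as TimestampTZSerializer: a long then an int.
      output.writeLong(object.epochSecond);
      output.writeInt(object.nanos);
    }

    @Override
    public EpochNanos read(Kryo kryo, Input input, Class<EpochNanos> type) {
      return new EpochNanos(input.readLong(), input.readInt());
    }
  }

  public static void main(String[] args) {
    Kryo kryo = new Kryo();
    kryo.register(EpochNanos.class, new EpochNanosSerializer());
    Output out = new Output(64);
    kryo.writeObject(out, new EpochNanos(1494485242L, 0));
    Input in = new Input(out.toBytes());
    EpochNanos back = kryo.readObject(in, EpochNanos.class);
    System.out.println(back.epochSecond + "." + back.nanos); // 1494485242.0
  }
}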
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
index 38308c9..2df7588 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
@@ -200,6 +200,9 @@ public class TypeConverter {
case TIMESTAMP:
convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP);
break;
+ case TIMESTAMPTZ:
+ convertedType = dtFactory.createSqlType(SqlTypeName.OTHER);
+ break;
case INTERVAL_YEAR_MONTH:
convertedType = dtFactory.createSqlIntervalType(
new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1,1)));
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 0cf9205..77bc12c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -195,6 +195,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
TokenToTypeName.put(HiveParser.TOK_DATE, serdeConstants.DATE_TYPE_NAME);
TokenToTypeName.put(HiveParser.TOK_DATETIME, serdeConstants.DATETIME_TYPE_NAME);
TokenToTypeName.put(HiveParser.TOK_TIMESTAMP, serdeConstants.TIMESTAMP_TYPE_NAME);
+ TokenToTypeName.put(HiveParser.TOK_TIMESTAMPTZ, serdeConstants.TIMESTAMPTZ_TYPE_NAME);
TokenToTypeName.put(HiveParser.TOK_INTERVAL_YEAR_MONTH, serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
TokenToTypeName.put(HiveParser.TOK_INTERVAL_DAY_TIME, serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
TokenToTypeName.put(HiveParser.TOK_DECIMAL, serdeConstants.DECIMAL_TYPE_NAME);
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index 190b66b..cebe441 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -130,6 +130,9 @@ KW_PRECISION: 'PRECISION';
KW_DATE: 'DATE';
KW_DATETIME: 'DATETIME';
KW_TIMESTAMP: 'TIMESTAMP';
+KW_TIMESTAMPTZ: 'TIMESTAMPTZ';
+KW_TIME: 'TIME';
+KW_ZONE: 'ZONE';
KW_INTERVAL: 'INTERVAL';
KW_DECIMAL: 'DECIMAL' | 'DEC';
KW_STRING: 'STRING';
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 3136c93..218fa8a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -123,6 +123,7 @@ TOK_DATELITERAL;
TOK_DATETIME;
TOK_TIMESTAMP;
TOK_TIMESTAMPLITERAL;
+TOK_TIMESTAMPTZ;
TOK_INTERVAL_YEAR_MONTH;
TOK_INTERVAL_YEAR_MONTH_LITERAL;
TOK_INTERVAL_DAY_TIME;
@@ -490,6 +491,9 @@ import org.apache.hadoop.hive.conf.HiveConf;
xlateMap.put("KW_DATE", "DATE");
xlateMap.put("KW_DATETIME", "DATETIME");
xlateMap.put("KW_TIMESTAMP", "TIMESTAMP");
+ xlateMap.put("KW_TIMESTAMPTZ", "TIMESTAMPTZ");
+ xlateMap.put("KW_TIME", "TIME");
+ xlateMap.put("KW_ZONE", "ZONE");
xlateMap.put("KW_STRING", "STRING");
xlateMap.put("KW_BINARY", "BINARY");
xlateMap.put("KW_ARRAY", "ARRAY");
@@ -2356,6 +2360,8 @@ primitiveType
| KW_DATE -> TOK_DATE
| KW_DATETIME -> TOK_DATETIME
| KW_TIMESTAMP -> TOK_TIMESTAMP
+ | KW_TIMESTAMPTZ -> TOK_TIMESTAMPTZ
+ | KW_TIMESTAMP KW_WITH KW_TIME KW_ZONE -> TOK_TIMESTAMPTZ
// Uncomment to allow intervals as table column types
//| KW_INTERVAL KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH
//| KW_INTERVAL KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME
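
Per the primitiveType rule above, TIMESTAMPTZ and the SQL-standard spelling TIMESTAMP WITH TIME ZONE both reduce to TOK_TIMESTAMPTZ, so the two DDL forms below are interchangeable. The IdentifiersParser.g hunk that follows keeps ZONE and TIMESTAMPTZ usable as plain identifiers, while TIME stays reserved, which is why the test files backquote it. Table names here are illustrative only:

public class TimestampTZDdl {
  public static void main(String[] args) {
    // Both statements yield a column typed TOK_TIMESTAMPTZ after parsing.
    String[] equivalent = {
        "CREATE TABLE t1 (ts TIMESTAMPTZ)",
        "CREATE TABLE t2 (ts TIMESTAMP WITH TIME ZONE)"
    };
    for (String ddl : equivalent) {
      System.out.println(ddl);
    }
  }
}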
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 1c78c1a..003e09f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -818,6 +818,8 @@ nonReserved
| KW_EXPRESSION
| KW_DETAIL
| KW_WAIT
+ | KW_ZONE
+ | KW_TIMESTAMPTZ
;
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index 82141be..f678d0b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -747,6 +747,8 @@ public class TypeCheckProcFactory {
serdeConstants.DATE_TYPE_NAME);
conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMP,
serdeConstants.TIMESTAMP_TYPE_NAME);
+ conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMPTZ,
+ serdeConstants.TIMESTAMPTZ_TYPE_NAME);
conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_YEAR_MONTH,
serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_DAY_TIME,
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
index bda2050..76f7dae 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
@@ -100,6 +100,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjec
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampTZObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -743,7 +744,8 @@ public class StatsUtils {
} else if (colTypeLowerCase.equals(serdeConstants.BINARY_TYPE_NAME)) {
cs.setAvgColLen(csd.getBinaryStats().getAvgColLen());
cs.setNumNulls(csd.getBinaryStats().getNumNulls());
- } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
+ } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME) ||
+ colTypeLowerCase.equals(serdeConstants.TIMESTAMPTZ_TYPE_NAME)) {
cs.setAvgColLen(JavaDataModel.get().lengthOfTimestamp());
} else if (colTypeLowerCase.startsWith(serdeConstants.DECIMAL_TYPE_NAME)) {
cs.setAvgColLen(JavaDataModel.get().lengthOfDecimal());
@@ -1042,7 +1044,8 @@ public class StatsUtils {
|| colTypeLowerCase.equals(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME)
|| colTypeLowerCase.equals("long")) {
return JavaDataModel.get().primitive2();
- } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
+ } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME) ||
+ colTypeLowerCase.equals(serdeConstants.TIMESTAMPTZ_TYPE_NAME)) {
return JavaDataModel.get().lengthOfTimestamp();
} else if (colTypeLowerCase.equals(serdeConstants.DATE_TYPE_NAME)) {
return JavaDataModel.get().lengthOfDate();
@@ -1079,7 +1082,8 @@ public class StatsUtils {
return JavaDataModel.get().lengthForByteArrayOfSize(length);
} else if (colTypeLowerCase.equals(serdeConstants.BOOLEAN_TYPE_NAME)) {
return JavaDataModel.get().lengthForBooleanArrayOfSize(length);
- } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
+ } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME) ||
+ colTypeLowerCase.equals(serdeConstants.TIMESTAMPTZ_TYPE_NAME)) {
return JavaDataModel.get().lengthForTimestampArrayOfSize(length);
} else if (colTypeLowerCase.equals(serdeConstants.DATE_TYPE_NAME)) {
return JavaDataModel.get().lengthForDateArrayOfSize(length);
@@ -1164,7 +1168,8 @@ public class StatsUtils {
return JavaDataModel.get().primitive2();
} else if (oi instanceof WritableShortObjectInspector) {
return JavaDataModel.get().primitive1();
- } else if (oi instanceof WritableTimestampObjectInspector) {
+ } else if (oi instanceof WritableTimestampObjectInspector ||
+ oi instanceof WritableTimestampTZObjectInspector) {
return JavaDataModel.get().lengthOfTimestamp();
}
@@ -1543,7 +1548,8 @@ public class StatsUtils {
} else if (colTypeLowerCase.equals(serdeConstants.BINARY_TYPE_NAME)) {
int acl = (int) Math.round(cs.getAvgColLen());
sizeOf = JavaDataModel.get().lengthForByteArrayOfSize(acl);
- } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
+ } else if (colTypeLowerCase.equals(serdeConstants.TIMESTAMP_TYPE_NAME) ||
+ colTypeLowerCase.equals(serdeConstants.TIMESTAMPTZ_TYPE_NAME)) {
sizeOf = JavaDataModel.get().lengthOfTimestamp();
} else if (colTypeLowerCase.startsWith(serdeConstants.DECIMAL_TYPE_NAME)) {
sizeOf = JavaDataModel.get().lengthOfDecimal();
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
index 7cdf2c3..1605877 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.FloatWritable;
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
index 5cacd59..c10552a 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.hive.serde2.lazy.LazyLong;
@@ -152,6 +153,15 @@ public class UDFToString extends UDF {
}
}
+ public Text evaluate(TimestampTZWritable i) {
+ if (i == null) {
+ return null;
+ } else {
+ t.set(i.toString());
+ return t;
+ }
+ }
+
public Text evaluate(HiveDecimalWritable i) {
if (i == null) {
return null;
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
index 68d98f5..6b67dea 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
@@ -406,6 +406,7 @@ public abstract class GenericUDF implements Closeable {
case TIMESTAMP:
case DATE:
case VOID:
+ case TIMESTAMPTZ:
outOi = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
break;
default:
@@ -428,6 +429,7 @@ public abstract class GenericUDF implements Closeable {
case CHAR:
case TIMESTAMP:
case DATE:
+ case TIMESTAMPTZ:
break;
default:
throw new UDFArgumentTypeException(i, getFuncName()
@@ -502,6 +504,7 @@ public abstract class GenericUDF implements Closeable {
break;
case TIMESTAMP:
case DATE:
+ case TIMESTAMPTZ:
Object writableValue = converters[i].convert(obj);
date = ((DateWritable) writableValue).get();
break;
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
index 5a31e61..4247afd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
@@ -87,6 +87,7 @@ public class GenericUDFDate extends GenericUDF {
timestampConverter = new TimestampConverter(argumentOI,
PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
break;
+ case TIMESTAMPTZ:
case DATE:
dateWritableConverter = ObjectInspectorConverters.getConverter(argumentOI,
PrimitiveObjectInspectorFactory.writableDateObjectInspector);
@@ -120,6 +121,7 @@ public class GenericUDFDate extends GenericUDF {
.getTimestamp();
output.set(DateWritable.millisToDays(ts.getTime()));
break;
+ case TIMESTAMPTZ:
case DATE:
DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
output.set(dw);
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToTimestampTZ.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToTimestampTZ.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToTimestampTZ.java
new file mode 100644
index 0000000..e96012b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToTimestampTZ.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+ * Convert from string to TIMESTAMP WITH TIME ZONE.
+ */
+@Description(name = "timestamp with time zone",
+ value = "CAST(STRING as TIMESTAMP WITH TIME ZONE) - returns the" +
+ "timestamp with time zone represented by string.",
+ extended = "The string should be of format 'yyyy-MM-dd HH:mm:ss[.SSS...] ZoneId/ZoneOffset'. " +
+ "Examples of ZoneId and ZoneOffset are Asia/Shanghai and GMT+08:00. " +
+ "The time and zone parts are optional. If time is absent, '00:00:00.0' will be used. " +
+ "If zone is absent, the system time zone will be used.")
+public class GenericUDFToTimestampTZ extends GenericUDF {
+
+ private transient PrimitiveObjectInspector argumentOI;
+ private transient PrimitiveObjectInspectorConverter.TimestampTZConverter converter;
+
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ if (arguments.length < 1) {
+ throw new UDFArgumentLengthException(
+ "The function CAST as TIMESTAMP WITH TIME ZONE requires at least one argument, got "
+ + arguments.length);
+ }
+ try {
+ argumentOI = (PrimitiveObjectInspector) arguments[0];
+ switch (argumentOI.getPrimitiveCategory()) {
+ case CHAR:
+ case VARCHAR:
+ case STRING:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPTZ:
+ break;
+ default:
+ throw new UDFArgumentException("CAST as TIMESTAMP WITH TIME ZONE only allows " +
+ "string/date/timestamp/timestamp with time zone types");
+ }
+ } catch (ClassCastException e) {
+ throw new UDFArgumentException(
+ "The function CAST as TIMESTAMP WITH TIME ZONE takes only primitive types");
+ }
+ converter = new PrimitiveObjectInspectorConverter.TimestampTZConverter(argumentOI,
+ PrimitiveObjectInspectorFactory.writableTimestampTZObjectInspector);
+ return PrimitiveObjectInspectorFactory.writableTimestampTZObjectInspector;
+ }
+
+ @Override
+ public Object evaluate(DeferredObject[] arguments) throws HiveException {
+ Object o0 = arguments[0].get();
+ if (o0 == null) {
+ return null;
+ }
+ return converter.convert(o0);
+ }
+
+ @Override
+ public String getDisplayString(String[] children) {
+ assert (children.length == 1);
+ return "CAST(" + children[0] + " AS TIMESTAMP WITH TIME ZONE)";
+ }
+}
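
For context, the parsing contract stated in the @Description above can be sketched with plain java.time. This is a hypothetical illustration, not the TimestampTZ implementation added by this patch; the class and method names are invented for the sketch.

import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.temporal.ChronoField;

public class TimestampTZParseSketch {
  // Time-of-day and zone are optional: a missing time defaults to midnight,
  // a missing zone to the system zone. (Hive also accepts the zone glued to
  // the date, e.g. '2016-01-03Europe/London'; this sketch does not.)
  private static final DateTimeFormatter F = new DateTimeFormatterBuilder()
      .appendPattern("yyyy-MM-dd")
      .optionalStart().appendLiteral(' ').appendPattern("HH:mm:ss")
      .optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 1, 9, true).optionalEnd()
      .optionalEnd()
      .optionalStart().appendLiteral(' ').appendZoneOrOffsetId().optionalEnd()
      .parseDefaulting(ChronoField.HOUR_OF_DAY, 0)
      .parseDefaulting(ChronoField.MINUTE_OF_HOUR, 0)
      .parseDefaulting(ChronoField.SECOND_OF_MINUTE, 0)
      .toFormatter();

  public static ZonedDateTime parse(String s) {
    // withZone only supplies a default; a zone parsed from the text wins.
    ZonedDateTime zdt = ZonedDateTime.parse(s, F.withZone(ZoneId.systemDefault()));
    return zdt.withZoneSameInstant(ZoneOffset.UTC); // normalize to UTC
  }

  public static void main(String[] args) {
    // Prints 2016-01-03T20:26:34.012300Z, the same instant that the q-file
    // outputs below render as '2016-01-03 20:26:34.0123 Z'.
    System.out.println(parse("2016-01-03 12:26:34.0123 America/Los_Angeles"));
  }
}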
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
index 0dc6b19..8be8583 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSQL11ReservedKeyWordsNegative.java
@@ -30,7 +30,7 @@ import org.junit.Test;
/**
* Parser tests for SQL11 Reserved KeyWords. Please find more information in
- * HIVE-6617. Total number : 82
+ * HIVE-6617. Total number : 83
* ALL,ALTER,ARRAY,AS,AUTHORIZATION,BETWEEN,BIGINT,BINARY
* ,BOOLEAN,BOTH,BY,CONSTRAINT
* ,CREATE,CUBE,CURRENT_DATE,CURRENT_TIMESTAMP,CURSOR,
@@ -43,7 +43,7 @@ import org.junit.Test;
* ,PRIMARY,PROCEDURE,RANGE,READS,
* REFERENCES,REGEXP,REVOKE,RIGHT,RLIKE,ROLLUP,ROW
* ,ROWS,SET,SMALLINT,TABLE,TIMESTAMP
- * ,TO,TRIGGER,TRUE,TRUNCATE,UNION,UPDATE,USER,USING,VALUES,WITH,
+ * ,TO,TRIGGER,TRUE,TRUNCATE,UNION,UPDATE,USER,USING,VALUES,WITH,TIME
*/
public class TestSQL11ReservedKeyWordsNegative {
private static HiveConf conf;
@@ -1137,4 +1137,17 @@ public class TestSQL11ReservedKeyWordsNegative {
}
}
+ @Test
+ public void testSQL11ReservedKeyWords_TIME() {
+ try {
+ parse("CREATE TABLE TIME (col STRING)");
+ Assert.fail("Expected ParseException");
+ } catch (ParseException ex) {
+ Assert.assertEquals(
+ "Failure didn't match.",
+ "line 1:13 cannot recognize input near 'TIME' '(' 'col' in table name",
+ ex.getMessage());
+ }
+ }
+
}
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientnegative/serde_regex.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/serde_regex.q b/ql/src/test/queries/clientnegative/serde_regex.q
index c9cfc7d..9a1776a 100644
--- a/ql/src/test/queries/clientnegative/serde_regex.q
+++ b/ql/src/test/queries/clientnegative/serde_regex.q
@@ -4,7 +4,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time TIMESTAMP,
+ `time` TIMESTAMP,
request STRING,
status INT,
size INT,
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientnegative/serde_regex2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/serde_regex2.q b/ql/src/test/queries/clientnegative/serde_regex2.q
index a29bb9c..12e802e 100644
--- a/ql/src/test/queries/clientnegative/serde_regex2.q
+++ b/ql/src/test/queries/clientnegative/serde_regex2.q
@@ -5,7 +5,7 @@ USE default;
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size STRING,
@@ -21,4 +21,4 @@ LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_reg
LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex;
-- raise an exception
-SELECT * FROM serde_regex ORDER BY time;
\ No newline at end of file
+SELECT * FROM serde_regex ORDER BY `time`;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientnegative/serde_regex3.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/serde_regex3.q b/ql/src/test/queries/clientnegative/serde_regex3.q
index 4e91f06..b7810b5 100644
--- a/ql/src/test/queries/clientnegative/serde_regex3.q
+++ b/ql/src/test/queries/clientnegative/serde_regex3.q
@@ -4,7 +4,7 @@ USE default;
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size STRING,
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientpositive/create_like.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/create_like.q b/ql/src/test/queries/clientpositive/create_like.q
index bd39731..81172f3 100644
--- a/ql/src/test/queries/clientpositive/create_like.q
+++ b/ql/src/test/queries/clientpositive/create_like.q
@@ -84,7 +84,7 @@ DESCRIBE FORMATTED table6;
drop table table5;
create table orc_table (
-time string)
+`time` string)
stored as ORC tblproperties ("orc.compress"="SNAPPY");
create table orc_table_using_like like orc_table;
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientpositive/join43.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/join43.q b/ql/src/test/queries/clientpositive/join43.q
index 12c45a6..b2e10dc 100644
--- a/ql/src/test/queries/clientpositive/join43.q
+++ b/ql/src/test/queries/clientpositive/join43.q
@@ -1,11 +1,11 @@
set hive.mapred.mode=nonstrict;
-create table purchase_history (s string, product string, price double, time int);
+create table purchase_history (s string, product string, price double, `time` int);
insert into purchase_history values ('1', 'Belt', 20.00, 21);
insert into purchase_history values ('1', 'Socks', 3.50, 31);
insert into purchase_history values ('3', 'Belt', 20.00, 51);
insert into purchase_history values ('4', 'Shirt', 15.50, 59);
-create table cart_history (s string, cart_id int, time int);
+create table cart_history (s string, cart_id int, `time` int);
insert into cart_history values ('1', 1, 10);
insert into cart_history values ('1', 2, 20);
insert into cart_history values ('1', 3, 30);
@@ -13,7 +13,7 @@ insert into cart_history values ('1', 4, 40);
insert into cart_history values ('3', 5, 50);
insert into cart_history values ('4', 6, 60);
-create table events (s string, st2 string, n int, time int);
+create table events (s string, st2 string, n int, `time` int);
insert into events values ('1', 'Bob', 1234, 20);
insert into events values ('1', 'Bob', 1234, 30);
insert into events values ('1', 'Bob', 1234, 25);
@@ -26,30 +26,30 @@ select s
from (
select last.*, action.st2, action.n
from (
- select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+ select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
from (select * from purchase_history) purchase
join (select * from cart_history) mevt
on purchase.s = mevt.s
- where purchase.time > mevt.time
- group by purchase.s, purchase.time
+ where purchase.`time` > mevt.`time`
+ group by purchase.s, purchase.`time`
) last
join (select * from events) action
- on last.s = action.s and last.last_stage_time = action.time
+ on last.s = action.s and last.last_stage_time = action.`time`
) list;
select s
from (
select last.*, action.st2, action.n
from (
- select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+ select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
from (select * from purchase_history) purchase
join (select * from cart_history) mevt
on purchase.s = mevt.s
- where purchase.time > mevt.time
- group by purchase.s, purchase.time
+ where purchase.`time` > mevt.`time`
+ group by purchase.s, purchase.`time`
) last
join (select * from events) action
- on last.s = action.s and last.last_stage_time = action.time
+ on last.s = action.s and last.last_stage_time = action.`time`
) list;
explain
@@ -57,28 +57,28 @@ select *
from (
select last.*, action.st2, action.n
from (
- select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+ select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
from (select * from purchase_history) purchase
join (select * from cart_history) mevt
on purchase.s = mevt.s
- where purchase.time > mevt.time
- group by purchase.s, purchase.time
+ where purchase.`time` > mevt.`time`
+ group by purchase.s, purchase.`time`
) last
join (select * from events) action
- on last.s = action.s and last.last_stage_time = action.time
+ on last.s = action.s and last.last_stage_time = action.`time`
) list;
select *
from (
select last.*, action.st2, action.n
from (
- select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+ select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
from (select * from purchase_history) purchase
join (select * from cart_history) mevt
on purchase.s = mevt.s
- where purchase.time > mevt.time
- group by purchase.s, purchase.time
+ where purchase.`time` > mevt.`time`
+ group by purchase.s, purchase.`time`
) last
join (select * from events) action
- on last.s = action.s and last.last_stage_time = action.time
+ on last.s = action.s and last.last_stage_time = action.`time`
) list;
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientpositive/serde_regex.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/serde_regex.q b/ql/src/test/queries/clientpositive/serde_regex.q
index e21c6e1..fc716ed 100644
--- a/ql/src/test/queries/clientpositive/serde_regex.q
+++ b/ql/src/test/queries/clientpositive/serde_regex.q
@@ -4,7 +4,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size INT,
@@ -20,7 +20,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size INT,
@@ -35,9 +35,9 @@ STORED AS TEXTFILE;
LOAD DATA LOCAL INPATH "../../data/files/apache.access.log" INTO TABLE serde_regex;
LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" INTO TABLE serde_regex;
-SELECT * FROM serde_regex ORDER BY time;
+SELECT * FROM serde_regex ORDER BY `time`;
-SELECT host, size, status, time from serde_regex ORDER BY time;
+SELECT host, size, status, `time` from serde_regex ORDER BY `time`;
DROP TABLE serde_regex;
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientpositive/timestamptz.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/timestamptz.q b/ql/src/test/queries/clientpositive/timestamptz.q
new file mode 100644
index 0000000..176fefd
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/timestamptz.q
@@ -0,0 +1,11 @@
+explain select cast('2005-01-03 02:01:00 GMT' as timestamp with time zone);
+select cast('2005-01-03 02:01:00 GMT' as timestamp with time zone);
+
+explain select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz);
+select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz);
+
+explain select cast('2016-01-03Europe/London' as timestamptz);
+select cast('2016-01-03Europe/London' as timestamptz);
+
+explain select cast('2016-01-03 13:34:56.38 +1:00' as timestamptz);
+select cast('2016-01-03 13:34:56.38 +1:00' as timestamptz);
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientpositive/timestamptz_1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/timestamptz_1.q b/ql/src/test/queries/clientpositive/timestamptz_1.q
new file mode 100644
index 0000000..c11aea2
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/timestamptz_1.q
@@ -0,0 +1,25 @@
+set hive.fetch.task.conversion=more;
+
+drop table tstz1;
+
+create table tstz1(t timestamp with time zone);
+
+insert overwrite table tstz1 select cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamp with time zone);
+select cast(t as string) from tstz1;
+select cast(t as date) from tstz1;
+select cast(t as timestamp) from tstz1;
+
+insert overwrite table tstz1 select '2016-01-03 12:26:34.1 GMT';
+select cast(t as string) from tstz1;
+select cast(t as date) from tstz1;
+select cast(t as timestamp) from tstz1;
+
+insert overwrite table tstz1 select '2016-01-03 12:26:34.0123 Europe/London';
+select cast(t as string) from tstz1;
+select cast(t as date) from tstz1;
+select cast(t as timestamp) from tstz1;
+
+insert overwrite table tstz1 select '2016-01-03 12:26:34.012300 GMT+08:00';
+select cast(t as string) from tstz1;
+select cast(t as date) from tstz1;
+select cast(t as timestamp) from tstz1;
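
The casts exercised above can be read as: normalize the stored value to UTC, then take the string, date, and timestamp views from the UTC wall clock. A minimal java.time sketch of that reading (an assumption drawn from the expected outputs, not code from this patch):

import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public class TimestampTZCastSketch {
  public static void main(String[] args) {
    // '2016-01-03 12:26:34 America/Los_Angeles' is UTC-08:00 on that date.
    ZonedDateTime utc = ZonedDateTime.parse("2016-01-03T12:26:34-08:00")
        .withZoneSameInstant(ZoneOffset.UTC);
    System.out.println(utc);                    // 2016-01-03T20:26:34Z
    System.out.println(utc.toLocalDate());      // 2016-01-03, the DATE view
    System.out.println(utc.toLocalDateTime());  // 2016-01-03T20:26:34, the TIMESTAMP view
  }
}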
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/queries/clientpositive/timestamptz_2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/timestamptz_2.q b/ql/src/test/queries/clientpositive/timestamptz_2.q
new file mode 100644
index 0000000..a335f52
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/timestamptz_2.q
@@ -0,0 +1,19 @@
+set hive.fetch.task.conversion=more;
+
+drop table tstz2;
+
+create table tstz2(t timestamp with time zone);
+
+insert into table tstz2 values
+ ('2005-04-03 03:01:00.04067 GMT-07:00'),('2005-01-03 02:01:00 GMT'),('2005-01-03 06:01:00 GMT+04:00'),
+ ('2013-06-03 02:01:00.30547 GMT+01:00'),('2016-01-03 12:26:34.0123 GMT+08:00');
+
+select * from tstz2 where t='2005-01-02 19:01:00 GMT-07:00';
+
+select * from tstz2 where t>'2013-06-03 02:01:00.30547 GMT+01:00';
+
+select min(t),max(t) from tstz2;
+
+select t from tstz2 group by t order by t;
+
+select * from tstz2 a join tstz2 b on a.t=b.t order by a.t;
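
The filters and joins above match rows written with different zone texts because values compare by instant rather than by the original zone string. A small java.time sketch of that equivalence (illustration only, not Hive code):

import java.time.ZonedDateTime;

public class TimestampTZCompareSketch {
  public static void main(String[] args) {
    // '2005-01-02 19:01:00 GMT-07:00' and '2005-01-03 02:01:00 GMT' name
    // the same instant, so the equality predicate above matches both rows.
    ZonedDateTime a = ZonedDateTime.parse("2005-01-02T19:01:00-07:00");
    ZonedDateTime b = ZonedDateTime.parse("2005-01-03T02:01:00Z");
    System.out.println(a.toInstant().equals(b.toInstant()));    // true
    System.out.println(a.toInstant().compareTo(b.toInstant())); // 0
  }
}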
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientnegative/serde_regex.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/serde_regex.q.out b/ql/src/test/results/clientnegative/serde_regex.q.out
index a1ec5ca..1047a82 100644
--- a/ql/src/test/results/clientnegative/serde_regex.q.out
+++ b/ql/src/test/results/clientnegative/serde_regex.q.out
@@ -8,7 +8,7 @@ PREHOOK: query: CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time TIMESTAMP,
+ `time` TIMESTAMP,
request STRING,
status INT,
size INT,
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientnegative/serde_regex2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/serde_regex2.q.out b/ql/src/test/results/clientnegative/serde_regex2.q.out
index 374675d..b0703fb 100644
--- a/ql/src/test/results/clientnegative/serde_regex2.q.out
+++ b/ql/src/test/results/clientnegative/serde_regex2.q.out
@@ -8,7 +8,7 @@ PREHOOK: query: CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size STRING,
@@ -26,7 +26,7 @@ POSTHOOK: query: CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size STRING,
@@ -56,7 +56,7 @@ POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" I
POSTHOOK: type: LOAD
#### A masked pattern was here ####
POSTHOOK: Output: default@serde_regex
-PREHOOK: query: SELECT * FROM serde_regex ORDER BY time
+PREHOOK: query: SELECT * FROM serde_regex ORDER BY `time`
PREHOOK: type: QUERY
PREHOOK: Input: default@serde_regex
#### A masked pattern was here ####
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientnegative/serde_regex3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/serde_regex3.q.out b/ql/src/test/results/clientnegative/serde_regex3.q.out
index dc0a0e2..33d647b 100644
--- a/ql/src/test/results/clientnegative/serde_regex3.q.out
+++ b/ql/src/test/results/clientnegative/serde_regex3.q.out
@@ -8,7 +8,7 @@ PREHOOK: query: CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size STRING,
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientpositive/create_like.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_like.q.out b/ql/src/test/results/clientpositive/create_like.q.out
index ff2e752..589ce59 100644
--- a/ql/src/test/results/clientpositive/create_like.q.out
+++ b/ql/src/test/results/clientpositive/create_like.q.out
@@ -657,13 +657,13 @@ POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@table5
POSTHOOK: Output: default@table5
PREHOOK: query: create table orc_table (
-time string)
+`time` string)
stored as ORC tblproperties ("orc.compress"="SNAPPY")
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@orc_table
POSTHOOK: query: create table orc_table (
-time string)
+`time` string)
stored as ORC tblproperties ("orc.compress"="SNAPPY")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientpositive/join43.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join43.q.out b/ql/src/test/results/clientpositive/join43.q.out
index e8c7278..24168ca 100644
--- a/ql/src/test/results/clientpositive/join43.q.out
+++ b/ql/src/test/results/clientpositive/join43.q.out
@@ -1,8 +1,8 @@
-PREHOOK: query: create table purchase_history (s string, product string, price double, time int)
+PREHOOK: query: create table purchase_history (s string, product string, price double, `time` int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@purchase_history
-POSTHOOK: query: create table purchase_history (s string, product string, price double, time int)
+POSTHOOK: query: create table purchase_history (s string, product string, price double, `time` int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@purchase_history
@@ -46,11 +46,11 @@ POSTHOOK: Lineage: purchase_history.price EXPRESSION [(values__tmp__table__4)val
POSTHOOK: Lineage: purchase_history.product SIMPLE [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
POSTHOOK: Lineage: purchase_history.s SIMPLE [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
POSTHOOK: Lineage: purchase_history.time EXPRESSION [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col4, type:string, comment:), ]
-PREHOOK: query: create table cart_history (s string, cart_id int, time int)
+PREHOOK: query: create table cart_history (s string, cart_id int, `time` int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@cart_history
-POSTHOOK: query: create table cart_history (s string, cart_id int, time int)
+POSTHOOK: query: create table cart_history (s string, cart_id int, `time` int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@cart_history
@@ -108,11 +108,11 @@ POSTHOOK: Output: default@cart_history
POSTHOOK: Lineage: cart_history.cart_id EXPRESSION [(values__tmp__table__10)values__tmp__table__10.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
POSTHOOK: Lineage: cart_history.s SIMPLE [(values__tmp__table__10)values__tmp__table__10.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
POSTHOOK: Lineage: cart_history.time EXPRESSION [(values__tmp__table__10)values__tmp__table__10.FieldSchema(name:tmp_values_col3, type:string, comment:), ]
-PREHOOK: query: create table events (s string, st2 string, n int, time int)
+PREHOOK: query: create table events (s string, st2 string, n int, `time` int)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@events
-POSTHOOK: query: create table events (s string, st2 string, n int, time int)
+POSTHOOK: query: create table events (s string, st2 string, n int, `time` int)
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@events
@@ -181,15 +181,15 @@ select s
from (
select last.*, action.st2, action.n
from (
- select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+ select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
from (select * from purchase_history) purchase
join (select * from cart_history) mevt
on purchase.s = mevt.s
- where purchase.time > mevt.time
- group by purchase.s, purchase.time
+ where purchase.`time` > mevt.`time`
+ group by purchase.s, purchase.`time`
) last
join (select * from events) action
- on last.s = action.s and last.last_stage_time = action.time
+ on last.s = action.s and last.last_stage_time = action.`time`
) list
PREHOOK: type: QUERY
POSTHOOK: query: explain
@@ -197,15 +197,15 @@ select s
from (
select last.*, action.st2, action.n
from (
- select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+ select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
from (select * from purchase_history) purchase
join (select * from cart_history) mevt
on purchase.s = mevt.s
- where purchase.time > mevt.time
- group by purchase.s, purchase.time
+ where purchase.`time` > mevt.`time`
+ group by purchase.s, purchase.`time`
) last
join (select * from events) action
- on last.s = action.s and last.last_stage_time = action.time
+ on last.s = action.s and last.last_stage_time = action.`time`
) list
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
@@ -357,15 +357,15 @@ PREHOOK: query: select s
from (
select last.*, action.st2, action.n
from (
- select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+ select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
from (select * from purchase_history) purchase
join (select * from cart_history) mevt
on purchase.s = mevt.s
- where purchase.time > mevt.time
- group by purchase.s, purchase.time
+ where purchase.`time` > mevt.`time`
+ group by purchase.s, purchase.`time`
) last
join (select * from events) action
- on last.s = action.s and last.last_stage_time = action.time
+ on last.s = action.s and last.last_stage_time = action.`time`
) list
PREHOOK: type: QUERY
PREHOOK: Input: default@cart_history
@@ -376,15 +376,15 @@ POSTHOOK: query: select s
from (
select last.*, action.st2, action.n
from (
- select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+ select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
from (select * from purchase_history) purchase
join (select * from cart_history) mevt
on purchase.s = mevt.s
- where purchase.time > mevt.time
- group by purchase.s, purchase.time
+ where purchase.`time` > mevt.`time`
+ group by purchase.s, purchase.`time`
) last
join (select * from events) action
- on last.s = action.s and last.last_stage_time = action.time
+ on last.s = action.s and last.last_stage_time = action.`time`
) list
POSTHOOK: type: QUERY
POSTHOOK: Input: default@cart_history
@@ -399,15 +399,15 @@ select *
from (
select last.*, action.st2, action.n
from (
- select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+ select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
from (select * from purchase_history) purchase
join (select * from cart_history) mevt
on purchase.s = mevt.s
- where purchase.time > mevt.time
- group by purchase.s, purchase.time
+ where purchase.`time` > mevt.`time`
+ group by purchase.s, purchase.`time`
) last
join (select * from events) action
- on last.s = action.s and last.last_stage_time = action.time
+ on last.s = action.s and last.last_stage_time = action.`time`
) list
PREHOOK: type: QUERY
POSTHOOK: query: explain
@@ -415,15 +415,15 @@ select *
from (
select last.*, action.st2, action.n
from (
- select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+ select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
from (select * from purchase_history) purchase
join (select * from cart_history) mevt
on purchase.s = mevt.s
- where purchase.time > mevt.time
- group by purchase.s, purchase.time
+ where purchase.`time` > mevt.`time`
+ group by purchase.s, purchase.`time`
) last
join (select * from events) action
- on last.s = action.s and last.last_stage_time = action.time
+ on last.s = action.s and last.last_stage_time = action.`time`
) list
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
@@ -577,15 +577,15 @@ PREHOOK: query: select *
from (
select last.*, action.st2, action.n
from (
- select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+ select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
from (select * from purchase_history) purchase
join (select * from cart_history) mevt
on purchase.s = mevt.s
- where purchase.time > mevt.time
- group by purchase.s, purchase.time
+ where purchase.`time` > mevt.`time`
+ group by purchase.s, purchase.`time`
) last
join (select * from events) action
- on last.s = action.s and last.last_stage_time = action.time
+ on last.s = action.s and last.last_stage_time = action.`time`
) list
PREHOOK: type: QUERY
PREHOOK: Input: default@cart_history
@@ -596,15 +596,15 @@ POSTHOOK: query: select *
from (
select last.*, action.st2, action.n
from (
- select purchase.s, purchase.time, max (mevt.time) as last_stage_time
+ select purchase.s, purchase.`time`, max (mevt.`time`) as last_stage_time
from (select * from purchase_history) purchase
join (select * from cart_history) mevt
on purchase.s = mevt.s
- where purchase.time > mevt.time
- group by purchase.s, purchase.time
+ where purchase.`time` > mevt.`time`
+ group by purchase.s, purchase.`time`
) last
join (select * from events) action
- on last.s = action.s and last.last_stage_time = action.time
+ on last.s = action.s and last.last_stage_time = action.`time`
) list
POSTHOOK: type: QUERY
POSTHOOK: Input: default@cart_history
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientpositive/serde_regex.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/serde_regex.q.out b/ql/src/test/results/clientpositive/serde_regex.q.out
index 7bebb0c..5a19ec9 100644
--- a/ql/src/test/results/clientpositive/serde_regex.q.out
+++ b/ql/src/test/results/clientpositive/serde_regex.q.out
@@ -3,7 +3,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size INT,
@@ -20,7 +20,7 @@ CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size INT,
@@ -51,7 +51,7 @@ PREHOOK: query: CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size INT,
@@ -69,7 +69,7 @@ POSTHOOK: query: CREATE TABLE serde_regex(
host STRING,
identity STRING,
`user` STRING,
- time STRING,
+ `time` STRING,
request STRING,
status STRING,
size INT,
@@ -99,21 +99,21 @@ POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/apache.access.2.log" I
POSTHOOK: type: LOAD
#### A masked pattern was here ####
POSTHOOK: Output: default@serde_regex
-PREHOOK: query: SELECT * FROM serde_regex ORDER BY time
+PREHOOK: query: SELECT * FROM serde_regex ORDER BY `time`
PREHOOK: type: QUERY
PREHOOK: Input: default@serde_regex
#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM serde_regex ORDER BY time
+POSTHOOK: query: SELECT * FROM serde_regex ORDER BY `time`
POSTHOOK: type: QUERY
POSTHOOK: Input: default@serde_regex
#### A masked pattern was here ####
127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326 NULL NULL
127.0.0.1 - - [26/May/2009:00:00:00 +0000] "GET /someurl/?track=Blabla(Main) HTTP/1.1" 200 5864 - "Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.65 Safari/525.19"
-PREHOOK: query: SELECT host, size, status, time from serde_regex ORDER BY time
+PREHOOK: query: SELECT host, size, status, `time` from serde_regex ORDER BY `time`
PREHOOK: type: QUERY
PREHOOK: Input: default@serde_regex
#### A masked pattern was here ####
-POSTHOOK: query: SELECT host, size, status, time from serde_regex ORDER BY time
+POSTHOOK: query: SELECT host, size, status, `time` from serde_regex ORDER BY `time`
POSTHOOK: type: QUERY
POSTHOOK: Input: default@serde_regex
#### A masked pattern was here ####
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientpositive/timestamptz.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamptz.q.out b/ql/src/test/results/clientpositive/timestamptz.q.out
new file mode 100644
index 0000000..626fe92
--- /dev/null
+++ b/ql/src/test/results/clientpositive/timestamptz.q.out
@@ -0,0 +1,124 @@
+PREHOOK: query: explain select cast('2005-01-03 02:01:00 GMT' as timestamp with time zone)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select cast('2005-01-03 02:01:00 GMT' as timestamp with time zone)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: 2005-01-03 02:01:00.0 Z (type: timestamp with time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
+ ListSink
+
+PREHOOK: query: select cast('2005-01-03 02:01:00 GMT' as timestamp with time zone)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2005-01-03 02:01:00 GMT' as timestamp with time zone)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2005-01-03 02:01:00.0 Z
+PREHOOK: query: explain select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: 2016-01-03 20:26:34.0123 Z (type: timestamp with time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
+ ListSink
+
+PREHOOK: query: select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2016-01-03 12:26:34.0123 America/Los_Angeles' as timestamptz)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2016-01-03 20:26:34.0123 Z
+PREHOOK: query: explain select cast('2016-01-03Europe/London' as timestamptz)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select cast('2016-01-03Europe/London' as timestamptz)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: 2016-01-03 00:00:00.0 Z (type: timestamp with time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
+ ListSink
+
+PREHOOK: query: select cast('2016-01-03Europe/London' as timestamptz)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2016-01-03Europe/London' as timestamptz)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2016-01-03 00:00:00.0 Z
+PREHOOK: query: explain select cast('2016-01-03 13:34:56.38 +1:00' as timestamptz)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select cast('2016-01-03 13:34:56.38 +1:00' as timestamptz)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ expressions: 2016-01-03 12:34:56.38 Z (type: timestamp with time zone)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
+ ListSink
+
+PREHOOK: query: select cast('2016-01-03 13:34:56.38 +1:00' as timestamptz)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select cast('2016-01-03 13:34:56.38 +1:00' as timestamptz)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2016-01-03 12:34:56.38 Z
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientpositive/timestamptz_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamptz_1.q.out b/ql/src/test/results/clientpositive/timestamptz_1.q.out
new file mode 100644
index 0000000..75bbfac
--- /dev/null
+++ b/ql/src/test/results/clientpositive/timestamptz_1.q.out
@@ -0,0 +1,156 @@
+PREHOOK: query: drop table tstz1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstz1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstz1(t timestamp with time zone)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: create table tstz1(t timestamp with time zone)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tstz1
+PREHOOK: query: insert overwrite table tstz1 select cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamp with time zone)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: insert overwrite table tstz1 select cast('2016-01-03 12:26:34 America/Los_Angeles' as timestamp with time zone)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tstz1
+POSTHOOK: Lineage: tstz1.t EXPRESSION []
+PREHOOK: query: select cast(t as string) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 20:26:34.0 Z
+PREHOOK: query: select cast(t as date) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as date) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03
+PREHOOK: query: select cast(t as timestamp) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as timestamp) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 20:26:34
+PREHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.1 GMT'
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.1 GMT'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tstz1
+POSTHOOK: Lineage: tstz1.t EXPRESSION []
+PREHOOK: query: select cast(t as string) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 12:26:34.1 Z
+PREHOOK: query: select cast(t as date) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as date) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03
+PREHOOK: query: select cast(t as timestamp) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as timestamp) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 12:26:34.1
+PREHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.0123 Europe/London'
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.0123 Europe/London'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tstz1
+POSTHOOK: Lineage: tstz1.t EXPRESSION []
+PREHOOK: query: select cast(t as string) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 12:26:34.0123 Z
+PREHOOK: query: select cast(t as date) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as date) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03
+PREHOOK: query: select cast(t as timestamp) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as timestamp) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 12:26:34.0123
+PREHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.012300 GMT+08:00'
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tstz1
+POSTHOOK: query: insert overwrite table tstz1 select '2016-01-03 12:26:34.012300 GMT+08:00'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tstz1
+POSTHOOK: Lineage: tstz1.t EXPRESSION []
+PREHOOK: query: select cast(t as string) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as string) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 04:26:34.0123 Z
+PREHOOK: query: select cast(t as date) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as date) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03
+PREHOOK: query: select cast(t as timestamp) from tstz1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(t as timestamp) from tstz1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz1
+#### A masked pattern was here ####
+2016-01-03 04:26:34.0123
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/ql/src/test/results/clientpositive/timestamptz_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamptz_2.q.out b/ql/src/test/results/clientpositive/timestamptz_2.q.out
new file mode 100644
index 0000000..2666735
--- /dev/null
+++ b/ql/src/test/results/clientpositive/timestamptz_2.q.out
@@ -0,0 +1,78 @@
+PREHOOK: query: drop table tstz2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tstz2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tstz2(t timestamp with time zone)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tstz2
+POSTHOOK: query: create table tstz2(t timestamp with time zone)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tstz2
+PREHOOK: query: insert into table tstz2 values
+ ('2005-04-03 03:01:00.04067 GMT-07:00'),('2005-01-03 02:01:00 GMT'),('2005-01-03 06:01:00 GMT+04:00'),
+ ('2013-06-03 02:01:00.30547 GMT+01:00'),('2016-01-03 12:26:34.0123 GMT+08:00')
+PREHOOK: type: QUERY
+PREHOOK: Output: default@tstz2
+POSTHOOK: query: insert into table tstz2 values
+ ('2005-04-03 03:01:00.04067 GMT-07:00'),('2005-01-03 02:01:00 GMT'),('2005-01-03 06:01:00 GMT+04:00'),
+ ('2013-06-03 02:01:00.30547 GMT+01:00'),('2016-01-03 12:26:34.0123 GMT+08:00')
+POSTHOOK: type: QUERY
+POSTHOOK: Output: default@tstz2
+POSTHOOK: Lineage: tstz2.t EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: select * from tstz2 where t='2005-01-02 19:01:00 GMT-07:00'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tstz2 where t='2005-01-02 19:01:00 GMT-07:00'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2005-01-03 02:01:00.0 Z
+2005-01-03 02:01:00.0 Z
+PREHOOK: query: select * from tstz2 where t>'2013-06-03 02:01:00.30547 GMT+01:00'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tstz2 where t>'2013-06-03 02:01:00.30547 GMT+01:00'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2016-01-03 04:26:34.0123 Z
+PREHOOK: query: select min(t),max(t) from tstz2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select min(t),max(t) from tstz2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2005-01-03 02:01:00.0 Z 2016-01-03 04:26:34.0123 Z
+PREHOOK: query: select t from tstz2 group by t order by t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select t from tstz2 group by t order by t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2005-01-03 02:01:00.0 Z
+2005-04-03 10:01:00.04067 Z
+2013-06-03 01:01:00.30547 Z
+2016-01-03 04:26:34.0123 Z
+PREHOOK: query: select * from tstz2 a join tstz2 b on a.t=b.t order by a.t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tstz2 a join tstz2 b on a.t=b.t order by a.t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tstz2
+#### A masked pattern was here ####
+2005-01-03 02:01:00.0 Z 2005-01-03 02:01:00.0 Z
+2005-01-03 02:01:00.0 Z 2005-01-03 02:01:00.0 Z
+2005-01-03 02:01:00.0 Z 2005-01-03 02:01:00.0 Z
+2005-01-03 02:01:00.0 Z 2005-01-03 02:01:00.0 Z
+2005-04-03 10:01:00.04067 Z 2005-04-03 10:01:00.04067 Z
+2013-06-03 01:01:00.30547 Z 2013-06-03 01:01:00.30547 Z
+2016-01-03 04:26:34.0123 Z 2016-01-03 04:26:34.0123 Z
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/if/serde.thrift
----------------------------------------------------------------------
diff --git a/serde/if/serde.thrift b/serde/if/serde.thrift
index 1d40d5a..56d8beb 100644
--- a/serde/if/serde.thrift
+++ b/serde/if/serde.thrift
@@ -64,6 +64,7 @@ const string DECIMAL_TYPE_NAME = "decimal";
const string BINARY_TYPE_NAME = "binary";
const string INTERVAL_YEAR_MONTH_TYPE_NAME = "interval_year_month";
const string INTERVAL_DAY_TIME_TYPE_NAME = "interval_day_time";
+const string TIMESTAMPTZ_TYPE_NAME = "timestamp with time zone";
const string LIST_TYPE_NAME = "array";
const string MAP_TYPE_NAME = "map";
@@ -95,6 +96,7 @@ const set<string> PrimitiveTypes = [
INTERVAL_DAY_TIME_TYPE_NAME
DECIMAL_TYPE_NAME
BINARY_TYPE_NAME
+ TIMESTAMPTZ_TYPE_NAME
],
const set<string> CollectionTypes = [ LIST_TYPE_NAME MAP_TYPE_NAME ],
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
index 907acf2..18a3458 100644
--- a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
+++ b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
@@ -85,6 +85,8 @@ serdeConstants::serdeConstants() {
INTERVAL_DAY_TIME_TYPE_NAME = "interval_day_time";
+ TIMESTAMPTZ_TYPE_NAME = "timestamp with time zone";
+
LIST_TYPE_NAME = "array";
MAP_TYPE_NAME = "map";
@@ -119,6 +121,7 @@ serdeConstants::serdeConstants() {
PrimitiveTypes.insert("interval_day_time");
PrimitiveTypes.insert("decimal");
PrimitiveTypes.insert("binary");
+ PrimitiveTypes.insert("timestamp with time zone");
CollectionTypes.insert("array");
CollectionTypes.insert("map");
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/gen/thrift/gen-cpp/serde_constants.h
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.h b/serde/src/gen/thrift/gen-cpp/serde_constants.h
index 8785bd2..e261ed3 100644
--- a/serde/src/gen/thrift/gen-cpp/serde_constants.h
+++ b/serde/src/gen/thrift/gen-cpp/serde_constants.h
@@ -52,6 +52,7 @@ class serdeConstants {
std::string BINARY_TYPE_NAME;
std::string INTERVAL_YEAR_MONTH_TYPE_NAME;
std::string INTERVAL_DAY_TIME_TYPE_NAME;
+ std::string TIMESTAMPTZ_TYPE_NAME;
std::string LIST_TYPE_NAME;
std::string MAP_TYPE_NAME;
std::string STRUCT_TYPE_NAME;
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
index 2578d3e..398ec6f 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
@@ -110,6 +110,8 @@ public class serdeConstants {
public static final String INTERVAL_DAY_TIME_TYPE_NAME = "interval_day_time";
+ public static final String TIMESTAMPTZ_TYPE_NAME = "timestamp with time zone";
+
public static final String LIST_TYPE_NAME = "array";
public static final String MAP_TYPE_NAME = "map";
@@ -146,6 +148,7 @@ public class serdeConstants {
PrimitiveTypes.add("interval_day_time");
PrimitiveTypes.add("decimal");
PrimitiveTypes.add("binary");
+ PrimitiveTypes.add("timestamp with time zone");
}
public static final Set<String> CollectionTypes = new HashSet<String>();
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
index ea2dbbe..2141e8f 100644
--- a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
+++ b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
@@ -55,6 +55,7 @@ final class Constant extends \Thrift\Type\TConstant {
static protected $BINARY_TYPE_NAME;
static protected $INTERVAL_YEAR_MONTH_TYPE_NAME;
static protected $INTERVAL_DAY_TIME_TYPE_NAME;
+ static protected $TIMESTAMPTZ_TYPE_NAME;
static protected $LIST_TYPE_NAME;
static protected $MAP_TYPE_NAME;
static protected $STRUCT_TYPE_NAME;
@@ -215,6 +216,10 @@ final class Constant extends \Thrift\Type\TConstant {
return "interval_day_time";
}
+ static protected function init_TIMESTAMPTZ_TYPE_NAME() {
+ return "timestamp with time zone";
+ }
+
static protected function init_LIST_TYPE_NAME() {
return "array";
}
@@ -267,6 +272,7 @@ final class Constant extends \Thrift\Type\TConstant {
"interval_day_time" => true,
"decimal" => true,
"binary" => true,
+ "timestamp with time zone" => true,
);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
index e3b24eb..bc8387b 100644
--- a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
+++ b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
@@ -46,6 +46,7 @@ DECIMAL_TYPE_NAME = "decimal"
BINARY_TYPE_NAME = "binary"
INTERVAL_YEAR_MONTH_TYPE_NAME = "interval_year_month"
INTERVAL_DAY_TIME_TYPE_NAME = "interval_day_time"
+TIMESTAMPTZ_TYPE_NAME = "timestamp with time zone"
LIST_TYPE_NAME = "array"
MAP_TYPE_NAME = "map"
STRUCT_TYPE_NAME = "struct"
@@ -73,6 +74,7 @@ PrimitiveTypes = set([
"interval_day_time",
"decimal",
"binary",
+ "timestamp with time zone",
])
CollectionTypes = set([
"array",
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/gen/thrift/gen-rb/serde_constants.rb
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-rb/serde_constants.rb b/serde/src/gen/thrift/gen-rb/serde_constants.rb
index 15efaea..da8748d 100644
--- a/serde/src/gen/thrift/gen-rb/serde_constants.rb
+++ b/serde/src/gen/thrift/gen-rb/serde_constants.rb
@@ -81,6 +81,8 @@ INTERVAL_YEAR_MONTH_TYPE_NAME = %q"interval_year_month"
INTERVAL_DAY_TIME_TYPE_NAME = %q"interval_day_time"
+TIMESTAMPTZ_TYPE_NAME = %q"timestamp with time zone"
+
LIST_TYPE_NAME = %q"array"
MAP_TYPE_NAME = %q"map"
@@ -116,6 +118,7 @@ PrimitiveTypes = Set.new([
%q"interval_day_time",
%q"decimal",
%q"binary",
+ %q"timestamp with time zone",
])
CollectionTypes = Set.new([
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
index 5ecfbca..9ead0ed 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspect
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampTZObjectInspector;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.slf4j.Logger;
@@ -276,6 +277,12 @@ public final class SerDeUtils {
sb.append('"');
break;
}
+ case TIMESTAMPTZ: {
+ sb.append('"');
+ sb.append(((TimestampTZObjectInspector) poi).getPrimitiveWritableObject(o));
+ sb.append('"');
+ break;
+ }
case BINARY: {
BytesWritable bw = ((BinaryObjectInspector) oi).getPrimitiveWritableObject(o);
Text txt = new Text();
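The new TIMESTAMPTZ case follows the same pattern as the other quoted primitive branches: the writable is appended between double quotes, relying on its toString(). A minimal standalone sketch of that pattern (class and method names hypothetical, not part of the patch):

    import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;

    public class QuoteSketch {
      // Mirrors the quoting done in SerDeUtils for the TIMESTAMPTZ case.
      static String quote(TimestampTZWritable w) {
        StringBuilder sb = new StringBuilder();
        sb.append('"');
        sb.append(w); // StringBuilder.append(Object) calls w.toString()
        sb.append('"');
        return sb.toString();
      }
    }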
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
index 89e15c3..f333ae9 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -76,6 +77,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspect
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampTZObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
@@ -420,7 +422,15 @@ public class BinarySortableSerDe extends AbstractSerDe {
}
t.setBinarySortable(bytes, 0);
return t;
-
+ case TIMESTAMPTZ:
+ TimestampTZWritable tstz = (reuse == null ? new TimestampTZWritable() :
+ (TimestampTZWritable) reuse);
+ byte[] data = new byte[TimestampTZWritable.BINARY_SORTABLE_LENGTH];
+ for (int i = 0; i < data.length; i++) {
+ data[i] = buffer.read(invert);
+ }
+ tstz.fromBinarySortable(data, 0);
+ return tstz;
case INTERVAL_YEAR_MONTH: {
HiveIntervalYearMonthWritable i = reuse == null ? new HiveIntervalYearMonthWritable()
: (HiveIntervalYearMonthWritable) reuse;
@@ -788,6 +798,12 @@ public class BinarySortableSerDe extends AbstractSerDe {
serializeTimestampWritable(buffer, t, invert);
return;
}
+ case TIMESTAMPTZ: {
+ TimestampTZObjectInspector toi = (TimestampTZObjectInspector) poi;
+ TimestampTZWritable t = toi.getPrimitiveWritableObject(o);
+ serializeTimestampTZWritable(buffer, t, invert);
+ return;
+ }
case INTERVAL_YEAR_MONTH: {
HiveIntervalYearMonthObjectInspector ioi = (HiveIntervalYearMonthObjectInspector) poi;
HiveIntervalYearMonth intervalYearMonth = ioi.getPrimitiveJavaObject(o);
@@ -958,6 +974,14 @@ public class BinarySortableSerDe extends AbstractSerDe {
}
}
+ public static void serializeTimestampTZWritable(
+ ByteStream.Output buffer, TimestampTZWritable t, boolean invert) {
+ byte[] data = t.toBinarySortable();
+ for (byte b : data) {
+ writeByte(buffer, b, invert);
+ }
+ }
+
public static void serializeHiveIntervalYearMonth(ByteStream.Output buffer,
HiveIntervalYearMonth intervalYearMonth, boolean invert) {
int totalMonths = intervalYearMonth.getTotalMonths();
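A hypothetical round-trip of the binary-sortable path added above (illustration only, not part of the patch; the epoch values are arbitrary and TimestampTZ.set is the seconds/nanos setter used elsewhere in this commit):

    import org.apache.hadoop.hive.common.type.TimestampTZ;
    import org.apache.hadoop.hive.serde2.io.TimestampTZWritable;

    public class SortableRoundTrip {
      public static void main(String[] args) {
        TimestampTZ tstz = new TimestampTZ();
        tstz.set(1494460042L, 123456789); // epoch seconds, nanoseconds

        TimestampTZWritable in = new TimestampTZWritable(tstz);
        // 7 bytes of sign-flipped seconds followed by 4 bytes of nanos,
        // so unsigned byte order matches chronological order.
        byte[] sortable = in.toBinarySortable();

        TimestampTZWritable out = new TimestampTZWritable();
        out.fromBinarySortable(sortable, 0);
        System.out.println(in.compareTo(out) == 0); // true
      }
    }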
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampTZWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampTZWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampTZWritable.java
new file mode 100644
index 0000000..8c3f8f6
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampTZWritable.java
@@ -0,0 +1,427 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.io;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableUtils;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Arrays;
+
+/**
+ * Writable for TimestampTZ. Copied from TimestampWritable.
+ * Once we replace {@link java.sql.Timestamp} with {@link java.time.LocalDateTime} for Timestamp,
+ * Timestamp will need a new Writable as well.
+ */
+public class TimestampTZWritable implements WritableComparable<TimestampTZWritable> {
+
+ public static final byte[] nullBytes = {0x0, 0x0, 0x0, 0x0};
+ private static final int DECIMAL_OR_SECOND_VINT_FLAG = 1 << 31;
+ private static final long SEVEN_BYTE_LONG_SIGN_FLIP = 0xff80L << 48; // only need to flip the MSB?
+
+ /**
+ * The maximum number of bytes required for a TimestampTZWritable
+ */
+ public static final int MAX_BYTES = 13;
+
+ public static final int BINARY_SORTABLE_LENGTH = 11;
+
+ private TimestampTZ timestampTZ = new TimestampTZ();
+
+ /**
+ * True if the data is currently stored in the timestampTZ field rather than
+ * in the byte arrays, which allows lazy conversion to bytes when necessary;
+ * false otherwise.
+ */
+ private boolean bytesEmpty = true;
+ private boolean timestampTZEmpty = true;
+
+ /* Allow use of external byte[] for efficiency */
+ private byte[] currentBytes;
+ private final byte[] internalBytes = new byte[MAX_BYTES];
+ private byte[] externalBytes;
+ private int offset;
+
+ public TimestampTZWritable() {
+ bytesEmpty = false;
+ currentBytes = internalBytes;
+ offset = 0;
+ }
+
+ public TimestampTZWritable(byte[] bytes, int offset) {
+ set(bytes, offset);
+ }
+
+ public TimestampTZWritable(TimestampTZWritable other) {
+ this(other.getBytes(), 0);
+ }
+
+ public TimestampTZWritable(TimestampTZ tstz) {
+ set(tstz);
+ }
+
+ public void set(byte[] bytes, int offset) {
+ externalBytes = bytes;
+ this.offset = offset;
+ bytesEmpty = false;
+ timestampTZEmpty = true;
+ currentBytes = externalBytes;
+ }
+
+ public void set(TimestampTZ tstz) {
+ if (tstz == null) {
+ timestampTZ.setZonedDateTime(null);
+ return;
+ }
+ timestampTZ = tstz;
+ bytesEmpty = true;
+ timestampTZEmpty = false;
+ }
+
+ public void set(TimestampTZWritable t) {
+ if (t.bytesEmpty) {
+ set(t.getTimestampTZ());
+ } else if (t.currentBytes == t.externalBytes) {
+ set(t.currentBytes, t.offset);
+ } else {
+ set(t.currentBytes, 0);
+ }
+ }
+
+ public TimestampTZ getTimestampTZ() {
+ populateTimestampTZ();
+ return timestampTZ;
+ }
+
+ /**
+ * Used to create copies of objects
+ *
+ * @return a copy of the internal TimestampTZWritable byte[]
+ */
+ public byte[] getBytes() {
+ checkBytes();
+
+ int len = getTotalLength();
+ byte[] b = new byte[len];
+
+ System.arraycopy(currentBytes, offset, b, 0, len);
+ return b;
+ }
+
+ /**
+ * @return length of serialized TimestampTZWritable data. As a side effect, populates the internal
+ * byte array if empty.
+ */
+ private int getTotalLength() {
+ checkBytes();
+ return getTotalLength(currentBytes, offset);
+ }
+
+ /**
+ * The data of TimestampTZWritable can be stored either in a byte[]
+ * or in a TimestampTZ object. Calling this method ensures that the byte[]
+ * is populated from the TimestampTZ object if previously empty.
+ */
+ private void checkBytes() {
+ if (bytesEmpty) {
+ populateBytes();
+ offset = 0;
+ currentBytes = internalBytes;
+ bytesEmpty = false;
+ }
+ }
+
+ // Writes the TimestampTZ's serialized value to the internal byte array.
+ private void populateBytes() {
+ Arrays.fill(internalBytes, (byte) 0);
+
+ long seconds = timestampTZ.getEpochSecond();
+ int nanos = timestampTZ.getNanos();
+
+ boolean hasSecondVInt = seconds < 0 || seconds > Integer.MAX_VALUE;
+ boolean hasDecimal = setNanosBytes(nanos, internalBytes, offset + 4, hasSecondVInt);
+
+ int firstInt = (int) seconds;
+ if (hasDecimal || hasSecondVInt) {
+ firstInt |= DECIMAL_OR_SECOND_VINT_FLAG;
+ }
+ intToBytes(firstInt, internalBytes, offset);
+ if (hasSecondVInt) {
+ LazyBinaryUtils.writeVLongToByteArray(internalBytes,
+ offset + 4 + WritableUtils.decodeVIntSize(internalBytes[offset + 4]),
+ seconds >> 31);
+ }
+ }
+
+ private void populateTimestampTZ() {
+ if (timestampTZEmpty) {
+ long seconds = getSeconds();
+ int nanos = getNanos();
+ timestampTZ.set(seconds, nanos);
+ timestampTZEmpty = false;
+ }
+ }
+
+ public long getSeconds() {
+ if (!timestampTZEmpty) {
+ return timestampTZ.getEpochSecond();
+ } else if (!bytesEmpty) {
+ return getSeconds(currentBytes, offset);
+ }
+ throw new IllegalStateException("Both timestamp and bytes are empty");
+ }
+
+ public int getNanos() {
+ if (!timestampTZEmpty) {
+ return timestampTZ.getNanos();
+ } else if (!bytesEmpty) {
+ return hasDecimalOrSecondVInt(currentBytes[offset]) ? getNanos(currentBytes, offset + 4) : 0;
+ }
+ throw new IllegalStateException("Both timestamp and bytes are empty");
+ }
+
+ @Override
+ public int compareTo(TimestampTZWritable o) {
+ return getTimestampTZ().compareTo(o.getTimestampTZ());
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o instanceof TimestampTZWritable) {
+ return compareTo((TimestampTZWritable) o) == 0;
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ return getTimestampTZ().hashCode();
+ }
+
+ @Override
+ public String toString() {
+ populateTimestampTZ();
+ return timestampTZ.toString();
+ }
+
+ @Override
+ public void write(DataOutput dataOutput) throws IOException {
+ checkBytes();
+ dataOutput.write(currentBytes, offset, getTotalLength());
+ }
+
+ @Override
+ public void readFields(DataInput dataInput) throws IOException {
+ dataInput.readFully(internalBytes, 0, 4);
+ if (hasDecimalOrSecondVInt(internalBytes[0])) {
+ dataInput.readFully(internalBytes, 4, 1);
+ int len = (byte) WritableUtils.decodeVIntSize(internalBytes[4]);
+ if (len > 1) {
+ dataInput.readFully(internalBytes, 5, len - 1);
+ }
+
+ long vlong = LazyBinaryUtils.readVLongFromByteArray(internalBytes, 4);
+ Preconditions.checkState(vlong >= -1000000000 && vlong <= 999999999,
+ "Invalid nanos value for a TimestampTZWritable: " + vlong +
+ ", expected to be between -1000000000 and 999999999.");
+ if (vlong < 0) {
+ // This indicates there is a second VInt containing the additional bits of the seconds
+ // field.
+ dataInput.readFully(internalBytes, 4 + len, 1);
+ int secondVIntLen = (byte) WritableUtils.decodeVIntSize(internalBytes[4 + len]);
+ if (secondVIntLen > 1) {
+ dataInput.readFully(internalBytes, 5 + len, secondVIntLen - 1);
+ }
+ }
+ }
+ currentBytes = internalBytes;
+ offset = 0;
+ timestampTZEmpty = true;
+ bytesEmpty = false;
+ }
+
+ public byte[] toBinarySortable() {
+ byte[] b = new byte[BINARY_SORTABLE_LENGTH];
+ int nanos = getNanos();
+ // We flip the highest-order bit of the seven-byte representation of seconds to make negative
+ // values come before positive ones.
+ long seconds = getSeconds() ^ SEVEN_BYTE_LONG_SIGN_FLIP;
+ sevenByteLongToBytes(seconds, b, 0);
+ intToBytes(nanos, b, 7);
+ return b;
+ }
+
+ public void fromBinarySortable(byte[] bytes, int binSortOffset) {
+ // Flip the sign bit (and unused bits of the high-order byte) of the seven-byte long back.
+ long seconds = readSevenByteLong(bytes, binSortOffset) ^ SEVEN_BYTE_LONG_SIGN_FLIP;
+ int nanos = bytesToInt(bytes, binSortOffset + 7);
+ timestampTZ.set(seconds, nanos);
+ timestampTZEmpty = false;
+ bytesEmpty = true;
+ }
+
+ public void writeToByteStream(ByteStream.RandomAccessOutput byteStream) {
+ checkBytes();
+ byteStream.write(currentBytes, offset, getTotalLength());
+ }
+
+ /**
+ * Given an integer representing nanoseconds, write its serialized
+ * value to the byte array b at the given offset.
+ *
+ * @param nanos nanosecond value to serialize
+ * @param b destination byte array
+ * @param offset position in b at which to start writing
+ * @param hasSecondVInt whether a second VInt holding the high bits of the
+ * seconds field follows; encoded in the sign of the written value
+ * @return true if the nanos value was non-zero, i.e. a (digit-reversed)
+ * decimal field was written
+ */
+ private static boolean setNanosBytes(int nanos, byte[] b, int offset, boolean hasSecondVInt) {
+ int decimal = 0;
+ if (nanos != 0) {
+ int counter = 0;
+ while (counter < 9) {
+ decimal *= 10;
+ decimal += nanos % 10;
+ nanos /= 10;
+ counter++;
+ }
+ }
+
+ if (hasSecondVInt || decimal != 0) {
+ // We use the sign of the reversed-nanoseconds field to indicate that there is a second VInt
+ // present.
+ LazyBinaryUtils.writeVLongToByteArray(b, offset, hasSecondVInt ? (-decimal - 1) : decimal);
+ }
+ return decimal != 0;
+ }
+
+ public static void setTimestampTZ(TimestampTZ t, byte[] bytes, int offset) {
+ long seconds = getSeconds(bytes, offset);
+ int nanos = hasDecimalOrSecondVInt(bytes[offset]) ? getNanos(bytes, offset + 4) : 0;
+ t.set(seconds, nanos);
+ }
+
+ public static int getTotalLength(byte[] bytes, int offset) {
+ int len = 4;
+ if (hasDecimalOrSecondVInt(bytes[offset])) {
+ int firstVIntLen = WritableUtils.decodeVIntSize(bytes[offset + 4]);
+ len += firstVIntLen;
+ if (hasSecondVInt(bytes[offset + 4])) {
+ len += WritableUtils.decodeVIntSize(bytes[offset + 4 + firstVIntLen]);
+ }
+ }
+ return len;
+ }
+
+ public static long getSeconds(byte[] bytes, int offset) {
+ int firstVInt = bytesToInt(bytes, offset);
+ if (firstVInt >= 0 || !hasSecondVInt(bytes[offset + 4])) {
+ return firstVInt & ~DECIMAL_OR_SECOND_VINT_FLAG;
+ }
+ return ((long) (firstVInt & ~DECIMAL_OR_SECOND_VINT_FLAG)) |
+ (LazyBinaryUtils.readVLongFromByteArray(bytes,
+ offset + 4 + WritableUtils.decodeVIntSize(bytes[offset + 4])) << 31);
+ }
+
+ public static int getNanos(byte[] bytes, int offset) {
+ int val = (int) LazyBinaryUtils.readVLongFromByteArray(bytes, offset);
+ if (val < 0) {
+ val = -val - 1;
+ }
+ int len = (int) Math.floor(Math.log10(val)) + 1;
+
+ // Reverse the value
+ int tmp = 0;
+ while (val != 0) {
+ tmp *= 10;
+ tmp += val % 10;
+ val /= 10;
+ }
+ val = tmp;
+
+ if (len < 9) {
+ val *= Math.pow(10, 9 - len);
+ }
+ return val;
+ }
+
+ private static boolean hasDecimalOrSecondVInt(byte b) {
+ return b < 0;
+ }
+
+ private static boolean hasSecondVInt(byte b) {
+ return WritableUtils.isNegativeVInt(b);
+ }
+
+ /**
+ * Writes <code>value</code> into <code>dest</code> at <code>offset</code>
+ * as a four-byte big-endian integer.
+ *
+ * @param value the int value to write
+ * @param dest destination byte array
+ * @param offset position in dest at which the four bytes are written
+ */
+ private static void intToBytes(int value, byte[] dest, int offset) {
+ dest[offset] = (byte) ((value >> 24) & 0xFF);
+ dest[offset + 1] = (byte) ((value >> 16) & 0xFF);
+ dest[offset + 2] = (byte) ((value >> 8) & 0xFF);
+ dest[offset + 3] = (byte) (value & 0xFF);
+ }
+
+ /**
+ * Writes <code>value</code> into <code>dest</code> at <code>offset</code> as a seven-byte
+ * serialized long number.
+ */
+ private static void sevenByteLongToBytes(long value, byte[] dest, int offset) {
+ dest[offset] = (byte) ((value >> 48) & 0xFF);
+ dest[offset + 1] = (byte) ((value >> 40) & 0xFF);
+ dest[offset + 2] = (byte) ((value >> 32) & 0xFF);
+ dest[offset + 3] = (byte) ((value >> 24) & 0xFF);
+ dest[offset + 4] = (byte) ((value >> 16) & 0xFF);
+ dest[offset + 5] = (byte) ((value >> 8) & 0xFF);
+ dest[offset + 6] = (byte) (value & 0xFF);
+ }
+
+ /**
+ * @param bytes source byte array
+ * @param offset position of the first of the four bytes to read
+ * @return integer represented by the four bytes in <code>bytes</code>
+ * beginning at <code>offset</code>
+ */
+ private static int bytesToInt(byte[] bytes, int offset) {
+ return ((0xFF & bytes[offset]) << 24)
+ | ((0xFF & bytes[offset + 1]) << 16)
+ | ((0xFF & bytes[offset + 2]) << 8)
+ | (0xFF & bytes[offset + 3]);
+ }
+
+ private static long readSevenByteLong(byte[] bytes, int offset) {
+ // We need to shift everything 8 bits left and then shift back to populate the sign field.
+ return (((0xFFL & bytes[offset]) << 56)
+ | ((0xFFL & bytes[offset + 1]) << 48)
+ | ((0xFFL & bytes[offset + 2]) << 40)
+ | ((0xFFL & bytes[offset + 3]) << 32)
+ | ((0xFFL & bytes[offset + 4]) << 24)
+ | ((0xFFL & bytes[offset + 5]) << 16)
+ | ((0xFFL & bytes[offset + 6]) << 8)) >> 8;
+ }
+}
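To summarize the variable-length layout implemented above: the first four bytes hold the low 31 bits of the seconds field, with the most significant bit flagging that more data follows; an optional VInt then carries the nanoseconds with their decimal digits reversed (written as -decimal - 1 when a second VInt for the remaining high bits of the seconds follows). Reversing the digits makes the common trailing-zero case (e.g. millisecond precision) encode to a small number and hence a short VInt. A self-contained sketch of just the digit-reversal step (illustration only, not part of the patch):

    public class NanosReversal {
      // Same digit reversal as in setNanosBytes; applying it twice
      // returns the original nine-digit nanosecond value.
      static int reverseNineDigits(int nanos) {
        int reversed = 0;
        for (int i = 0; i < 9; i++) {
          reversed = reversed * 10 + nanos % 10;
          nanos /= 10;
        }
        return reversed;
      }

      public static void main(String[] args) {
        System.out.println(reverseNineDigits(123000000)); // 321
        System.out.println(reverseNineDigits(321));       // 123000000
      }
    }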
http://git-wip-us.apache.org/repos/asf/hive/blob/6b6a00ff/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
index 23dbe6a..2b940fd 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiv
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyShortObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampTZObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyVoidObjectInspector;
import org.apache.hadoop.hive.serde2.lazydio.LazyDioBinary;
import org.apache.hadoop.hive.serde2.lazydio.LazyDioBoolean;
@@ -128,6 +129,8 @@ public final class LazyFactory {
return new LazyDate((LazyDateObjectInspector) oi);
case TIMESTAMP:
return new LazyTimestamp((LazyTimestampObjectInspector) oi);
+ case TIMESTAMPTZ:
+ return new LazyTimestampTZ((LazyTimestampTZObjectInspector) oi);
case INTERVAL_YEAR_MONTH:
return new LazyHiveIntervalYearMonth((LazyHiveIntervalYearMonthObjectInspector) oi);
case INTERVAL_DAY_TIME: