Posted to commits@hive.apache.org by jc...@apache.org on 2018/06/22 17:46:49 UTC
[33/35] hive git commit: HIVE-12192: Hive should carry out timestamp computations in UTC (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
index 0bbdce7..cb897e4 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template TimestampColumnArithmeticDateScalar.txt, which covers binary arithmetic
@@ -49,7 +49,7 @@ public class <ClassName> extends VectorExpression {
super(outputColumnNum);
this.colNum = colNum;
this.value = new Timestamp(0);
- this.value.setTime(DateWritable.daysToMillis((int) value));
+ this.value.setTime(DateWritableV2.daysToMillis((int) value));
}
public <ClassName>() {
@@ -175,7 +175,7 @@ public class <ClassName> extends VectorExpression {
@Override
public String vectorExpressionParameters() {
- return getColumnParamString(0, colNum) + ", val " + value.toString();
+ return getColumnParamString(0, colNum) + ", val " + org.apache.hadoop.hive.common.type.Date.ofEpochMilli(value.getTime());
}
@Override
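
For context on the DateWritable -> DateWritableV2 substitution that recurs throughout this patch: the V2 writable computes daysToMillis against UTC rather than the JVM's local time zone. A minimal standalone sketch of the UTC semantics in plain Java (illustrative, not the Hive implementation):

    import java.time.Instant;
    import java.time.LocalDate;
    import java.time.ZoneOffset;

    public class DaysToMillisSketch {
      // Epoch days -> milliseconds at 00:00 UTC. UTC days are uniformly
      // 86,400,000 ms long, so no DST correction is needed (the old
      // DateWritable consulted the local calendar here).
      static long daysToMillis(int daysSinceEpoch) {
        return daysSinceEpoch * 86_400_000L;
      }

      public static void main(String[] args) {
        int days = (int) LocalDate.of(2018, 6, 22).toEpochDay(); // 17704
        System.out.println(daysToMillis(days));                  // 1529625600000
        System.out.println(Instant.ofEpochMilli(daysToMillis(days))
            .atZone(ZoneOffset.UTC).toLocalDate());              // 2018-06-22
      }
    }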
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt
index 6532fcf..a67aaa5 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticTimestampScalar.txt
@@ -173,7 +173,7 @@ public class <ClassName> extends VectorExpression {
@Override
public String vectorExpressionParameters() {
- return getColumnParamString(0, colNum) + ", val " + value.toString();
+ return getColumnParamString(0, colNum) + ", val " + TimestampUtils.timestampScalarTypeToString(value);
}
@Override
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt
index dce87f4..2abec36 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnCompareTimestampScalar.txt
@@ -158,7 +158,7 @@ public class <ClassName> extends VectorExpression {
@Override
public String vectorExpressionParameters() {
- return getColumnParamString(0, colNum) + ", val " + value.toString();
+ return getColumnParamString(0, colNum) + ", val " + TimestampUtils.timestampScalarTypeToString(value);
}
@Override
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
index 9a21cda..8b4480f 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
@@ -36,8 +36,8 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template TimestampScalarArithmeticDateColumnBase.txt.
@@ -104,7 +104,7 @@ public class <ClassName> extends VectorExpression {
if (inputColVector2.isRepeating) {
if (inputColVector2.noNulls || !inputIsNull[0]) {
outputIsNull[0] = false;
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
dtm.<OperatorMethod>(
value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
outputColVector.setFromScratch<CamelReturnType>(0);
@@ -126,7 +126,7 @@ public class <ClassName> extends VectorExpression {
for(int j = 0; j != n; j++) {
final int i = sel[j];
outputIsNull[i] = false;
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.<OperatorMethod>(
value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
outputColVector.setFromScratch<CamelReturnType>(i);
@@ -134,7 +134,7 @@ public class <ClassName> extends VectorExpression {
} else {
for(int j = 0; j != n; j++) {
final int i = sel[j];
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.<OperatorMethod>(
value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
outputColVector.setFromScratch<CamelReturnType>(i);
@@ -149,7 +149,7 @@ public class <ClassName> extends VectorExpression {
outputColVector.noNulls = true;
}
for(int i = 0; i != n; i++) {
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.<OperatorMethod>(
value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
outputColVector.setFromScratch<CamelReturnType>(i);
@@ -166,7 +166,7 @@ public class <ClassName> extends VectorExpression {
int i = sel[j];
if (!inputIsNull[i]) {
outputIsNull[i] = false;
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.<OperatorMethod>(
value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
outputColVector.setFromScratch<CamelReturnType>(i);
@@ -179,7 +179,7 @@ public class <ClassName> extends VectorExpression {
for(int i = 0; i != n; i++) {
if (!inputIsNull[i]) {
outputIsNull[i] = false;
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.<OperatorMethod>(
value, scratchTimestamp2, outputColVector.getScratch<CamelReturnType>());
outputColVector.setFromScratch<CamelReturnType>(i);
@@ -196,7 +196,7 @@ public class <ClassName> extends VectorExpression {
@Override
public String vectorExpressionParameters() {
- return "val " + value.toString() + ", " + getColumnParamString(1, colNum);
+ return "val " + TimestampUtils.timestampScalarTypeToString(value) + ", " + getColumnParamString(1, colNum);
}
@Override
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
index dc4f5c8..bb664cf 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
@@ -195,7 +195,7 @@ public class <ClassName> extends VectorExpression {
@Override
public String vectorExpressionParameters() {
- return "val " + value.toString() + ", " + getColumnParamString(1, colNum);
+ return "val " + TimestampUtils.timestampScalarTypeToString(value) + ", " + getColumnParamString(1, colNum);
}
@Override
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt
index 1b1117e..d3bb4a0 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticTimestampColumn.txt
@@ -185,7 +185,7 @@ public class <ClassName> extends VectorExpression {
@Override
public String vectorExpressionParameters() {
- return "val " + value.toString() + ", " + getColumnParamString(1, colNum);
+ return "val " + TimestampUtils.timestampScalarTypeToString(value) + ", " + getColumnParamString(1, colNum);
}
@Override
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt
index c409a6b..b99bcf9 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarCompareTimestampColumn.txt
@@ -162,7 +162,7 @@ public class <ClassName> extends VectorExpression {
@Override
public String vectorExpressionParameters() {
- return "val " + value.toString() + ", " + getColumnParamString(1, colNum);
+ return "val " + TimestampUtils.timestampScalarTypeToString(value) + ", " + getColumnParamString(1, colNum);
}
@Override
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt
index abb7b22..810f31f 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt
@@ -37,7 +37,7 @@ import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.apache.hadoop.hive.ql.util.TimestampUtils;
import com.google.common.base.Preconditions;
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
index 579437e..5114cda 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
/**
* <ClassName>. Vectorized implementation for MIN/MAX aggregates.
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
index 207b66f..692aba7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
@@ -51,7 +51,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
import org.apache.hadoop.hive.ql.plan.api.StageType;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -318,7 +318,7 @@ public class ColumnStatsUpdateTask extends Task<ColumnStatsUpdateWork> {
private Date readDateValue(String dateStr) {
// try either yyyy-mm-dd, or integer representing days since epoch
try {
- DateWritable writableVal = new DateWritable(java.sql.Date.valueOf(dateStr));
+ DateWritableV2 writableVal = new DateWritableV2(org.apache.hadoop.hive.common.type.Date.valueOf(dateStr));
return new Date(writableVal.getDays());
} catch (IllegalArgumentException err) {
// Fallback to integer parsing
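
The readDateValue change above swaps java.sql.Date.valueOf for the new zone-free Date.valueOf before extracting an epoch-day count. A hedged sketch of the same parse-or-fallback logic using java.time (exception types differ from the Hive classes):

    import java.time.LocalDate;
    import java.time.format.DateTimeParseException;

    public class ReadDateValueSketch {
      // Parse yyyy-MM-dd into days since epoch (what DateWritableV2.getDays()
      // returns), falling back to an integer day count.
      static long readDateValue(String dateStr) {
        try {
          return LocalDate.parse(dateStr).toEpochDay();
        } catch (DateTimeParseException err) {
          return Long.parseLong(dateStr);
        }
      }

      public static void main(String[] args) {
        System.out.println(readDateValue("2018-06-22")); // 17704
        System.out.println(readDateValue("17704"));      // 17704
      }
    }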
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index e77fe18..9ddfc06 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -289,15 +289,15 @@ public final class FunctionRegistry {
system.registerGenericUDF(UNARY_PLUS_FUNC_NAME, GenericUDFOPPositive.class);
system.registerGenericUDF(UNARY_MINUS_FUNC_NAME, GenericUDFOPNegative.class);
- system.registerUDF("day", UDFDayOfMonth.class, false);
- system.registerUDF("dayofmonth", UDFDayOfMonth.class, false);
+ system.registerGenericUDF("day", UDFDayOfMonth.class);
+ system.registerGenericUDF("dayofmonth", UDFDayOfMonth.class);
system.registerUDF("dayofweek", UDFDayOfWeek.class, false);
- system.registerUDF("month", UDFMonth.class, false);
+ system.registerGenericUDF("month", UDFMonth.class);
system.registerGenericUDF("quarter", GenericUDFQuarter.class);
- system.registerUDF("year", UDFYear.class, false);
- system.registerUDF("hour", UDFHour.class, false);
- system.registerUDF("minute", UDFMinute.class, false);
- system.registerUDF("second", UDFSecond.class, false);
+ system.registerGenericUDF("year", UDFYear.class);
+ system.registerGenericUDF("hour", UDFHour.class);
+ system.registerGenericUDF("minute", UDFMinute.class);
+ system.registerGenericUDF("second", UDFSecond.class);
system.registerUDF("from_unixtime", UDFFromUnixTime.class, false);
system.registerGenericUDF("to_date", GenericUDFDate.class);
system.registerUDF("weekofyear", UDFWeekOfYear.class, false);
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
index 58252e1..7798652 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
@@ -20,9 +20,10 @@ package org.apache.hadoop.hive.ql.exec.vector;
import java.util.concurrent.TimeUnit;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
public final class TimestampUtils {
@@ -30,13 +31,19 @@ public final class TimestampUtils {
static final long NANOSECONDS_PER_MILLISECOND = TimeUnit.MILLISECONDS.toNanos(1);
public static long daysToNanoseconds(long daysSinceEpoch) {
- return DateWritable.daysToMillis((int) daysSinceEpoch) * NANOSECONDS_PER_MILLISECOND;
+ return DateWritableV2.daysToMillis((int) daysSinceEpoch) * NANOSECONDS_PER_MILLISECOND;
}
- public static TimestampWritable timestampColumnVectorWritable(
+ public static TimestampWritableV2 timestampColumnVectorWritable(
TimestampColumnVector timestampColVector, int elementNum,
- TimestampWritable timestampWritable) {
- timestampWritable.set(timestampColVector.asScratchTimestamp(elementNum));
+ TimestampWritableV2 timestampWritable) {
+ java.sql.Timestamp ts = timestampColVector.asScratchTimestamp(elementNum);
+ if (ts == null) {
+ timestampWritable.set((Timestamp) null);
+ return timestampWritable;
+ }
+ Timestamp newTS = Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
+ timestampWritable.set(newTS);
return timestampWritable;
}
@@ -46,4 +53,14 @@ public final class TimestampUtils {
intervalDayTimeWritable.set(intervalDayTimeColVector.asScratchIntervalDayTime(elementNum));
return intervalDayTimeWritable;
}
+
+ public static String timestampScalarTypeToString(Object o) {
+ if (o instanceof java.sql.Timestamp) {
+ // Special handling for timestamp
+ java.sql.Timestamp ts = (java.sql.Timestamp) o;
+ return org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
+ ts.getTime(), ts.getNanos()).toString();
+ }
+ return o.toString();
+ }
}
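
The new timestampScalarTypeToString helper exists because vectorized internals still hold java.sql.Timestamp scalars, whose toString() renders in the JVM's local zone; routing through org.apache.hadoop.hive.common.type.Timestamp yields the UTC-consistent form. A small sketch of the millis+nanos hand-off that ofEpochMilli(ts.getTime(), ts.getNanos()) relies on:

    import java.sql.Timestamp;

    public class NanosHandoffSketch {
      public static void main(String[] args) {
        Timestamp ts = Timestamp.valueOf("2018-06-22 17:46:49.123456789");
        // getTime() includes the millisecond part of the fraction, while
        // getNanos() carries the full nanos-of-second; both are needed to
        // rebuild the value without losing sub-millisecond precision.
        System.out.println(ts.getTime() % 1000); // 123
        System.out.println(ts.getNanos());       // 123456789
      }
    }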
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
index e96619c..d1dcad9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
@@ -18,11 +18,11 @@
package org.apache.hadoop.hive.ql.exec.vector;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
@@ -38,10 +38,10 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.VectorPartitionConversion;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -49,7 +49,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -174,7 +174,7 @@ public class VectorAssignRow {
((PrimitiveTypeInfo) targetTypeInfos[logicalColumnIndex]).getPrimitiveCategory();
switch (targetPrimitiveCategory) {
case DATE:
- convertTargetWritables[logicalColumnIndex] = new DateWritable();
+ convertTargetWritables[logicalColumnIndex] = new DateWritableV2();
break;
case STRING:
convertTargetWritables[logicalColumnIndex] = new Text();
@@ -414,19 +414,19 @@ public class VectorAssignRow {
case TIMESTAMP:
if (object instanceof Timestamp) {
((TimestampColumnVector) columnVector).set(
- batchIndex, ((Timestamp) object));
+ batchIndex, ((Timestamp) object).toSqlTimestamp());
} else {
((TimestampColumnVector) columnVector).set(
- batchIndex, ((TimestampWritable) object).getTimestamp());
+ batchIndex, ((TimestampWritableV2) object).getTimestamp().toSqlTimestamp());
}
break;
case DATE:
if (object instanceof Date) {
((LongColumnVector) columnVector).vector[batchIndex] =
- DateWritable.dateToDays((Date) object);
+ DateWritableV2.dateToDays((Date) object);
} else {
((LongColumnVector) columnVector).vector[batchIndex] =
- ((DateWritable) object).getDays();
+ ((DateWritableV2) object).getDays();
}
break;
case FLOAT:
@@ -711,7 +711,7 @@ public class VectorAssignRow {
return;
}
((TimestampColumnVector) columnVector).set(
- batchIndex, timestamp);
+ batchIndex, timestamp.toSqlTimestamp());
}
break;
case DATE:
@@ -722,9 +722,9 @@ public class VectorAssignRow {
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
- DateWritable dateWritable = (DateWritable) convertTargetWritable;
+ DateWritableV2 dateWritable = (DateWritableV2) convertTargetWritable;
if (dateWritable == null) {
- dateWritable = new DateWritable();
+ dateWritable = new DateWritableV2();
}
dateWritable.set(date);
((LongColumnVector) columnVector).vector[batchIndex] =
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java
index 47eaf36..839e1e9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java
@@ -18,9 +18,8 @@
package org.apache.hadoop.hive.ql.exec.vector;
-import java.sql.Timestamp;
-
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -77,9 +76,9 @@ public class VectorBatchDebug {
} else if (colVector instanceof DecimalColumnVector) {
sb.append(((DecimalColumnVector) colVector).vector[index].toString());
} else if (colVector instanceof TimestampColumnVector) {
- Timestamp timestamp = new Timestamp(0);
+ java.sql.Timestamp timestamp = new java.sql.Timestamp(0);
((TimestampColumnVector) colVector).timestampUpdate(timestamp, index);
- sb.append(timestamp.toString());
+ sb.append(Timestamp.ofEpochMilli(timestamp.getTime(), timestamp.getNanos()).toString());
} else if (colVector instanceof IntervalDayTimeColumnVector) {
HiveIntervalDayTime intervalDayTime = ((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(index);
sb.append(intervalDayTime.toString());
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
index c555464..39a124f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hive.ql.exec.vector;
-import java.sql.Timestamp;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -27,9 +26,10 @@ import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -37,7 +37,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -50,7 +50,6 @@ import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
-import org.apache.hive.common.util.DateUtils;
/**
* This class is used as a static factory for VectorColumnAssign.
@@ -170,10 +169,10 @@ public class VectorColumnAssignFactory {
extends VectorColumnAssignVectorBase<TimestampColumnVector> {
protected void assignTimestamp(Timestamp value, int index) {
- outCol.set(index, value);
+ outCol.set(index, value.toSqlTimestamp());
}
- protected void assignTimestamp(TimestampWritable tw, int index) {
- outCol.set(index, tw.getTimestamp());
+ protected void assignTimestamp(TimestampWritableV2 tw, int index) {
+ outCol.set(index, tw.getTimestamp().toSqlTimestamp());
}
}
@@ -342,7 +341,7 @@ public class VectorColumnAssignFactory {
assignNull(destIndex);
}
else {
- assignTimestamp((TimestampWritable) val, destIndex);
+ assignTimestamp((TimestampWritableV2) val, destIndex);
}
}
}.init(outputBatch, (TimestampColumnVector) destCol);
@@ -355,7 +354,7 @@ public class VectorColumnAssignFactory {
assignNull(destIndex);
}
else {
- DateWritable bw = (DateWritable) val;
+ DateWritableV2 bw = (DateWritableV2) val;
assignLong(bw.getDays(), destIndex);
}
}
@@ -585,7 +584,7 @@ public class VectorColumnAssignFactory {
vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.STRING);
} else if (writables[i] instanceof BytesWritable) {
vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.BINARY);
- } else if (writables[i] instanceof TimestampWritable) {
+ } else if (writables[i] instanceof TimestampWritableV2) {
vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.TIMESTAMP);
} else if (writables[i] instanceof HiveIntervalYearMonthWritable) {
vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.INTERVAL_YEAR_MONTH);
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
index 8ea625e..55f3556 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
@@ -23,6 +23,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
@@ -31,7 +32,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.VectorPartitionConversion;
import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -39,7 +40,6 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -556,7 +556,7 @@ public final class VectorDeserializeRow<T extends DeserializeRead> {
break;
case TIMESTAMP:
((TimestampColumnVector) colVector).set(
- batchIndex, deserializeRead.currentTimestampWritable.getTimestamp());
+ batchIndex, deserializeRead.currentTimestampWritable.getTimestamp().toSqlTimestamp());
break;
case DATE:
((LongColumnVector) colVector).vector[batchIndex] = deserializeRead.currentDateWritable.getDays();
@@ -1079,17 +1079,17 @@ public final class VectorDeserializeRow<T extends DeserializeRead> {
case TIMESTAMP:
{
if (writable == null) {
- writable = new TimestampWritable();
+ writable = new TimestampWritableV2();
}
- ((TimestampWritable) writable).set(deserializeRead.currentTimestampWritable);
+ ((TimestampWritableV2) writable).set(deserializeRead.currentTimestampWritable);
}
break;
case DATE:
{
if (writable == null) {
- writable = new DateWritable();
+ writable = new DateWritableV2();
}
- ((DateWritable) writable).set(deserializeRead.currentDateWritable);
+ ((DateWritableV2) writable).set(deserializeRead.currentDateWritable);
}
break;
case FLOAT:
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
index 152d75b..f5f4d72 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
@@ -23,10 +23,12 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
@@ -36,7 +38,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -44,7 +45,6 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -237,11 +237,14 @@ public class VectorExtractRow {
((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case TIMESTAMP:
- ((TimestampWritable) primitiveWritable).set(
- ((TimestampColumnVector) colVector).asScratchTimestamp(adjustedIndex));
+ // From java.sql.Timestamp used by vectorization to serializable org.apache.hadoop.hive.common.type.Timestamp
+ java.sql.Timestamp ts =
+ ((TimestampColumnVector) colVector).asScratchTimestamp(adjustedIndex);
+ Timestamp serializableTS = Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
+ ((TimestampWritableV2) primitiveWritable).set(serializableTS);
return primitiveWritable;
case DATE:
- ((DateWritable) primitiveWritable).set(
+ ((DateWritableV2) primitiveWritable).set(
(int) ((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case FLOAT:
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
index 1f46f2c..38c31a5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.exec.vector;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hive.common.util.Murmur3;
import java.sql.Date;
@@ -30,11 +31,9 @@ import org.apache.hadoop.hive.ql.exec.KeyWrapper;
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import com.google.common.base.Preconditions;
@@ -413,7 +412,7 @@ public class VectorHashKeyWrapper extends KeyWrapper {
case DATE:
{
Date dt = new Date(0);
- dt.setTime(DateWritable.daysToMillis((int) longValues[i]));
+ dt.setTime(DateWritableV2.daysToMillis((int) longValues[i]));
sb.append(" date ");
sb.append(dt.toString());
}
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
index cb2efb7..66585af 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
@@ -22,6 +22,7 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -432,7 +433,10 @@ public final class VectorSerializeRow<T extends SerializeWrite> {
serializeWrite.writeDate((int) ((LongColumnVector) colVector).vector[adjustedBatchIndex]);
break;
case TIMESTAMP:
- serializeWrite.writeTimestamp(((TimestampColumnVector) colVector).asScratchTimestamp(adjustedBatchIndex));
+ // From java.sql.Timestamp used by vectorization to serializable org.apache.hadoop.hive.common.type.Timestamp
+ java.sql.Timestamp ts = ((TimestampColumnVector) colVector).asScratchTimestamp(adjustedBatchIndex);
+ Timestamp serializableTS = Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
+ serializeWrite.writeTimestamp(serializableTS);
break;
case FLOAT:
serializeWrite.writeFloat((float) ((DoubleColumnVector) colVector).vector[adjustedBatchIndex]);
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 84a0a3a..5631347 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.exec.vector;
import java.lang.reflect.Constructor;
import java.nio.charset.StandardCharsets;
-import java.sql.Date;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
@@ -35,6 +34,7 @@ import java.util.TreeSet;
import java.util.regex.Pattern;
import org.apache.commons.lang.ArrayUtils;
+import org.apache.hadoop.hive.common.type.Date;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
@@ -69,7 +69,7 @@ import org.apache.hadoop.hive.ql.udf.*;
import org.apache.hadoop.hive.ql.udf.generic.*;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
@@ -1392,9 +1392,10 @@ public class VectorizationContext {
case INT_FAMILY:
return new ConstantVectorExpression(outCol, ((Number) constantValue).longValue(), typeInfo);
case DATE:
- return new ConstantVectorExpression(outCol, DateWritable.dateToDays((Date) constantValue), typeInfo);
+ return new ConstantVectorExpression(outCol, DateWritableV2.dateToDays((Date) constantValue), typeInfo);
case TIMESTAMP:
- return new ConstantVectorExpression(outCol, (Timestamp) constantValue, typeInfo);
+ return new ConstantVectorExpression(outCol,
+ ((org.apache.hadoop.hive.common.type.Timestamp) constantValue).toSqlTimestamp(), typeInfo);
case INTERVAL_YEAR_MONTH:
return new ConstantVectorExpression(outCol,
((HiveIntervalYearMonth) constantValue).getTotalMonths(), typeInfo);
@@ -2192,10 +2193,10 @@ public class VectorizationContext {
return InConstantType.INT_FAMILY;
case DATE:
- return InConstantType.TIMESTAMP;
+ return InConstantType.DATE;
case TIMESTAMP:
- return InConstantType.DATE;
+ return InConstantType.TIMESTAMP;
case FLOAT:
case DOUBLE:
@@ -2801,6 +2802,8 @@ public class VectorizationContext {
return createVectorExpression(CastDecimalToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isDateFamily(inputType)) {
return createVectorExpression(CastDateToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
+ } else if (isTimestampFamily(inputType)) {
+ return createVectorExpression(CastTimestampToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isStringFamily(inputType)) {
return createVectorExpression(CastStringGroupToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
}
@@ -2829,6 +2832,8 @@ public class VectorizationContext {
return createVectorExpression(CastDecimalToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isDateFamily(inputType)) {
return createVectorExpression(CastDateToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
+ } else if (isTimestampFamily(inputType)) {
+ return createVectorExpression(CastTimestampToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isStringFamily(inputType)) {
return createVectorExpression(CastStringGroupToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
}
@@ -2857,6 +2862,8 @@ public class VectorizationContext {
return createVectorExpression(CastDecimalToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isDateFamily(inputType)) {
return createVectorExpression(CastDateToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
+ } else if (isTimestampFamily(inputType)) {
+ return createVectorExpression(CastTimestampToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isStringFamily(inputType)) {
return createVectorExpression(CastStringGroupToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
}
@@ -3513,7 +3520,9 @@ public class VectorizationContext {
Object scalarValue = getScalarValue(constDesc);
switch (primitiveCategory) {
case DATE:
- return new Long(DateWritable.dateToDays((Date) scalarValue));
+ return new Long(DateWritableV2.dateToDays((Date) scalarValue));
+ case TIMESTAMP:
+ return ((org.apache.hadoop.hive.common.type.Timestamp) scalarValue).toSqlTimestamp();
case INTERVAL_YEAR_MONTH:
return ((HiveIntervalYearMonth) scalarValue).getTotalMonths();
default:
@@ -3558,10 +3567,10 @@ public class VectorizationContext {
Object constant = evaluator.evaluate(null);
Object java = ObjectInspectorUtils.copyToStandardJavaObject(constant, output);
- if (!(java instanceof Timestamp)) {
+ if (!(java instanceof org.apache.hadoop.hive.common.type.Timestamp)) {
throw new HiveException("Udf: failed to convert to timestamp");
}
- Timestamp ts = (Timestamp) java;
+ Timestamp ts = ((org.apache.hadoop.hive.common.type.Timestamp) java).toSqlTimestamp();
return ts;
}
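
Two things happen in this file: the InConstantType switch is a plain bug fix (DATE constants were mapped to TIMESTAMP and vice versa), and the repeated toSqlTimestamp() calls bridge the new zone-free org.apache.hadoop.hive.common.type.Timestamp back to the java.sql.Timestamp that TimestampColumnVector still stores. A hedged sketch of that bridge using java.time (Hive's Timestamp models a zone-free wall-clock value; internals may differ):

    import java.time.LocalDateTime;

    public class ToSqlTimestampSketch {
      public static void main(String[] args) {
        // A zone-free timestamp value, as the new Hive Timestamp models it.
        LocalDateTime ldt = LocalDateTime.parse("2018-06-22T17:46:49.123456789");
        // java.sql.Timestamp.valueOf(LocalDateTime) preserves the wall-clock
        // fields, so converting once at the boundary keeps the vectorized
        // code paths on a single representation.
        java.sql.Timestamp sql = java.sql.Timestamp.valueOf(ldt);
        System.out.println(sql.getNanos()); // 123456789
      }
    }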
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
index d51d44a..211f452 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
@@ -19,29 +19,22 @@
package org.apache.hadoop.hive.ql.exec.vector;
import java.io.IOException;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.LinkedList;
import java.util.List;
-import java.util.Map;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -49,19 +42,15 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
@@ -70,7 +59,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DataOutputBuffer;
@@ -79,7 +67,8 @@ import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
-import org.apache.hive.common.util.DateUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class VectorizedBatchUtil {
private static final Logger LOG = LoggerFactory.getLogger(VectorizedBatchUtil.class);
@@ -378,7 +367,7 @@ public class VectorizedBatchUtil {
case DATE: {
LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
- lcv.vector[rowIndex] = ((DateWritable) writableCol).getDays();
+ lcv.vector[rowIndex] = ((DateWritableV2) writableCol).getDays();
lcv.isNull[rowIndex] = false;
} else {
lcv.vector[rowIndex] = 1;
@@ -411,7 +400,7 @@ public class VectorizedBatchUtil {
case TIMESTAMP: {
TimestampColumnVector lcv = (TimestampColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
- lcv.set(rowIndex, ((TimestampWritable) writableCol).getTimestamp());
+ lcv.set(rowIndex, ((TimestampWritableV2) writableCol).getTimestamp().toSqlTimestamp());
lcv.isNull[rowIndex] = false;
} else {
lcv.setNullValue(rowIndex);
@@ -744,7 +733,7 @@ public class VectorizedBatchUtil {
if (sourceColVector.noNulls) {
for (int i = 0; i < size; i++) {
targetTime[i] = sourceTime[i];
- targetNanos[i] = targetNanos[i];
+ targetNanos[i] = sourceNanos[i];
}
} else {
boolean[] sourceIsNull = sourceColVector.isNull;
@@ -753,7 +742,7 @@ public class VectorizedBatchUtil {
for (int i = 0; i < size; i++) {
if (!sourceIsNull[i]) {
targetTime[i] = sourceTime[i];
- targetNanos[i] = targetNanos[i];
+ targetNanos[i] = sourceNanos[i];
} else {
targetTime[i] = 0;
targetNanos[i] = 0;
@@ -899,9 +888,9 @@ public class VectorizedBatchUtil {
case LONG:
return new LongWritable(0);
case TIMESTAMP:
- return new TimestampWritable(new Timestamp(0));
+ return new TimestampWritableV2(new Timestamp());
case DATE:
- return new DateWritable(new Date(0));
+ return new DateWritableV2(new Date());
case FLOAT:
return new FloatWritable(0);
case DOUBLE:
@@ -976,9 +965,9 @@ public class VectorizedBatchUtil {
} else if (colVector instanceof DecimalColumnVector) {
sb.append(((DecimalColumnVector) colVector).vector[index].toString());
} else if (colVector instanceof TimestampColumnVector) {
- Timestamp timestamp = new Timestamp(0);
+ java.sql.Timestamp timestamp = new java.sql.Timestamp(0);
((TimestampColumnVector) colVector).timestampUpdate(timestamp, index);
- sb.append(timestamp.toString());
+ sb.append(Timestamp.ofEpochMilli(timestamp.getTime(), timestamp.getNanos()).toString());
} else if (colVector instanceof IntervalDayTimeColumnVector) {
HiveIntervalDayTime intervalDayTime = ((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(index);
sb.append(intervalDayTime.toString());
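
The two targetNanos hunks above fix a self-assignment: copying a TimestampColumnVector copied the millis array but left the nanos array untouched. A minimal sketch of the corrected copy over the vector's parallel arrays (array names borrowed from the diff; the standalone class is illustrative):

    public class ParallelCopySketch {
      public static void main(String[] args) {
        long[] sourceTime = {1000L, 2000L};
        int[] sourceNanos = {123456789, 987654321};
        long[] targetTime = new long[sourceTime.length];
        int[] targetNanos = new int[sourceNanos.length];
        for (int i = 0; i < sourceTime.length; i++) {
          targetTime[i] = sourceTime[i];
          targetNanos[i] = sourceNanos[i]; // was: targetNanos[i] = targetNanos[i];
        }
        System.out.println(targetTime[1] + "." + targetNanos[1]); // 2000.987654321
      }
    }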
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
index ffbfb6f..ca17d4b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
@@ -18,45 +18,38 @@
package org.apache.hadoop.hive.ql.exec.vector;
import java.io.IOException;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.Arrays;
import java.util.LinkedHashMap;
-import java.util.List;
import java.util.Map;
-import org.apache.hadoop.hive.common.type.HiveChar;
-import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport.Support;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.IOPrepareCache;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
-import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-import org.apache.hadoop.hive.ql.plan.Explain.Vectorization;
-import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.mapred.FileSplit;
-import org.apache.hive.common.util.DateUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
@@ -487,7 +480,7 @@ public class VectorizedRowBatchCtx {
lcv.isNull[0] = true;
lcv.isRepeating = true;
} else {
- lcv.fill(DateWritable.dateToDays((Date) value));
+ lcv.fill(DateWritableV2.dateToDays((Date) value));
}
}
break;
@@ -499,7 +492,7 @@ public class VectorizedRowBatchCtx {
lcv.isNull[0] = true;
lcv.isRepeating = true;
} else {
- lcv.fill((Timestamp) value);
+ lcv.fill(((Timestamp) value).toSqlTimestamp());
}
}
break;
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
index e559886..dfa9f8a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
@@ -19,20 +19,27 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import java.sql.Date;
+import java.text.SimpleDateFormat;
+import java.util.TimeZone;
public class CastDateToString extends LongToStringUnaryUDF {
private static final long serialVersionUID = 1L;
protected transient Date dt = new Date(0);
+ private transient SimpleDateFormat formatter;
public CastDateToString() {
super();
+ formatter = new SimpleDateFormat("yyyy-MM-dd");
+ formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
}
public CastDateToString(int inputColumn, int outputColumnNum) {
super(inputColumn, outputColumnNum);
+ formatter = new SimpleDateFormat("yyyy-MM-dd");
+ formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
}
// The assign method will be overridden for CHAR and VARCHAR.
@@ -42,8 +49,8 @@ public class CastDateToString extends LongToStringUnaryUDF {
@Override
protected void func(BytesColumnVector outV, long[] vector, int i) {
- dt.setTime(DateWritable.daysToMillis((int) vector[i]));
- byte[] temp = dt.toString().getBytes();
+ dt.setTime(DateWritableV2.daysToMillis((int) vector[i]));
+ byte[] temp = formatter.format(dt).getBytes();
assign(outV, i, temp, temp.length);
}
}
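
CastDateToString now formats through a SimpleDateFormat pinned to UTC because java.sql.Date.toString() renders in the JVM's local zone, so a date stored as UTC millis could print as the previous or next day. A runnable sketch of the pinned-formatter technique:

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class UtcDateFormatSketch {
      public static void main(String[] args) {
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
        formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
        long utcMidnight = 17704L * 86_400_000L; // 2018-06-22 00:00 UTC
        System.out.println(formatter.format(new Date(utcMidnight))); // 2018-06-22
      }
    }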
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
index dbd7c01..37a48e6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
public class CastDateToTimestamp extends VectorExpression {
private static final long serialVersionUID = 1L;
@@ -45,7 +45,7 @@ public class CastDateToTimestamp extends VectorExpression {
}
private void setDays(TimestampColumnVector timestampColVector, long[] vector, int elementNum) {
- timestampColVector.getScratchTimestamp().setTime(DateWritable.daysToMillis((int) vector[elementNum]));
+ timestampColVector.getScratchTimestamp().setTime(DateWritableV2.daysToMillis((int) vector[elementNum]));
timestampColVector.setFromScratchTimestamp(elementNum);
}
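
The conversion above, worked out under the assumption that DateWritableV2.daysToMillis is a pure UTC multiplication with no local-zone adjustment (the behavior change this patch is after):

  long MILLIS_PER_DAY = 86_400_000L;
  int epochDays = 17_897;                       // 2019-01-01
  long utcMillis = epochDays * MILLIS_PER_DAY;  // 1_546_300_800_000
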
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
index b2185d9..f99bd69 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
@@ -22,7 +22,6 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
index 9e0c00e..ebd14e8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
@@ -20,10 +20,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
import java.util.Arrays;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
public class CastLongToTimestamp extends VectorExpression {
private static final long serialVersionUID = 1L;
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
index a9ede6b..bea2240 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
@@ -20,10 +20,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
import java.util.Arrays;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
public class CastMillisecondsLongToTimestamp extends VectorExpression {
private static final long serialVersionUID = 1L;
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java
index 6edd7b9..a6dff12 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java
@@ -18,13 +18,13 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hive.common.util.DateParser;
import java.nio.charset.StandardCharsets;
@@ -38,7 +38,6 @@ public class CastStringToDate extends VectorExpression {
private final int inputColumn;
- private transient final java.sql.Date sqlDate = new java.sql.Date(0);
private transient final DateParser dateParser = new DateParser();
public CastStringToDate() {
@@ -154,8 +153,9 @@ public class CastStringToDate extends VectorExpression {
private void evaluate(LongColumnVector outputColVector, BytesColumnVector inV, int i) {
String dateString = new String(inV.vector[i], inV.start[i], inV.length[i], StandardCharsets.UTF_8);
- if (dateParser.parseDate(dateString, sqlDate)) {
- outputColVector.vector[i] = DateWritable.dateToDays(sqlDate);
+ Date hDate = new Date();
+ if (dateParser.parseDate(dateString, hDate)) {
+ outputColVector.vector[i] = DateWritableV2.dateToDays(hDate);
return;
}
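
A compile-level sketch of the new parse path, assuming DateParser.parseDate fills a mutable org.apache.hadoop.hive.common.type.Date as the hunk shows:

  import org.apache.hadoop.hive.common.type.Date;
  import org.apache.hadoop.hive.serde2.io.DateWritableV2;
  import org.apache.hive.common.util.DateParser;

  DateParser parser = new DateParser();
  Date d = new Date();
  if (parser.parseDate("2018-06-22", d)) {
    int epochDays = DateWritableV2.dateToDays(d);  // zone-independent day count
  }
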
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToChar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToChar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToChar.java
new file mode 100644
index 0000000..d363ad9
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToChar.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+
+public class CastTimestampToChar extends CastTimestampToString implements TruncStringOutput {
+
+ private static final long serialVersionUID = 1L;
+ private int maxLength; // Must be manually set with setMaxLength.
+
+ public CastTimestampToChar() {
+ super();
+ }
+
+ public CastTimestampToChar(int inputColumn, int outputColumnNum) {
+ super(inputColumn, outputColumnNum);
+ }
+
+ @Override
+ protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
+ StringExpr.rightTrimAndTruncate(outV, i, bytes, 0, length, maxLength);
+ }
+
+ @Override
+ public int getMaxLength() {
+ return maxLength;
+ }
+
+ @Override
+ public void setMaxLength(int maxLength) {
+ this.maxLength = maxLength;
+ }
+
+ @Override
+ public String vectorExpressionParameters() {
+ return getColumnParamString(0, inputColumn) + ", maxLength " + maxLength;
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
index 6a41bb0..ba7e91a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* To be used to cast timestamp to date.
@@ -40,6 +40,6 @@ public class CastTimestampToDate extends FuncTimestampToLong {
@Override
protected void func(LongColumnVector outV, TimestampColumnVector inV, int i) {
- outV.vector[i] = DateWritable.millisToDays(inV.getTime(i));
+ outV.vector[i] = DateWritableV2.millisToDays(inV.getTime(i));
}
}
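
An assumption about the millisToDays contract that matters here: pre-epoch timestamps must round down to the previous UTC day, not toward zero.

  long millis = -1L;                               // 1969-12-31 23:59:59.999 UTC
  long days = Math.floorDiv(millis, 86_400_000L);  // -1 -> 1969-12-31
  long wrong = millis / 86_400_000L;               //  0 -> 1970-01-01 (truncation)
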
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
index ebe18a9..92aade0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
@@ -18,9 +18,10 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.ql.util.TimestampUtils;
/**
* To be used to cast timestamp to decimal.
@@ -39,6 +40,8 @@ public class CastTimestampToDecimal extends FuncTimestampToDecimal {
@Override
protected void func(DecimalColumnVector outV, TimestampColumnVector inV, int i) {
- outV.set(i, TimestampWritable.getHiveDecimal(inV.asScratchTimestamp(i)));
+ Double timestampDouble = TimestampUtils.getDouble(inV.asScratchTimestamp(i));
+ HiveDecimal result = HiveDecimal.create(timestampDouble.toString());
+ outV.set(i, result);
}
}
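
Assuming TimestampUtils.getDouble returns seconds since the epoch with nanoseconds in the fractional part, the decimal is built from the double's string form:

  import org.apache.hadoop.hive.common.type.HiveDecimal;

  java.sql.Timestamp t = new java.sql.Timestamp(1_500L);  // 1.5s after epoch
  double seconds = 1.5;                          // what getDouble(t) should yield
  HiveDecimal d = HiveDecimal.create(Double.toString(seconds));  // 1.5

Going through double caps precision at 15-17 significant digits, which is fine for millisecond timestamps but can round away nanosecond detail on values far from the epoch.
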
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
index eedde7a..bf5e105 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
@@ -20,10 +20,8 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
import java.util.Arrays;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
public class CastTimestampToDouble extends VectorExpression {
private static final long serialVersionUID = 1L;
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java
new file mode 100644
index 0000000..0e20cf1
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeFormatterBuilder;
+import java.time.temporal.ChronoField;
+
+public class CastTimestampToString extends TimestampToStringUnaryUDF {
+ private static final long serialVersionUID = 1L;
+ private static final DateTimeFormatter PRINT_FORMATTER;
+
+ static {
+ DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
+ // Date and time parts
+ builder.append(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
+ // Fractional part
+ builder.optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true).optionalEnd();
+ PRINT_FORMATTER = builder.toFormatter();
+ }
+
+ public CastTimestampToString() {
+ super();
+ }
+
+ public CastTimestampToString(int inputColumn, int outputColumnNum) {
+ super(inputColumn, outputColumnNum);
+ }
+
+ // The assign method will be overridden for CHAR and VARCHAR.
+ protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
+ outV.setVal(i, bytes, 0, length);
+ }
+
+ @Override
+ protected void func(BytesColumnVector outV, TimestampColumnVector inV, int i) {
+ // Render directly from the vector's time/nanos in UTC; no scratch Timestamp needed.
+ byte[] temp = LocalDateTime.ofInstant(Instant.ofEpochMilli(inV.time[i]), ZoneOffset.UTC)
+ .withNano(inV.nanos[i])
+ .format(PRINT_FORMATTER).getBytes();
+ assign(outV, i, temp, temp.length);
+ }
+}
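
What the builder produces, runnable against the JDK alone: appendFraction(NANO_OF_SECOND, 0, 9, true) emits the decimal point only when nanos are non-zero and strips trailing zeros.

  import java.time.LocalDateTime;
  import java.time.format.DateTimeFormatter;
  import java.time.format.DateTimeFormatterBuilder;
  import java.time.temporal.ChronoField;

  public class TsFormatDemo {
    public static void main(String[] args) {
      DateTimeFormatter f = new DateTimeFormatterBuilder()
          .append(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))
          .optionalStart()
          .appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true)
          .optionalEnd()
          .toFormatter();
      // "2018-06-22 17:46:49" -- zero nanos, no fractional part at all
      System.out.println(LocalDateTime.of(2018, 6, 22, 17, 46, 49).format(f));
      // "2018-06-22 17:46:49.123" -- trailing zeros trimmed
      System.out.println(LocalDateTime.of(2018, 6, 22, 17, 46, 49, 123_000_000).format(f));
    }
  }
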
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToVarChar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToVarChar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToVarChar.java
new file mode 100644
index 0000000..da740fa
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToVarChar.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+
+public class CastTimestampToVarChar extends CastTimestampToString implements TruncStringOutput {
+
+ private static final long serialVersionUID = 1L;
+ private int maxLength; // Must be manually set with setMaxLength.
+
+ public CastTimestampToVarChar() {
+ super();
+ }
+
+ public CastTimestampToVarChar(int inputColumn, int outputColumnNum) {
+ super(inputColumn, outputColumnNum);
+ }
+
+ @Override
+ protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
+ StringExpr.truncate(outV, i, bytes, 0, length, maxLength);
+ }
+
+ @Override
+ public int getMaxLength() {
+ return maxLength;
+ }
+
+ @Override
+ public void setMaxLength(int maxLength) {
+ this.maxLength = maxLength;
+ }
+
+ @Override
+ public String vectorExpressionParameters() {
+ return getColumnParamString(0, inputColumn) + ", maxLength " + maxLength;
+ }
+}
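
The only difference from CastTimestampToChar is assign(): VARCHAR merely truncates to maxLength, while CHAR right-trims trailing blanks as well. A sketch under the assumption that the StringExpr helpers behave as their names suggest:

  import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
  import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;

  BytesColumnVector outV = new BytesColumnVector(1024);
  outV.initBuffer();
  byte[] ts = "2018-06-22 17:46:49".getBytes();
  StringExpr.truncate(outV, 0, ts, 0, ts.length, 10);             // VARCHAR(10): "2018-06-22"
  StringExpr.rightTrimAndTruncate(outV, 1, ts, 0, ts.length, 10); // CHAR(10): same here;
  // the two diverge only for values whose kept prefix ends in spaces.
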
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
index 4cc1be5..8ae8a54 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
@@ -307,7 +307,8 @@ public class ConstantVectorExpression extends VectorExpression {
value = decimalValue.toString();
break;
case TIMESTAMP:
- value = timestampValue.toString();
+ value = org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
+ timestampValue.getTime(), timestampValue.getNanos()).toString();
break;
case INTERVAL_DAY_TIME:
value = intervalDayTimeValue.toString();
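
A sketch of the assumed behavior of the new zone-free Timestamp type used above (the second argument of ofEpochMilli carrying the nano-of-second part):

  org.apache.hadoop.hive.common.type.Timestamp ts =
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(1_529_689_609_000L, 123);
  // Stable across JVM time zones; expected form "2018-06-22 17:46:49.000000123"
  String repr = ts.toString();
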
http://git-wip-us.apache.org/repos/asf/hive/blob/c95136a0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
index d963b87..79d1ad1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
@@ -20,14 +20,11 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
import java.sql.Timestamp;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
// A type date (LongColumnVector storing epoch days) minus a type date produces a
// type interval_day_time (IntervalDayTimeColumnVector storing nanosecond interval in 2 longs).
@@ -96,38 +93,38 @@ public class DateColSubtractDateColumn extends VectorExpression {
* conditional checks in the inner loop.
*/
if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(0);
} else if (inputColVector1.isRepeating) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
} else {
for(int i = 0; i != n; i++) {
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
}
} else if (inputColVector2.isRepeating) {
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
} else {
for(int i = 0; i != n; i++) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
@@ -136,15 +133,15 @@ public class DateColSubtractDateColumn extends VectorExpression {
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
} else {
for(int i = 0; i != n; i++) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}