You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by vg...@apache.org on 2018/08/10 17:14:35 UTC
hive git commit: HIVE-20340: Druid Needs Explicit CASTs from
Timestamp to STRING when the output of timestamp function is used as
String (Vineet Garg, reviewed by Ashutosh Chauhan)
Repository: hive
Updated Branches:
refs/heads/master 81dff07cb -> 4938c9ca1
HIVE-20340: Druid Needs Explicit CASTs from Timestamp to STRING when the output of timestamp function is used as String (Vineet Garg, reviewed by Ashutosh Chauhan)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4938c9ca
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4938c9ca
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4938c9ca
Branch: refs/heads/master
Commit: 4938c9ca15247899cca1f7f8782f56741207db36
Parents: 81dff07
Author: Vineet Garg <vg...@apache.org>
Authored: Fri Aug 10 10:13:47 2018 -0700
Committer: Vineet Garg <vg...@apache.org>
Committed: Fri Aug 10 10:13:47 2018 -0700
----------------------------------------------------------------------
.../hadoop/hive/cli/control/CliConfigs.java | 2 +-
.../hive/ql/parse/TypeCheckProcFactory.java | 13 ++++
.../hadoop/hive/ql/plan/ExprNodeDescUtils.java | 16 +++++
.../queries/clientpositive/druidmini_test1.q | 7 ++
.../clientpositive/druid/druidmini_test1.q.out | 74 ++++++++++++++++++++
.../clientpositive/infer_join_preds.q.out | 2 +-
.../results/clientpositive/interval_alt.q.out | 2 +-
.../clientpositive/spark/union_remove_22.q.out | 4 +-
.../results/clientpositive/union_offcbo.q.out | 20 +++---
.../clientpositive/union_remove_22.q.out | 4 +-
10 files changed, 127 insertions(+), 17 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/4938c9ca/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
index e7416b1..92919e9 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java
@@ -178,7 +178,7 @@ public class CliConfigs {
excludeQuery("druid_timestamptz.q"); // Disabled in HIVE-20322
excludeQuery("druidmini_joins.q"); // Disabled in HIVE-20322
excludeQuery("druidmini_masking.q"); // Disabled in HIVE-20322
- excludeQuery("druidmini_test1.q"); // Disabled in HIVE-20322
+ //excludeQuery("druidmini_test1.q"); // Disabled in HIVE-20322
setResultsDir("ql/src/test/results/clientpositive/druid");
setLogDir("itests/qtest/target/tmp/log");
http://git-wip-us.apache.org/repos/asf/hive/blob/4938c9ca/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index 1a86294..a8c9832 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -992,6 +992,17 @@ public class TypeCheckProcFactory {
}
}
+ protected void insertCast(String funcText, ArrayList<ExprNodeDesc> children) throws SemanticException {
+ // The substring and concat UDFs expect their first argument to be a string. Therefore this method
+ // inserts an explicit cast of the first operand to string when it is not already a string type.
+ if (funcText.equals("substring") || funcText.equals("concat")){
+ if(children.size() > 0 && !ExprNodeDescUtils.isStringType(children.get(0))) {
+ ExprNodeDesc newColumn = ParseUtils.createConversionCast(children.get(0), TypeInfoFactory.stringTypeInfo);
+ children.set(0, newColumn);
+ }
+ }
+ }
+
protected ExprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr,
boolean isFunction, ArrayList<ExprNodeDesc> children, TypeCheckCtx ctx)
throws SemanticException, UDFArgumentException {
@@ -1128,6 +1139,8 @@ public class TypeCheckProcFactory {
}
}
+ insertCast(funcText, children);
+
validateUDF(expr, isFunction, ctx, fi, children, genericUDF);
// Try to infer the type of the constant only if there are two
http://git-wip-us.apache.org/repos/asf/hive/blob/4938c9ca/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
index a87fa27..5275561 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
@@ -38,7 +38,9 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -935,4 +937,18 @@ public class ExprNodeDescUtils {
}
return true;
}
+
+ // Given an expression, this method figures out whether the expression's type belongs to the string group
+ // (e.g. String, Char, Varchar, etc.)
+ public static boolean isStringType(ExprNodeDesc expr) {
+ TypeInfo typeInfo = expr.getTypeInfo();
+ if (typeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE) {
+ PrimitiveObjectInspector.PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
+ if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(primitiveCategory) ==
+ PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP) {
+ return true;
+ }
+ }
+ return false;
+ }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/4938c9ca/ql/src/test/queries/clientpositive/druidmini_test1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidmini_test1.q b/ql/src/test/queries/clientpositive/druidmini_test1.q
index 30abf3c..f53cc05 100644
--- a/ql/src/test/queries/clientpositive/druidmini_test1.q
+++ b/ql/src/test/queries/clientpositive/druidmini_test1.q
@@ -128,3 +128,10 @@ WHERE (`__time` BETWEEN '1968-01-01 00:00:00' AND '1970-01-01 00:00:00')
-- this patch https://github.com/druid-io/druid/commit/219e77aeac9b07dc20dd9ab2dd537f3f17498346
explain select (cstring1 is null ) AS is_null, (cint is not null ) as isnotnull FROM druid_table_n3;
+
+explain select substring(to_date(`__time`), 4) from druid_table_n3 limit 5;
+select substring(to_date(`__time`), 4) from druid_table_n3 limit 5;
+
+explain select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5;
+select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5;
+
http://git-wip-us.apache.org/repos/asf/hive/blob/4938c9ca/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out b/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
index 45b9d78..6f85515 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_test1.q.out
@@ -839,3 +839,77 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
ListSink
+PREHOOK: query: explain select substring(to_date(`__time`), 4) from druid_table_n3 limit 5
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select substring(to_date(`__time`), 4) from druid_table_n3 limit 5
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: druid_table_n3
+ properties:
+ druid.fieldNames vc
+ druid.fieldTypes string
+ druid.query.json {"queryType":"scan","dataSource":"default.druid_table_n3","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"substring(timestamp_format(timestamp_floor(\"__time\",'P1D','','US/Pacific'),'yyyy-MM-dd','UTC'), 3, -1)","outputType":"STRING"}],"columns":["vc"],"resultFormat":"compactedList","limit":5}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: string)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: select substring(to_date(`__time`), 4) from druid_table_n3 limit 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table_n3
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select substring(to_date(`__time`), 4) from druid_table_n3 limit 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table_n3
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+9-12-31
+9-12-31
+9-12-31
+9-12-31
+9-12-31
+PREHOOK: query: explain select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: druid_table_n3
+ properties:
+ druid.fieldNames vc
+ druid.fieldTypes string
+ druid.query.json {"queryType":"scan","dataSource":"default.druid_table_n3","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"substring(timestamp_format(timestamp_floor(\"__time\",'P1D','','US/Pacific'),'yyyy-MM-dd','UTC'), 3, -1)","outputType":"STRING"}],"columns":["vc"],"resultFormat":"compactedList","limit":5}
+ druid.query.type scan
+ Select Operator
+ expressions: vc (type: string)
+ outputColumnNames: _col0
+ ListSink
+
+PREHOOK: query: select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table_n3
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select substring(cast(to_date(`__time`) as string), 4) from druid_table_n3 limit 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table_n3
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+9-12-31
+9-12-31
+9-12-31
+9-12-31
+9-12-31
http://git-wip-us.apache.org/repos/asf/hive/blob/4938c9ca/ql/src/test/results/clientpositive/infer_join_preds.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/infer_join_preds.q.out b/ql/src/test/results/clientpositive/infer_join_preds.q.out
index 6d2ec57..57f6511 100644
--- a/ql/src/test/results/clientpositive/infer_join_preds.q.out
+++ b/ql/src/test/results/clientpositive/infer_join_preds.q.out
@@ -1184,7 +1184,7 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35, _col36, _col37, _col38
Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Filter Operator
- predicate: (concat(_col1, ',', CASE WHEN (_col2 is null) THEN (1) ELSE (_col2) END, ',', CASE WHEN (_col3 is null) THEN (1) ELSE (_col3) END, ',', CASE WHEN (_col4 is null) THEN (1) ELSE (_col4) END, ',', CASE WHEN (_col5 is null) THEN ('') ELSE (_col5) END, ',', CASE WHEN (_col18 is null) THEN (1) ELSE (_col18) END, ',', CASE WHEN (_col6 is null) THEN (1) ELSE (_col6) END, ',', CASE WHEN (length(_col7) is null) THEN ('') ELSE (_col7) END, ',', CASE WHEN (_col8 is null) THEN (TIMESTAMP'2017-12-08 00:00:00') ELSE (_col8) END, ',', CASE WHEN (_col9 is null) THEN (1) ELSE (_col9) END, ',', CASE WHEN (_col10 is null) THEN (1) ELSE (_col10) END, ',', CASE WHEN (_col11 is null) THEN (1) ELSE (_col11) END, ',', CASE WHEN (_col12 is null) THEN (1) ELSE (_col12) END, ',', CASE WHEN (length(_col13) is null) THEN ('') ELSE (_col13) END, ',', CASE WHEN (length(_col14) is null) THEN ('') ELSE (_col14) END, ',', CASE WHEN (_col15 is null) THEN (1) ELSE (_col15) END, ',', CASE WHEN (_c
ol16 is null) THEN (1) ELSE (_col16) END, ',', CASE WHEN (_col17 is null) THEN (1) ELSE (_col17) END) <> concat(CASE WHEN (length(_col20) is null) THEN ('') ELSE (_col20) END, ',', _col21, ',', CASE WHEN (_col22 is null) THEN (1) ELSE (_col22) END, ',', CASE WHEN (_col23 is null) THEN (1) ELSE (_col23) END, ',', CASE WHEN (_col24 is null) THEN (1) ELSE (_col24) END, ',', CASE WHEN (_col25 is null) THEN ('') ELSE (_col25) END, ',', CASE WHEN (_col38 is null) THEN (1) ELSE (_col38) END, ',', CASE WHEN (_col26 is null) THEN (1) ELSE (_col26) END, ',', CASE WHEN (length(_col27) is null) THEN ('') ELSE (_col27) END, ',', CASE WHEN (_col28 is null) THEN (TIMESTAMP'2017-12-08 00:00:00') ELSE (_col28) END, ',', CASE WHEN (_col29 is null) THEN (1) ELSE (_col29) END, ',', CASE WHEN (_col30 is null) THEN (1) ELSE (_col30) END, ',', CASE WHEN (_col31 is null) THEN (1) ELSE (_col31) END, ',', CASE WHEN (_col32 is null) THEN (1) ELSE (_col32) END, ',', CASE WHEN (length(_col33) is null) THEN ('')
ELSE (_col33) END, ',', CASE WHEN (length(_col34) is null) THEN ('') ELSE (_col34) END, ',', CASE WHEN (_col35 is null) THEN (1) ELSE (_col35) END, ',', CASE WHEN (_col36 is null) THEN (1) ELSE (_col36) END, ',', CASE WHEN (_col37 is null) THEN (1) ELSE (_col37) END)) (type: boolean)
+ predicate: (concat(CAST( _col1 AS STRING), ',', CASE WHEN (_col2 is null) THEN (1) ELSE (_col2) END, ',', CASE WHEN (_col3 is null) THEN (1) ELSE (_col3) END, ',', CASE WHEN (_col4 is null) THEN (1) ELSE (_col4) END, ',', CASE WHEN (_col5 is null) THEN ('') ELSE (_col5) END, ',', CASE WHEN (_col18 is null) THEN (1) ELSE (_col18) END, ',', CASE WHEN (_col6 is null) THEN (1) ELSE (_col6) END, ',', CASE WHEN (length(_col7) is null) THEN ('') ELSE (_col7) END, ',', CASE WHEN (_col8 is null) THEN (TIMESTAMP'2017-12-08 00:00:00') ELSE (_col8) END, ',', CASE WHEN (_col9 is null) THEN (1) ELSE (_col9) END, ',', CASE WHEN (_col10 is null) THEN (1) ELSE (_col10) END, ',', CASE WHEN (_col11 is null) THEN (1) ELSE (_col11) END, ',', CASE WHEN (_col12 is null) THEN (1) ELSE (_col12) END, ',', CASE WHEN (length(_col13) is null) THEN ('') ELSE (_col13) END, ',', CASE WHEN (length(_col14) is null) THEN ('') ELSE (_col14) END, ',', CASE WHEN (_col15 is null) THEN (1) ELSE (_col15) END, '
,', CASE WHEN (_col16 is null) THEN (1) ELSE (_col16) END, ',', CASE WHEN (_col17 is null) THEN (1) ELSE (_col17) END) <> concat(CASE WHEN (length(_col20) is null) THEN ('') ELSE (_col20) END, ',', _col21, ',', CASE WHEN (_col22 is null) THEN (1) ELSE (_col22) END, ',', CASE WHEN (_col23 is null) THEN (1) ELSE (_col23) END, ',', CASE WHEN (_col24 is null) THEN (1) ELSE (_col24) END, ',', CASE WHEN (_col25 is null) THEN ('') ELSE (_col25) END, ',', CASE WHEN (_col38 is null) THEN (1) ELSE (_col38) END, ',', CASE WHEN (_col26 is null) THEN (1) ELSE (_col26) END, ',', CASE WHEN (length(_col27) is null) THEN ('') ELSE (_col27) END, ',', CASE WHEN (_col28 is null) THEN (TIMESTAMP'2017-12-08 00:00:00') ELSE (_col28) END, ',', CASE WHEN (_col29 is null) THEN (1) ELSE (_col29) END, ',', CASE WHEN (_col30 is null) THEN (1) ELSE (_col30) END, ',', CASE WHEN (_col31 is null) THEN (1) ELSE (_col31) END, ',', CASE WHEN (_col32 is null) THEN (1) ELSE (_col32) END, ',', CASE WHEN (length(_col33) i
s null) THEN ('') ELSE (_col33) END, ',', CASE WHEN (length(_col34) is null) THEN ('') ELSE (_col34) END, ',', CASE WHEN (_col35 is null) THEN (1) ELSE (_col35) END, ',', CASE WHEN (_col36 is null) THEN (1) ELSE (_col36) END, ',', CASE WHEN (_col37 is null) THEN (1) ELSE (_col37) END)) (type: boolean)
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
expressions: _col0 (type: bigint)
http://git-wip-us.apache.org/repos/asf/hive/blob/4938c9ca/ql/src/test/results/clientpositive/interval_alt.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/interval_alt.q.out b/ql/src/test/results/clientpositive/interval_alt.q.out
index ca645b9..6adf4cc 100644
--- a/ql/src/test/results/clientpositive/interval_alt.q.out
+++ b/ql/src/test/results/clientpositive/interval_alt.q.out
@@ -139,7 +139,7 @@ STAGE PLANS:
alias: t_n18
Statistics: Num rows: 2 Data size: 2 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: (DATE'2012-01-01' + IntervalDayLiteralProcessor(((- dt) * dt))) (type: timestamp), (DATE'2012-01-01' - IntervalDayLiteralProcessor(((- dt) * dt))) (type: timestamp), TIMESTAMP'2012-01-04 00:00:00' (type: timestamp), (DATE'2012-01-01' + IntervalYearMonthLiteralProcessor(concat(dt, '-1'))) (type: date)
+ expressions: (DATE'2012-01-01' + IntervalDayLiteralProcessor(((- dt) * dt))) (type: timestamp), (DATE'2012-01-01' - IntervalDayLiteralProcessor(((- dt) * dt))) (type: timestamp), TIMESTAMP'2012-01-04 00:00:00' (type: timestamp), (DATE'2012-01-01' + IntervalYearMonthLiteralProcessor(concat(CAST( dt AS STRING), '-1'))) (type: date)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 2 Data size: 2 Basic stats: COMPLETE Column stats: NONE
File Output Operator
http://git-wip-us.apache.org/repos/asf/hive/blob/4938c9ca/ql/src/test/results/clientpositive/spark/union_remove_22.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/union_remove_22.q.out b/ql/src/test/results/clientpositive/spark/union_remove_22.q.out
index 1d23016..2a0a22a 100644
--- a/ql/src/test/results/clientpositive/spark/union_remove_22.q.out
+++ b/ql/src/test/results/clientpositive/spark/union_remove_22.q.out
@@ -263,7 +263,7 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), UDFToLong(concat(_col1, _col1)) (type: bigint), UDFToLong(concat(_col1, _col1)) (type: bigint)
+ expressions: _col0 (type: string), UDFToLong(concat(CAST( _col1 AS STRING), _col1)) (type: bigint), UDFToLong(concat(CAST( _col1 AS STRING), _col1)) (type: bigint)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 2 Data size: 600 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -284,7 +284,7 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), UDFToLong(concat(_col1, _col1)) (type: bigint), UDFToLong(concat(_col1, _col1)) (type: bigint)
+ expressions: _col0 (type: string), UDFToLong(concat(CAST( _col1 AS STRING), _col1)) (type: bigint), UDFToLong(concat(CAST( _col1 AS STRING), _col1)) (type: bigint)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 2 Data size: 600 Basic stats: COMPLETE Column stats: NONE
File Output Operator
http://git-wip-us.apache.org/repos/asf/hive/blob/4938c9ca/ql/src/test/results/clientpositive/union_offcbo.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union_offcbo.q.out b/ql/src/test/results/clientpositive/union_offcbo.q.out
index 54c4edf..94d197f 100644
--- a/ql/src/test/results/clientpositive/union_offcbo.q.out
+++ b/ql/src/test/results/clientpositive/union_offcbo.q.out
@@ -252,7 +252,7 @@ STAGE PLANS:
predicate: (ts1 = '2015-11-20') (type: boolean)
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
- expressions: reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(id1)) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(at1)) (type: string)
+ expressions: reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( id1 AS STRING))) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( at1 AS STRING))) (type: string)
outputColumnNames: _col8, _col9
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Reduce Output Operator
@@ -344,7 +344,7 @@ STAGE PLANS:
predicate: (ts1 = '2015-11-20') (type: boolean)
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
- expressions: id1 (type: bigint), '2015-11-20' (type: string), sts (type: string), at1 (type: bigint), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(id1)) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(at1)) (type: string)
+ expressions: id1 (type: bigint), '2015-11-20' (type: string), sts (type: string), at1 (type: bigint), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( id1 AS STRING))) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( at1 AS STRING))) (type: string)
outputColumnNames: _col0, _col1, _col6, _col7, _col8, _col9
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Reduce Output Operator
@@ -593,7 +593,7 @@ STAGE PLANS:
predicate: (ts1 = '2015-11-20') (type: boolean)
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
- expressions: reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(id1)) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(at1)) (type: string)
+ expressions: reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( id1 AS STRING))) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( at1 AS STRING))) (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Filter Operator
@@ -688,7 +688,7 @@ STAGE PLANS:
predicate: (ts1 = '2015-11-20') (type: boolean)
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
- expressions: id1 (type: bigint), sts (type: string), at1 (type: bigint), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(id1)) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(at1)) (type: string)
+ expressions: id1 (type: bigint), sts (type: string), at1 (type: bigint), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( id1 AS STRING))) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( at1 AS STRING))) (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Filter Operator
@@ -940,7 +940,7 @@ STAGE PLANS:
predicate: (ts1 = '2015-11-20') (type: boolean)
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
- expressions: reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(id1)) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(at1)) (type: string)
+ expressions: reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( id1 AS STRING))) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( at1 AS STRING))) (type: string)
outputColumnNames: _col8, _col9
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Reduce Output Operator
@@ -1046,7 +1046,7 @@ STAGE PLANS:
predicate: (ts1 = '2015-11-20') (type: boolean)
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
- expressions: id1 (type: bigint), '2015-11-20' (type: string), sts (type: string), at1 (type: bigint), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(id1)) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(at1)) (type: string)
+ expressions: id1 (type: bigint), '2015-11-20' (type: string), sts (type: string), at1 (type: bigint), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( id1 AS STRING))) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( at1 AS STRING))) (type: string)
outputColumnNames: _col0, _col1, _col6, _col7, _col8, _col9
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Reduce Output Operator
@@ -1294,7 +1294,7 @@ STAGE PLANS:
predicate: (ts1 = '2015-11-20') (type: boolean)
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
- expressions: reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(id1)) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(at1)) (type: string)
+ expressions: reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( id1 AS STRING))) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( at1 AS STRING))) (type: string)
outputColumnNames: _col8, _col9
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Reduce Output Operator
@@ -1398,7 +1398,7 @@ STAGE PLANS:
predicate: (ts1 = '2015-11-20') (type: boolean)
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
- expressions: id1 (type: bigint), '2015-11-20' (type: string), sts (type: string), at1 (type: bigint), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(id1)) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(at1)) (type: string)
+ expressions: id1 (type: bigint), '2015-11-20' (type: string), sts (type: string), at1 (type: bigint), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( id1 AS STRING))) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( at1 AS STRING))) (type: string)
outputColumnNames: _col0, _col1, _col6, _col7, _col8, _col9
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Reduce Output Operator
@@ -1645,7 +1645,7 @@ STAGE PLANS:
predicate: (ts1 = '2015-11-20') (type: boolean)
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
- expressions: reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(id1)) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(at1)) (type: string)
+ expressions: reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( id1 AS STRING))) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( at1 AS STRING))) (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Filter Operator
@@ -1738,7 +1738,7 @@ STAGE PLANS:
predicate: (ts1 = '2015-11-20') (type: boolean)
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Select Operator
- expressions: id1 (type: bigint), sts (type: string), at1 (type: bigint), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(id1)) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(at1)) (type: string)
+ expressions: id1 (type: bigint), sts (type: string), at1 (type: bigint), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( id1 AS STRING))) (type: string), reflect('org.apache.commons.codec.digest.DigestUtils','sha256Hex',concat(CAST( at1 AS STRING))) (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
Filter Operator
http://git-wip-us.apache.org/repos/asf/hive/blob/4938c9ca/ql/src/test/results/clientpositive/union_remove_22.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union_remove_22.q.out b/ql/src/test/results/clientpositive/union_remove_22.q.out
index 83217fa..89b3f71 100644
--- a/ql/src/test/results/clientpositive/union_remove_22.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_22.q.out
@@ -271,7 +271,7 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), UDFToLong(concat(_col1, _col1)) (type: bigint), UDFToLong(concat(_col1, _col1)) (type: bigint)
+ expressions: _col0 (type: string), UDFToLong(concat(CAST( _col1 AS STRING), _col1)) (type: bigint), UDFToLong(concat(CAST( _col1 AS STRING), _col1)) (type: bigint)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -324,7 +324,7 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), UDFToLong(concat(_col1, _col1)) (type: bigint), UDFToLong(concat(_col1, _col1)) (type: bigint)
+ expressions: _col0 (type: string), UDFToLong(concat(CAST( _col1 AS STRING), _col1)) (type: bigint), UDFToLong(concat(CAST( _col1 AS STRING), _col1)) (type: bigint)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
File Output Operator