Posted to commits@hive.apache.org by ha...@apache.org on 2018/06/13 15:07:11 UTC

hive git commit: HIVE-19869: Remove double-formatting bug, follow-up of HIVE-19382

Repository: hive
Updated Branches:
  refs/heads/master 16f57220f -> 7b7f5a3c9


HIVE-19869: Remove double-formatting bug, follow-up of HIVE-19382

Signed-off-by: Ashutosh Chauhan <ha...@apache.org>
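
This change touches DruidFormUnixTimeOperatorConversion, the FROM_UNIXTIME-to-Druid translation.
Reading the diff below, the fix is twofold: the default pattern DEFAULT_TS_FORMAT is now wrapped
in DruidExpressions.stringLiteral() instead of being passed as a raw pattern string, and the
applyTimestampFormat() helper (presumably part of the earlier HIVE-19382 change) is replaced with
a direct DruidExpressions.functionCall("timestamp_format", ...), so the millisecond expression is
formatted exactly once. The updated druidmini_expressions.q.out below accordingly shows a single
timestamp_format call in the generated virtual column.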


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7b7f5a3c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7b7f5a3c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7b7f5a3c

Branch: refs/heads/master
Commit: 7b7f5a3c9f295a888bc836d37774a13e6b55b30a
Parents: 16f5722
Author: Slim Bouguerra <sl...@gmail.com>
Authored: Wed Jun 13 10:06:23 2018 -0500
Committer: Ashutosh Chauhan <ha...@apache.org>
Committed: Wed Jun 13 10:06:23 2018 -0500

----------------------------------------------------------------------
 .../ql/parse/DruidSqlOperatorConverter.java     | 21 ++++++----
 .../clientpositive/druidmini_expressions.q      |  8 ++++
 .../druid/druidmini_expressions.q.out           | 42 ++++++++++++++++++++
 3 files changed, 64 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/7b7f5a3c/ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
index 2b1d0d3..4db0714 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DruidSqlOperatorConverter.java
@@ -95,8 +95,12 @@ public class DruidSqlOperatorConverter {
       druidOperatorMap.put(HiveToDateSqlOperator.INSTANCE, new DruidToDateOperatorConversion());
       druidOperatorMap.put(HiveFromUnixTimeSqlOperator.INSTANCE, new DruidFormUnixTimeOperatorConversion());
       druidOperatorMap.put(HiveUnixTimestampSqlOperator.INSTANCE, new DruidUnixTimestampOperatorConversion());
-      druidOperatorMap.put(HiveDateAddSqlOperator.INSTANCE, new DruidDateArithmeticOperatorConversion(1, HiveDateAddSqlOperator.INSTANCE));
-      druidOperatorMap.put(HiveDateSubSqlOperator.INSTANCE, new DruidDateArithmeticOperatorConversion(-1, HiveDateSubSqlOperator.INSTANCE));
+      druidOperatorMap.put(HiveDateAddSqlOperator.INSTANCE,
+          new DruidDateArithmeticOperatorConversion(1, HiveDateAddSqlOperator.INSTANCE)
+      );
+      druidOperatorMap.put(HiveDateSubSqlOperator.INSTANCE,
+          new DruidDateArithmeticOperatorConversion(-1, HiveDateSubSqlOperator.INSTANCE)
+      );
     }
     return druidOperatorMap;
   }
@@ -254,7 +258,8 @@ public class DruidSqlOperatorConverter {
       // dealing with String type
       final String format = call.getOperands().size() == 2 ? DruidExpressions
           .toDruidExpression(call.getOperands().get(1), rowType, query) : DEFAULT_TS_FORMAT;
-      return DruidExpressions.functionCall("unix_timestamp", ImmutableList.of(arg0, DruidExpressions.stringLiteral(format)));
+      return DruidExpressions
+          .functionCall("unix_timestamp", ImmutableList.of(arg0, DruidExpressions.stringLiteral(format)));
     }
   }
 
@@ -277,9 +282,12 @@ public class DruidSqlOperatorConverter {
       }
 
       final String numMillis = DruidQuery.format("(%s * '1000')", arg);
-      final String format = call.getOperands().size() == 1 ? DEFAULT_TS_FORMAT : DruidExpressions
-          .toDruidExpression(call.getOperands().get(1), rowType, query);
-      return applyTimestampFormat(numMillis, format, timezoneId(query));
+      final String format =
+          call.getOperands().size() == 1 ? DruidExpressions.stringLiteral(DEFAULT_TS_FORMAT) : DruidExpressions
+              .toDruidExpression(call.getOperands().get(1), rowType, query);
+      return DruidExpressions.functionCall("timestamp_format",
+          ImmutableList.of(numMillis, format, DruidExpressions.stringLiteral(timezoneId(query).getID()))
+      );
     }
   }
 
@@ -322,7 +330,6 @@ public class DruidSqlOperatorConverter {
     }
   }
 
-
   /**
    * utility function to extract timezone id from Druid query
    * @param query Druid Rel
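
For illustration, here is a minimal, self-contained sketch of the expression the fixed conversion
builds. The two helpers below are hypothetical stand-ins for DruidExpressions.stringLiteral() and
DruidExpressions.functionCall() (the real quoting rules live in Calcite's Druid adapter); the
inputs mirror the virtual column in the updated q.out:

    import java.util.Arrays;
    import java.util.List;

    public class FromUnixTimeSketch {
      // Hypothetical stand-in: quote a value as a Druid string literal.
      static String stringLiteral(String s) {
        return "'" + s + "'";
      }

      // Hypothetical stand-in: render a Druid function call from a name and arguments.
      static String functionCall(String name, List<String> args) {
        return name + "(" + String.join(",", args) + ")";
      }

      public static void main(String[] args) {
        String numMillis = "(div(\"__time\",1000) * '1000')";
        // After the fix the default pattern is a string literal, not a bare pattern.
        String format = stringLiteral("yyyy-MM-dd HH:mm:ss");
        String timezone = stringLiteral("US/Pacific");
        // A single formatting pass over the millisecond expression:
        System.out.println(functionCall("timestamp_format",
            Arrays.asList(numMillis, format, timezone)));
        // -> timestamp_format((div("__time",1000) * '1000'),'yyyy-MM-dd HH:mm:ss','US/Pacific')
      }
    }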

http://git-wip-us.apache.org/repos/asf/hive/blob/7b7f5a3c/ql/src/test/queries/clientpositive/druidmini_expressions.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidmini_expressions.q b/ql/src/test/queries/clientpositive/druidmini_expressions.q
index f28b391..fad8f73 100644
--- a/ql/src/test/queries/clientpositive/druidmini_expressions.q
+++ b/ql/src/test/queries/clientpositive/druidmini_expressions.q
@@ -87,6 +87,14 @@ select unix_timestamp(from_unixtime(1396681200)) from druid_table_n0 limit 1;
 explain select unix_timestamp(`__time`) from druid_table_n0 limit 1;
 select unix_timestamp(`__time`) from druid_table_n0 limit 1;
 
+explain select FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+from druid_table_n0
+GROUP BY FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss');
+
+select FROM_UNIXTIME(UNIX_TIMESTAMP (CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+from druid_table_n0
+GROUP BY FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss');
+
 explain select TRUNC(cast(`__time` as timestamp), 'YY') from druid_table_n0 GROUP BY TRUNC(cast(`__time` as timestamp), 'YY');
 select TRUNC(cast(`__time` as timestamp), 'YY') from druid_table_n0 GROUP BY TRUNC(cast(`__time` as timestamp), 'YY');
 select TRUNC(cast(`__time` as timestamp), 'YEAR') from druid_table_n0 GROUP BY TRUNC(cast(`__time` as timestamp), 'YEAR');

http://git-wip-us.apache.org/repos/asf/hive/blob/7b7f5a3c/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out b/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out
index 8ccdeaa..b34f47b 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_expressions.q.out
@@ -955,6 +955,48 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@druid_table_n0
 POSTHOOK: Output: hdfs://### HDFS PATH ###
 -60
+PREHOOK: query: explain select FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+from druid_table_n0
+GROUP BY FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+from druid_table_n0
+GROUP BY FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table_n0
+          properties:
+            druid.fieldNames vc
+            druid.fieldTypes string
+            druid.query.json {"queryType":"groupBy","dataSource":"default.druid_table_n0","granularity":"all","dimensions":[{"type":"default","dimension":"vc","outputName":"vc","outputType":"STRING"}],"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_format((div(\"__time\",1000) * '1000'),'yyyy-MM-dd HH:mm:ss','US/Pacific')","outputType":"STRING"}],"limitSpec":{"type":"default"},"aggregations":[],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
+            druid.query.type groupBy
+          Select Operator
+            expressions: vc (type: string)
+            outputColumnNames: _col0
+            ListSink
+
+PREHOOK: query: select FROM_UNIXTIME(UNIX_TIMESTAMP (CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+from druid_table_n0
+GROUP BY FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table_n0
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select FROM_UNIXTIME(UNIX_TIMESTAMP (CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+from druid_table_n0
+GROUP BY FROM_UNIXTIME(UNIX_TIMESTAMP(CAST(`__time` as timestamp ),'yyyy-MM-dd HH:mm:ss' ),'yyyy-MM-dd HH:mm:ss')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table_n0
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1969-12-31 15:59:00
+1969-12-31 16:00:00
 PREHOOK: query: explain select TRUNC(cast(`__time` as timestamp), 'YY') from druid_table_n0 GROUP BY TRUNC(cast(`__time` as timestamp), 'YY')
 PREHOOK: type: QUERY
 POSTHOOK: query: explain select TRUNC(cast(`__time` as timestamp), 'YY') from druid_table_n0 GROUP BY TRUNC(cast(`__time` as timestamp), 'YY')
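
The FROM_UNIXTIME results above (1969-12-31 15:59:00 and 1969-12-31 16:00:00) are consistent with
the table's timestamps sitting at epoch seconds -60 and 0 (matching the unix_timestamp result of
-60 earlier in this file), rendered in US/Pacific, which is UTC-8 on that date. A quick java.time
check, assuming those epoch values:

    import java.time.Instant;
    import java.time.ZoneId;
    import java.time.format.DateTimeFormatter;

    public class PacificFormatCheck {
      public static void main(String[] args) {
        DateTimeFormatter f = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
            .withZone(ZoneId.of("US/Pacific"));
        System.out.println(f.format(Instant.ofEpochSecond(-60))); // 1969-12-31 15:59:00
        System.out.println(f.format(Instant.ofEpochSecond(0)));   // 1969-12-31 16:00:00
      }
    }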