Posted to commits@hive.apache.org by se...@apache.org on 2018/07/20 19:24:39 UTC

[1/2] hive git commit: HIVE-19990: Query with interval literal in join condition fails (Vineet Garg, reviewed by Zoltan Haindrich)

Repository: hive
Updated Branches:
  refs/heads/master-txnstats cdb32a7fb -> bdd3cec1f


HIVE-19990: Query with interval literal in join condition fails (Vineet Garg, reviewed by Zoltan Haindrich)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/06a4f98e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/06a4f98e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/06a4f98e

Branch: refs/heads/master-txnstats
Commit: 06a4f98e7400b0ab11d8361195dcc9c0f0d7f01d
Parents: 851c8ab
Author: Vineet Garg <vg...@apache.org>
Authored: Fri Jul 20 11:01:55 2018 -0700
Committer: Vineet Garg <vg...@apache.org>
Committed: Fri Jul 20 11:01:55 2018 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  10 ++
 ql/src/test/queries/clientpositive/interval_3.q |  23 ++++
 .../results/clientpositive/interval_3.q.out     | 111 +++++++++++++++++++
 3 files changed, 144 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/06a4f98e/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index a8e235e..6230751 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -2744,6 +2744,16 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     case HiveParser.TOK_CHARSETLITERAL:
     case HiveParser.KW_TRUE:
     case HiveParser.KW_FALSE:
+    case HiveParser.TOK_INTERVAL_DAY_LITERAL:
+    case HiveParser.TOK_INTERVAL_DAY_TIME:
+    case HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL:
+    case HiveParser.TOK_INTERVAL_HOUR_LITERAL:
+    case HiveParser.TOK_INTERVAL_MINUTE_LITERAL:
+    case HiveParser.TOK_INTERVAL_MONTH_LITERAL:
+    case HiveParser.TOK_INTERVAL_SECOND_LITERAL:
+    case HiveParser.TOK_INTERVAL_YEAR_LITERAL:
+    case HiveParser.TOK_INTERVAL_YEAR_MONTH:
+    case HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL:
       break;
 
     case HiveParser.TOK_FUNCTION:

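The hunk above adds the interval literal tokens to the switch branch that already accepts TOK_CHARSETLITERAL, KW_TRUE and KW_FALSE, so an interval literal encountered in a join condition is simply accepted (the case breaks) instead of triggering the failure described in HIVE-19990. As a rough sketch only, a trimmed-down variant of the new test (the table name below is hypothetical, not the one used in interval_3.q):

    -- Hypothetical table; the interval literal appears in the join predicate.
    SELECT d1.d_week_seq
    FROM   date_dim d1
    JOIN   date_dim d3
    WHERE  CAST(d3.d_date AS date) > CAST(d1.d_date AS date) + INTERVAL '7' DAY;

The full interval_3.q addition below exercises the same pattern with year, month, day, hour, minute and second interval literals plus a YEAR TO MONTH interval.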
http://git-wip-us.apache.org/repos/asf/hive/blob/06a4f98e/ql/src/test/queries/clientpositive/interval_3.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/interval_3.q b/ql/src/test/queries/clientpositive/interval_3.q
index ee0f83c..f32793e 100644
--- a/ql/src/test/queries/clientpositive/interval_3.q
+++ b/ql/src/test/queries/clientpositive/interval_3.q
@@ -38,3 +38,26 @@ from
   ) b
   on a.interval1 = b.interval2 and a.l_orderkey = b.l_orderkey
 order by a.l_orderkey;
+
+-- interval literal in join condition
+create table date_dim_d1(
+	d_week_seq int,
+	d_date string);
+
+    EXPLAIN SELECT
+           d1.d_week_seq
+    FROM
+           date_dim_d1 d1
+           JOIN date_dim_d1 d3
+    WHERE
+           Cast(d3.d_date AS date) > Cast(d1.d_date AS date)
+    		+ INTERVAL '1' year
+    		+ INTERVAL '2' month
+    		+ INTERVAL '5' day
+    		+ INTERVAL '4' hour
+    		+ INTERVAL '10' minute
+    		+ INTERVAL '9' second
+    	AND Cast(d3.d_date AS date) < Cast(d1.d_date AS date) + INTERVAL '1-2' YEAR TO MONTH;
+
+    DROP table date_dim_d1;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/06a4f98e/ql/src/test/results/clientpositive/interval_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/interval_3.q.out b/ql/src/test/results/clientpositive/interval_3.q.out
index ac71514..97eec12 100644
--- a/ql/src/test/results/clientpositive/interval_3.q.out
+++ b/ql/src/test/results/clientpositive/interval_3.q.out
@@ -96,3 +96,114 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@lineitem
 #### A masked pattern was here ####
 37	37	26 00:00:00.000000000
+PREHOOK: query: create table date_dim_d1(
+	d_week_seq int,
+	d_date string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@date_dim_d1
+POSTHOOK: query: create table date_dim_d1(
+	d_week_seq int,
+	d_date string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@date_dim_d1
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: EXPLAIN SELECT
+           d1.d_week_seq
+    FROM
+           date_dim_d1 d1
+           JOIN date_dim_d1 d3
+    WHERE
+           Cast(d3.d_date AS date) > Cast(d1.d_date AS date)
+    		+ INTERVAL '1' year
+    		+ INTERVAL '2' month
+    		+ INTERVAL '5' day
+    		+ INTERVAL '4' hour
+    		+ INTERVAL '10' minute
+    		+ INTERVAL '9' second
+    	AND Cast(d3.d_date AS date) < Cast(d1.d_date AS date) + INTERVAL '1-2' YEAR TO MONTH
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT
+           d1.d_week_seq
+    FROM
+           date_dim_d1 d1
+           JOIN date_dim_d1 d3
+    WHERE
+           Cast(d3.d_date AS date) > Cast(d1.d_date AS date)
+    		+ INTERVAL '1' year
+    		+ INTERVAL '2' month
+    		+ INTERVAL '5' day
+    		+ INTERVAL '4' hour
+    		+ INTERVAL '10' minute
+    		+ INTERVAL '9' second
+    	AND Cast(d3.d_date AS date) < Cast(d1.d_date AS date) + INTERVAL '1-2' YEAR TO MONTH
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: d1
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: d_week_seq (type: int), d_date (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                value expressions: _col0 (type: int), _col1 (type: string)
+          TableScan
+            alias: d3
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: d_date (type: string)
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                value expressions: _col0 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 1 Data size: 1 Basic stats: PARTIAL Column stats: NONE
+          Filter Operator
+            predicate: ((CAST( CAST( _col2 AS DATE) AS TIMESTAMP) > ((((((CAST( _col1 AS DATE) + INTERVAL'1-0') + INTERVAL'0-2') + INTERVAL'5 00:00:00.000000000') + INTERVAL'0 04:00:00.000000000') + INTERVAL'0 00:10:00.000000000') + INTERVAL'0 00:00:09.000000000')) and (CAST( _col2 AS DATE) < (CAST( _col1 AS DATE) + INTERVAL'1-2'))) (type: boolean)
+            Statistics: Num rows: 1 Data size: 1 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: int)
+              outputColumnNames: _col0
+              Statistics: Num rows: 1 Data size: 1 Basic stats: PARTIAL Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 1 Data size: 1 Basic stats: PARTIAL Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: DROP table date_dim_d1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@date_dim_d1
+PREHOOK: Output: default@date_dim_d1
+POSTHOOK: query: DROP table date_dim_d1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@date_dim_d1
+POSTHOOK: Output: default@date_dim_d1


[2/2] hive git commit: HIVE-19416: merge master into branch (Sergey Shelukhin) 0720

Posted by se...@apache.org.
HIVE-19416: merge master into branch (Sergey Shelukhin) 0720


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bdd3cec1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bdd3cec1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bdd3cec1

Branch: refs/heads/master-txnstats
Commit: bdd3cec1f68178aa2128ee6db7fbc0a0c0bb021f
Parents: cdb32a7 06a4f98
Author: sergey <se...@apache.org>
Authored: Fri Jul 20 12:24:26 2018 -0700
Committer: sergey <se...@apache.org>
Committed: Fri Jul 20 12:24:26 2018 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  10 ++
 ql/src/test/queries/clientpositive/interval_3.q |  23 ++++
 .../results/clientpositive/interval_3.q.out     | 111 +++++++++++++++++++
 3 files changed, 144 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/bdd3cec1/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------