Posted to commits@hive.apache.org by px...@apache.org on 2015/08/11 23:21:19 UTC

[1/4] hive git commit: HIVE-9619: Uninitialized read of numBitVectors in NumDistinctValueEstimator (Alexander Pivovarov via gopalv)

Repository: hive
Updated Branches:
  refs/heads/branch-1.0 84af92e65 -> 1954c9088


HIVE-9619: Uninitialized read of numBitVectors in NumDistinctValueEstimator (Alexander Pivovarov via gopalv)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1660464 13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8b9ba260
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8b9ba260
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8b9ba260

Branch: refs/heads/branch-1.0
Commit: 8b9ba2600e218561e52b8ae38701d801b31ce4da
Parents: 84af92e
Author: Gopal Vijayaraghavan <go...@apache.org>
Authored: Tue Feb 17 18:32:04 2015 +0000
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Tue Aug 11 14:16:22 2015 -0700

----------------------------------------------------------------------
 .../udf/generic/NumDistinctValueEstimator.java  | 59 +++++++++++---------
 1 file changed, 33 insertions(+), 26 deletions(-)
----------------------------------------------------------------------
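For background on the fix: in Java, instance field initializers run before the constructor body, so the original field-level initializer "new FastBitSet[numBitVectors]" always read numBitVectors at its default value of 0 and allocated an empty array. The patch below moves the allocation into the constructors and marks the fields final, so the compiler enforces definite assignment. A minimal standalone sketch of the pitfall (hypothetical demo class, not Hive code):

public class InitOrderDemo {
  private int size;                      // still 0 when field initializers run
  private int[] buckets = new int[size]; // executes before the constructor body

  public InitOrderDemo(int size) {
    this.size = size;                    // too late: buckets already has length 0
  }

  public static void main(String[] args) {
    System.out.println(new InitOrderDemo(16).buckets.length); // prints 0, not 16
  }
}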


http://git-wip-us.apache.org/repos/asf/hive/blob/8b9ba260/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java
index 2817044..8212bea 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java
@@ -35,18 +35,18 @@ public class NumDistinctValueEstimator {
    * independent. As a consequence, the hash values will not distribute uniformly from 0 to 2^p-1
    * thus introducing errors in the estimates.
    */
-  private static final int bitVectorSize = 31;
-  private int numBitVectors;
+  private static final int BIT_VECTOR_SIZE = 31;
+  private final int numBitVectors;
 
   // Refer to Flajolet-Martin'86 for the value of phi
-  private final double phi =  0.77351;
+  private static final double PHI = 0.77351;
 
-  private int[] a;
-  private int[] b;
-  private  FastBitSet[] bitVector = new FastBitSet[numBitVectors];
+  private final int[] a;
+  private final int[] b;
+  private final FastBitSet[] bitVector;
 
-  private Random aValue;
-  private Random bValue;
+  private final Random aValue;
+  private final Random bValue;
 
   /* Create a new distinctValueEstimator
    */
@@ -54,7 +54,7 @@ public class NumDistinctValueEstimator {
     this.numBitVectors = numBitVectors;
     bitVector = new FastBitSet[numBitVectors];
     for (int i=0; i< numBitVectors; i++) {
-      bitVector[i] = new FastBitSet(bitVectorSize);
+      bitVector[i] = new FastBitSet(BIT_VECTOR_SIZE);
     }
 
     a = new int[numBitVectors];
@@ -98,23 +98,30 @@ public class NumDistinctValueEstimator {
       b[i] = randVal;
 
       if (a[i] < 0) {
-        a[i] = a[i] + (1 << bitVectorSize - 1);
+        a[i] = a[i] + (1 << BIT_VECTOR_SIZE - 1);
       }
 
       if (b[i] < 0) {
-        b[i] = b[i] + (1 << bitVectorSize - 1);
+        b[i] = b[i] + (1 << BIT_VECTOR_SIZE - 1);
       }
     }
   }
 
   public NumDistinctValueEstimator(String s, int numBitVectors) {
-    FastBitSet b[] = deserialize(s, numBitVectors);
+    this.numBitVectors = numBitVectors;
+    FastBitSet bitVectorDeser[] = deserialize(s, numBitVectors);
     bitVector = new FastBitSet[numBitVectors];
     for(int i=0; i <numBitVectors; i++) {
-       bitVector[i] = new FastBitSet(bitVectorSize);
+       bitVector[i] = new FastBitSet(BIT_VECTOR_SIZE);
        bitVector[i].clear();
-       bitVector[i].or(b[i]);
+       bitVector[i].or(bitVectorDeser[i]);
     }
+
+    a = null;
+    b = null;
+
+    aValue = null;
+    bValue = null;
   }
 
   /**
@@ -135,7 +142,7 @@ public class NumDistinctValueEstimator {
   }
 
   public int getBitVectorSize() {
-    return bitVectorSize;
+    return BIT_VECTOR_SIZE;
   }
 
   public void printNumDistinctValueEstimator() {
@@ -145,7 +152,7 @@ public class NumDistinctValueEstimator {
     LOG.debug("Number of Vectors:");
     LOG.debug(numBitVectors);
     LOG.debug("Vector Size: ");
-    LOG.debug(bitVectorSize);
+    LOG.debug(BIT_VECTOR_SIZE);
 
     for (int i=0; i < numBitVectors; i++) {
       t = t + bitVector[i].toString();
@@ -173,7 +180,7 @@ public class NumDistinctValueEstimator {
   private FastBitSet[] deserialize(String s, int numBitVectors) {
     FastBitSet[] b = new FastBitSet[numBitVectors];
     for (int j=0; j < numBitVectors; j++) {
-      b[j] = new FastBitSet(bitVectorSize);
+      b[j] = new FastBitSet(BIT_VECTOR_SIZE);
       b[j].clear();
     }
 
@@ -219,7 +226,7 @@ public class NumDistinctValueEstimator {
   }
 
   private int generateHash(long v, int hashNum) {
-    int mod = (1<<bitVectorSize) - 1;
+    int mod = (1<<BIT_VECTOR_SIZE) - 1;
     long tempHash = a[hashNum] * v  + b[hashNum];
     tempHash %= mod;
     int hash = (int) tempHash;
@@ -234,7 +241,7 @@ public class NumDistinctValueEstimator {
   }
 
   private int generateHashForPCSA(long v) {
-    int mod = 1 << (bitVectorSize - 1) - 1;
+    int mod = 1 << (BIT_VECTOR_SIZE - 1) - 1;
     long tempHash = a[0] * v + b[0];
     tempHash %= mod;
     int hash = (int) tempHash;
@@ -259,8 +266,8 @@ public class NumDistinctValueEstimator {
       int index;
 
       // Find the index of the least significant bit that is 1
-      for (index=0; index<bitVectorSize; index++) {
-        if (hash % 2 == 1) {
+      for (index=0; index<BIT_VECTOR_SIZE; index++) {
+        if (hash % 2 != 0) {
           break;
         }
         hash = hash >> 1;
@@ -277,8 +284,8 @@ public class NumDistinctValueEstimator {
     int index;
 
     // Find the index of the least significant bit that is 1
-    for (index=0; index<bitVectorSize; index++) {
-      if (rho % 2 == 1) {
+    for (index=0; index<BIT_VECTOR_SIZE; index++) {
+      if (rho % 2 != 0) {
         break;
       }
       rho = rho >> 1;
@@ -321,13 +328,13 @@ public class NumDistinctValueEstimator {
 
     for (int i=0; i < numBitVectors; i++) {
       int index = 0;
-      while (bitVector[i].get(index) && index < bitVectorSize) {
+      while (bitVector[i].get(index) && index < BIT_VECTOR_SIZE) {
         index = index + 1;
       }
       S = S + index;
     }
 
-    numDistinctValues = ((numBitVectors/phi) * Math.pow(2.0, S/numBitVectors));
+    numDistinctValues = ((numBitVectors/PHI) * Math.pow(2.0, S/numBitVectors));
     return ((long)numDistinctValues);
   }
 
@@ -345,7 +352,7 @@ public class NumDistinctValueEstimator {
     }
 
     avgLeastSigZero =
-        (double)(sumLeastSigZero/(numBitVectors * 1.0)) - (Math.log(phi)/Math.log(2.0));
+        (double)(sumLeastSigZero/(numBitVectors * 1.0)) - (Math.log(PHI)/Math.log(2.0));
     numDistinctValues = Math.pow(2.0, avgLeastSigZero);
     return ((long)(numDistinctValues));
   }


[3/4] hive git commit: HIVE-10140 : Window boundary is not compared correctly (Aihua Xu via Ashutosh Chauhan)

Posted by px...@apache.org.
HIVE-10140 : Window boundary is not compared correctly (Aihua Xu via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan <ha...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e720b586
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e720b586
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e720b586

Branch: refs/heads/branch-1.0
Commit: e720b586bf0b6d881a58ddf38fecf132bccf9011
Parents: 20e1cc8
Author: Aihua Xu <ai...@gmail.com>
Authored: Thu Apr 30 09:42:00 2015 -0700
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Tue Aug 11 14:17:41 2015 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/parse/WindowingSpec.java     |   7 +-
 .../clientpositive/windowing_windowspec.q       |   2 +
 .../clientpositive/windowing_windowspec.q.out   | 108 +++++++++++++++++++
 3 files changed, 115 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
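The one-line logic change is easiest to see in isolation: for PRECEDING boundaries a larger amount lies further before the current row, so the old "amt - rb.amt" compared "10 preceding" as greater than "2 preceding" and inverted the start/end ordering for valid windows such as "rows between 10 preceding and 2 preceding". A simplified, self-contained sketch of the corrected comparison (illustrative names, not the actual WindowingSpec classes):

enum Direction { PRECEDING, CURRENT, FOLLOWING }

class Boundary implements Comparable<Boundary> {
  final Direction direction;
  final int amt;

  Boundary(Direction direction, int amt) {
    this.direction = direction;
    this.amt = amt;
  }

  @Override
  public int compareTo(Boundary other) {
    int c = direction.compareTo(other.direction);
    if (c != 0) {
      return c;
    }
    // For PRECEDING, a larger amt is an earlier position, hence the flipped subtraction.
    return direction == Direction.PRECEDING ? other.amt - amt : amt - other.amt;
  }

  public static void main(String[] args) {
    Boundary start = new Boundary(Direction.PRECEDING, 10); // "10 preceding"
    Boundary end = new Boundary(Direction.PRECEDING, 2);    // "2 preceding"
    System.out.println(start.compareTo(end) < 0); // true: start now sorts before end
  }
}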


http://git-wip-us.apache.org/repos/asf/hive/blob/e720b586/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java
index 28afc6b..83f3513 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingSpec.java
@@ -613,8 +613,10 @@ public class WindowingSpec {
       if (c != 0) {
         return c;
       }
+
       RangeBoundarySpec rb = (RangeBoundarySpec) other;
-      return amt - rb.amt;
+      // Valid range is "range/rows between 10 preceding and 2 preceding" for preceding case
+      return this.direction == Direction.PRECEDING ? rb.amt - amt : amt - rb.amt;
     }
 
   }
@@ -712,7 +714,8 @@ public class WindowingSpec {
         return c;
       }
       ValueBoundarySpec vb = (ValueBoundarySpec) other;
-      return amt - vb.amt;
+      // Valid range is "range/rows between 10 preceding and 2 preceding" for preceding case
+      return this.direction == Direction.PRECEDING ? vb.amt - amt : amt - vb.amt;
     }
 
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/e720b586/ql/src/test/queries/clientpositive/windowing_windowspec.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/windowing_windowspec.q b/ql/src/test/queries/clientpositive/windowing_windowspec.q
index 6d8ce67..2055e9d 100644
--- a/ql/src/test/queries/clientpositive/windowing_windowspec.q
+++ b/ql/src/test/queries/clientpositive/windowing_windowspec.q
@@ -31,6 +31,8 @@ select s, sum(i) over(partition by ts order by s) from over10k limit 100;
 
 select f, sum(f) over (partition by ts order by f range between unbounded preceding and current row) from over10k limit 100;
 
+select f, sum(f) over (partition by ts order by f rows between 2 preceding and 1 preceding) from over10k limit 100;
+
 select s, i, round(avg(d) over (partition by s order by i) / 10.0 , 2) from over10k limit 7;
 
 select s, i, round((avg(d) over  w1 + 10.0) - (avg(d) over w1 - 10.0),2) from over10k window w1 as (partition by s order by i) limit 7;

http://git-wip-us.apache.org/repos/asf/hive/blob/e720b586/ql/src/test/results/clientpositive/windowing_windowspec.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/windowing_windowspec.q.out b/ql/src/test/results/clientpositive/windowing_windowspec.q.out
index 00af6b8..de4ae97 100644
--- a/ql/src/test/results/clientpositive/windowing_windowspec.q.out
+++ b/ql/src/test/results/clientpositive/windowing_windowspec.q.out
@@ -800,6 +800,114 @@ POSTHOOK: Input: default@over10k
 71.68	722.6499947607517
 79.46	802.1099938452244
 80.02	882.1299904882908
+PREHOOK: query: select f, sum(f) over (partition by ts order by f rows between 2 preceding and 1 preceding) from over10k limit 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k
+#### A masked pattern was here ####
+POSTHOOK: query: select f, sum(f) over (partition by ts order by f rows between 2 preceding and 1 preceding) from over10k limit 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k
+#### A masked pattern was here ####
+3.17	14.0600004196167
+10.89	28.600000381469727
+14.54	43.38000011444092
+14.78	58.0600004196167
+17.85	67.78000068664551
+20.61	81.9300012588501
+28.69	96.3700008392334
+29.22	109.69000053405762
+31.17	127.42999839782715
+38.35	137.3499984741211
+38.61	147.60999870300293
+39.48	156.97999954223633
+40.54	160.22999954223633
+41.6	167.70000076293945
+46.08	182.5800018310547
+54.36	198.97999954223633
+56.94	222.3400001525879
+64.96	249.7799949645996
+73.52	273.99999618530273
+78.58	298.4700012207031
+81.41	318.2200012207031
+84.71	332.1300048828125
+87.43	344.9100036621094
+91.36	356.45999908447266
+92.96	366.79000091552734
+95.04	279.36000061035156
+0.83	2.8199999928474426
+1.99	6.550000011920929
+3.73	15.409999668598175
+8.86	25.199999570846558
+10.62	34.52999925613403
+11.32	43.6299991607666
+12.83	49.46999931335449
+14.7	53.80999946594238
+14.96	60.06999969482422
+17.58	66.34000015258789
+19.1	72.65000057220459
+21.01	84.64000129699707
+26.95	94.29000091552734
+27.23	104.26000022888184
+29.07	112.95999908447266
+29.71	117.8499984741211
+31.84	122.55999946594238
+31.94	128.80999946594238
+35.32	136.42000007629395
+37.32	143.07999992370605
+38.5	153.22000122070312
+42.08	162.20000076293945
+44.3	169.54000091552734
+44.66	177.88000106811523
+46.84	184.68999862670898
+48.89	190.02999877929688
+49.64	195.64999771118164
+50.28	200.89999771118164
+52.09	205.2699966430664
+53.26	209.71999740600586
+54.09	215.88999938964844
+56.45	220.55999755859375
+56.76	228.70999908447266
+61.41	236.5
+61.88	243.07999801635742
+63.03	250.87000274658203
+64.55	258.08000564575195
+68.62	272.3300018310547
+76.13	288.3500061035156
+79.05	304.2300033569336
+80.43	317.02000427246094
+81.41	323.74000549316406
+82.85	328.67000579833984
+83.98	332.4500045776367
+84.21	336.59000396728516
+85.55	341.67000579833984
+87.93	346.62000274658203
+88.93	356.6800003051758
+94.27	370.57999420166016
+99.45	282.6499938964844
+0.36	0.8400000035762787
+0.48	1.6300000250339508
+0.79	2.9000000059604645
+1.27	7.020000010728836
+4.48	15.540000021457672
+9.0	38.02000045776367
+23.27	61.87999963760376
+25.13	82.73999977111816
+25.34	99.64999961853027
+25.91	105.38999938964844
+29.01	110.72999954223633
+30.47	123.34000015258789
+37.95	136.72999954223633
+39.3	153.6299991607666
+45.91	175.5999984741211
+52.44	191.74999618530273
+54.1	209.14999771118164
+56.7	222.0099983215332
+58.77	231.6599998474121
+62.09	245.7599983215332
+68.2	260.73999786376953
+71.68	281.4299964904785
+79.46	299.35999298095703
+80.02	312.4499969482422
 PREHOOK: query: select s, i, round(avg(d) over (partition by s order by i) / 10.0 , 2) from over10k limit 7
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over10k


[4/4] hive git commit: HIVE-10698 : query on view results fails with table not found error if view is created with subquery alias (CTE). (Pengcheng Xiong via Ashutosh Chauhan, John Pullokkaran)

Posted by px...@apache.org.
HIVE-10698 : query on view results fails with table not found error if view is created with subquery alias (CTE). (Pengcheng Xiong via Ashutosh Chauhan, John Pullokkaran)

Signed-off-by: Ashutosh Chauhan <ha...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1954c908
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1954c908
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1954c908

Branch: refs/heads/branch-1.0
Commit: 1954c90881f0072c385595d01f753e6c3c4317a6
Parents: e720b58
Author: Pengcheng Xiong <px...@hortonworks.com>
Authored: Thu May 14 20:42:00 2015 -0700
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Tue Aug 11 14:18:13 2015 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/parse/HiveParser.g    |   4 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  66 ++---
 ql/src/test/queries/clientpositive/cteViews.q   |  41 ++++
 .../test/results/clientpositive/cteViews.q.out  | 242 +++++++++++++++++++
 4 files changed, 309 insertions(+), 44 deletions(-)
----------------------------------------------------------------------
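The core of the patch below is the lookup order during alias resolution: the analyzer now checks the CTE map before the metastore, so a CTE name inside a view body is rewritten to a subquery instead of being treated, and database-qualified, as a table reference. Previously the CTE lookup only happened after a failed catalog lookup, and view expansion had qualified the CTE name with the view's database, so querying the view from another database failed with a table-not-found error. A hedged sketch of the new resolution order (stub maps standing in for the real SemanticAnalyzer state):

import java.util.HashMap;
import java.util.Map;

class AliasResolutionDemo {
  static final Map<String, String> cteDefinitions = new HashMap<>(); // CTE name -> definition (stub)
  static final Map<String, String> catalogTables = new HashMap<>();  // table name -> metadata (stub)

  static String resolve(String tabName) {
    // 1. CTEs win: a matching name becomes a subquery and is never db-qualified.
    String cte = cteDefinitions.get(tabName.toLowerCase());
    if (cte != null) {
      return "subquery: " + cte;
    }
    // 2. Only then consult the catalog; a miss here is a genuine INVALID_TABLE error.
    String table = catalogTables.get(tabName.toLowerCase());
    if (table == null) {
      throw new IllegalStateException("Table not found: " + tabName);
    }
    return "table: " + table;
  }

  public static void main(String[] args) {
    cteDefinitions.put("cte", "select key from src order by key limit 5");
    catalogTables.put("src", "default.src");
    System.out.println(resolve("cte")); // resolved as a subquery, no catalog lookup
    System.out.println(resolve("src")); // falls through to the catalog
  }
}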


http://git-wip-us.apache.org/repos/asf/hive/blob/1954c908/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index ef6d6f7..0fe1d69 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -2046,7 +2046,7 @@ queryStatementExpression[boolean topLevel]
     (w=withClause {topLevel}?)?
     queryStatementExpressionBody[topLevel] {
       if ($w.tree != null) {
-      adaptor.addChild($queryStatementExpressionBody.tree, $w.tree);
+      $queryStatementExpressionBody.tree.insertChild(0, $w.tree);
       }
     }
     ->  queryStatementExpressionBody
@@ -2164,7 +2164,7 @@ selectStatementWithCTE
     (w=withClause)?
     selectStatement[true] {
       if ($w.tree != null) {
-      adaptor.addChild($selectStatement.tree, $w.tree);
+      $selectStatement.tree.insertChild(0, $w.tree);
       }
     }
     ->  selectStatement

http://git-wip-us.apache.org/repos/asf/hive/blob/1954c908/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 63d5214..5deda9d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -798,10 +798,13 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
 
     qb.getParseInfo().setSrcForAlias(alias, tableTree);
 
-    unparseTranslator.addTableNameTranslation(tableTree, SessionState.get().getCurrentDatabase());
-    if (aliasIndex != 0) {
-      unparseTranslator.addIdentifierTranslation((ASTNode) tabref
-          .getChild(aliasIndex));
+    // if alias to CTE contains the alias, we do not do the translation because
+    // cte is actually a subquery.
+    if (!this.aliasToCTEs.containsKey(alias)) {
+      unparseTranslator.addTableNameTranslation(tableTree, SessionState.get().getCurrentDatabase());
+      if (aliasIndex != 0) {
+        unparseTranslator.addIdentifierTranslation((ASTNode) tabref.getChild(aliasIndex));
+      }
     }
 
     return alias;
@@ -1019,19 +1022,6 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
    *
    */
   private ASTNode findCTEFromName(QB qb, String cteName) {
-
-    /*
-     * When saving a view definition all table references in the AST are qualified; including CTE references.
-     * Where as CTE definitions have no DB qualifier; so we strip out the DB qualifier before searching in
-     * <code>aliasToCTEs</code> map.
-     */
-    String currDB = SessionState.get().getCurrentDatabase();
-    if ( currDB != null && cteName.startsWith(currDB) &&
-        cteName.length() > currDB.length() &&
-        cteName.charAt(currDB.length()) == '.'   ) {
-      cteName = cteName.substring(currDB.length() + 1);
-    }
-
     StringBuffer qId = new StringBuffer();
     if (qb.getId() != null) {
       qId.append(qb.getId());
@@ -1064,14 +1054,6 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     cteAlias = cteAlias == null ? cteName : cteAlias;
     ASTNode cteQryNode = findCTEFromName(qb, cteName);
     QBExpr cteQBExpr = new QBExpr(cteAlias);
-
-    String cteText = ctx.getTokenRewriteStream().toString(
-        cteQryNode.getTokenStartIndex(), cteQryNode.getTokenStopIndex());
-    final ASTNodeOrigin cteOrigin = new ASTNodeOrigin("CTE", cteName,
-        cteText, cteAlias, cteQryNode);
-    cteQryNode = (ASTNode) ParseDriver.adaptor.dupTree(cteQryNode);
-    SubQueryUtils.setOriginDeep(cteQryNode, cteOrigin);
-
     doPhase1QBExpr(cteQryNode, cteQBExpr, qb.getId(), cteAlias);
     qb.rewriteCTEToSubq(cteAlias, cteName, cteQBExpr);
   }
@@ -1575,31 +1557,31 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
 
       for (String alias : tabAliases) {
         String tab_name = qb.getTabNameForAlias(alias);
+        
+        // we first look for this alias from CTE, and then from catalog.
+        /*
+         * if this s a CTE reference: Add its AST as a SubQuery to this QB.
+         */
+        ASTNode cteNode = findCTEFromName(qb, tab_name.toLowerCase());
+        if (cteNode != null) {
+          String cte_name = tab_name.toLowerCase();
+          if (ctesExpanded.contains(cte_name)) {
+            throw new SemanticException("Recursive cte " + tab_name + " detected (cycle: "
+                + StringUtils.join(ctesExpanded, " -> ") + " -> " + tab_name + ").");
+          }
+          addCTEAsSubQuery(qb, cte_name, alias);
+          sqAliasToCTEName.put(alias, cte_name);
+          continue;
+        }
+
         Table tab = db.getTable(tab_name, false);
         if (tab == null) {
-          /*
-           * if this s a CTE reference:
-           * Add its AST as a SubQuery to this QB.
-           */
-          ASTNode cteNode = findCTEFromName(qb, tab_name.toLowerCase());
-          if ( cteNode != null ) {
-            String cte_name = tab_name.toLowerCase();
-            if (ctesExpanded.contains(cte_name)) {
-              throw new SemanticException("Recursive cte " + tab_name +
-                  " detected (cycle: " + StringUtils.join(ctesExpanded, " -> ") +
-                  " -> " + tab_name + ").");
-            }
-            addCTEAsSubQuery(qb, cte_name, alias);
-            sqAliasToCTEName.put(alias, cte_name);
-            continue;
-          }
           ASTNode src = qb.getParseInfo().getSrcForAlias(alias);
           if (null != src) {
             throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(src));
           } else {
             throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(alias));
           }
-
         }
 
         // Disallow INSERT INTO on bucketized tables

http://git-wip-us.apache.org/repos/asf/hive/blob/1954c908/ql/src/test/queries/clientpositive/cteViews.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/cteViews.q b/ql/src/test/queries/clientpositive/cteViews.q
new file mode 100644
index 0000000..c076841
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/cteViews.q
@@ -0,0 +1,41 @@
+with src1 as (select key from src order by key limit 5)
+select * from src1;
+
+use default;
+drop view v;
+create view v as with cte as (select key, value from src order by key limit 5)
+select key from cte;
+
+describe extended v;
+
+create database bug;
+use bug;
+select * from default.v;
+drop database bug;
+
+use default;
+drop view v;
+create view v as with cte as (select * from src  order by key limit 5)
+select * from cte;
+
+describe extended v;
+
+create database bug;
+use bug;
+select * from default.v;
+drop database bug;
+
+
+use default;
+drop view v;
+create view v as with src1 as (select key from src order by key limit 5)
+select * from src1;
+
+describe extended v;
+
+create database bug;
+use bug;
+select * from default.v;
+use default;
+drop view v;
+drop database bug;

http://git-wip-us.apache.org/repos/asf/hive/blob/1954c908/ql/src/test/results/clientpositive/cteViews.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cteViews.q.out b/ql/src/test/results/clientpositive/cteViews.q.out
new file mode 100644
index 0000000..6291784
--- /dev/null
+++ b/ql/src/test/results/clientpositive/cteViews.q.out
@@ -0,0 +1,242 @@
+PREHOOK: query: with src1 as (select key from src order by key limit 5)
+select * from src1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: with src1 as (select key from src order by key limit 5)
+select * from src1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0
+0
+0
+10
+100
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
+PREHOOK: query: drop view v
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view v
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view v as with cte as (select key, value from src order by key limit 5)
+select key from cte
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@v
+POSTHOOK: query: create view v as with cte as (select key, value from src order by key limit 5)
+select key from cte
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@v
+PREHOOK: query: describe extended v
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@v
+POSTHOOK: query: describe extended v
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@v
+key                 	string              	                    
+	 	 
+#### A masked pattern was here ####
+select key from cte, viewExpandedText:with cte as (select `src`.`key`, `src`.`value` from `default`.`src` order by key limit 5)	 	 
+select `cte`.`key` from cte, tableType:VIRTUAL_VIEW)		 
+PREHOOK: query: create database bug
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:bug
+POSTHOOK: query: create database bug
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:bug
+PREHOOK: query: use bug
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:bug
+POSTHOOK: query: use bug
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:bug
+PREHOOK: query: select * from default.v
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@v
+#### A masked pattern was here ####
+POSTHOOK: query: select * from default.v
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@v
+#### A masked pattern was here ####
+0
+0
+0
+10
+100
+PREHOOK: query: drop database bug
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:bug
+PREHOOK: Output: database:bug
+POSTHOOK: query: drop database bug
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:bug
+POSTHOOK: Output: database:bug
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
+PREHOOK: query: drop view v
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@v
+PREHOOK: Output: default@v
+POSTHOOK: query: drop view v
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@v
+POSTHOOK: Output: default@v
+PREHOOK: query: create view v as with cte as (select * from src  order by key limit 5)
+select * from cte
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@v
+POSTHOOK: query: create view v as with cte as (select * from src  order by key limit 5)
+select * from cte
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@v
+PREHOOK: query: describe extended v
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@v
+POSTHOOK: query: describe extended v
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@v
+key                 	string              	                    
+value               	string              	                    
+	 	 
+#### A masked pattern was here ####
+select * from cte, viewExpandedText:with cte as (select `src`.`key`, `src`.`value` from `default`.`src`  order by `src`.`key` limit 5)	 	 
+select `cte`.`key`, `cte`.`value` from cte, tableType:VIRTUAL_VIEW)		 
+PREHOOK: query: create database bug
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:bug
+POSTHOOK: query: create database bug
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:bug
+PREHOOK: query: use bug
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:bug
+POSTHOOK: query: use bug
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:bug
+PREHOOK: query: select * from default.v
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@v
+#### A masked pattern was here ####
+POSTHOOK: query: select * from default.v
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@v
+#### A masked pattern was here ####
+0	val_0
+0	val_0
+0	val_0
+10	val_10
+100	val_100
+PREHOOK: query: drop database bug
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:bug
+PREHOOK: Output: database:bug
+POSTHOOK: query: drop database bug
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:bug
+POSTHOOK: Output: database:bug
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
+PREHOOK: query: drop view v
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@v
+PREHOOK: Output: default@v
+POSTHOOK: query: drop view v
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@v
+POSTHOOK: Output: default@v
+PREHOOK: query: create view v as with src1 as (select key from src order by key limit 5)
+select * from src1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@v
+POSTHOOK: query: create view v as with src1 as (select key from src order by key limit 5)
+select * from src1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@v
+PREHOOK: query: describe extended v
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@v
+POSTHOOK: query: describe extended v
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@v
+key                 	string              	                    
+	 	 
+#### A masked pattern was here ####
+select * from src1, viewExpandedText:with src1 as (select `src`.`key` from `default`.`src` order by key limit 5)	 	 
+select `src1`.`key` from src1, tableType:VIRTUAL_VIEW)		 
+PREHOOK: query: create database bug
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:bug
+POSTHOOK: query: create database bug
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:bug
+PREHOOK: query: use bug
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:bug
+POSTHOOK: query: use bug
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:bug
+PREHOOK: query: select * from default.v
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@v
+#### A masked pattern was here ####
+POSTHOOK: query: select * from default.v
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@v
+#### A masked pattern was here ####
+0
+0
+0
+10
+100
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
+PREHOOK: query: drop view v
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@v
+PREHOOK: Output: default@v
+POSTHOOK: query: drop view v
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@v
+POSTHOOK: Output: default@v
+PREHOOK: query: drop database bug
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:bug
+PREHOOK: Output: database:bug
+POSTHOOK: query: drop database bug
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:bug
+POSTHOOK: Output: database:bug


[2/4] hive git commit: HIVE-7351 ANALYZE TABLE statement fails on postgres metastore (Navis via Alan Gates)

Posted by px...@apache.org.
HIVE-7351 ANALYZE TABLE statement fails on postgres metastore (Navis via Alan Gates)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1672696 13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/20e1cc84
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/20e1cc84
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/20e1cc84

Branch: refs/heads/branch-1.0
Commit: 20e1cc84527b2135162c66770400dabe5ee05153
Parents: 8b9ba26
Author: Alan Gates <ga...@apache.org>
Authored: Fri Apr 10 16:13:23 2015 +0000
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Tue Aug 11 14:16:46 2015 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/exec/Utilities.java    | 19 +++++++++++++++++++
 .../hive/ql/stats/jdbc/JDBCStatsAggregator.java  |  2 +-
 .../hive/ql/stats/jdbc/JDBCStatsPublisher.java   |  4 ++--
 3 files changed, 22 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
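The underlying issue is that some JDBC drivers, older PostgreSQL drivers among them, do not implement Statement.setQueryTimeout and throw instead of ignoring the call, which aborted ANALYZE TABLE stats collection. The patch funnels every call through a tolerant helper. A standalone sketch of the same defensive pattern (illustrative class name, mirroring the Utilities.setQueryTimeout helper introduced in the diff below):

import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;

final class QueryTimeoutSupport {
  static void trySetQueryTimeout(Statement stmt, int timeoutSeconds) throws SQLException {
    if (timeoutSeconds < 0) {
      return; // invalid timeout, nothing to set
    }
    try {
      stmt.setQueryTimeout(timeoutSeconds);
    } catch (SQLFeatureNotSupportedException e) {
      // driver does not support query timeouts: proceed without one
    } catch (SQLException e) {
      String message = e.getMessage() == null ? "" : e.getMessage().toLowerCase();
      if (message.contains("implemented") || message.contains("supported")) {
        return; // non-support reported through a plain SQLException
      }
      throw e; // anything else is a real failure
    }
  }
}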


http://git-wip-us.apache.org/repos/asf/hive/blob/20e1cc84/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index b2db584..c696c1d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -47,6 +47,7 @@ import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
 import java.sql.SQLTransientException;
 import java.sql.Timestamp;
 import java.text.SimpleDateFormat;
@@ -3016,6 +3017,24 @@ public final class Utilities {
     }
   }
 
+  public static void setQueryTimeout(java.sql.Statement stmt, int timeout) throws SQLException {
+    if (timeout < 0) {
+      LOG.info("Invalid query timeout " + timeout);
+      return;
+    }
+    try {
+      stmt.setQueryTimeout(timeout);
+    } catch (SQLException e) {
+      String message = e.getMessage() == null ? null : e.getMessage().toLowerCase();
+      if (e instanceof SQLFeatureNotSupportedException ||
+         (message != null && (message.contains("implemented") || message.contains("supported")))) {
+        LOG.info("setQueryTimeout is not supported");
+        return;
+      }
+      throw e;
+    }
+  }
+
   /**
    * Introducing a random factor to the wait time before another retry.
    * The wait time is dependent on # of failures and a random factor.

http://git-wip-us.apache.org/repos/asf/hive/blob/20e1cc84/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
index e26031c..cf25bee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
@@ -81,7 +81,7 @@ public class JDBCStatsAggregator implements StatsAggregator {
     Utilities.SQLCommand<Void> setQueryTimeout = new Utilities.SQLCommand<Void>() {
       @Override
       public Void run(PreparedStatement stmt) throws SQLException {
-        stmt.setQueryTimeout(timeout);
+        Utilities.setQueryTimeout(stmt, timeout);
         return null;
       }
     };

http://git-wip-us.apache.org/repos/asf/hive/blob/20e1cc84/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
index 32826e7..4028a9b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
@@ -81,7 +81,7 @@ public class JDBCStatsPublisher implements StatsPublisher {
     Utilities.SQLCommand<Void> setQueryTimeout = new Utilities.SQLCommand<Void>() {
       @Override
       public Void run(PreparedStatement stmt) throws SQLException {
-        stmt.setQueryTimeout(timeout);
+        Utilities.setQueryTimeout(stmt, timeout);
         return null;
       }
     };
@@ -278,7 +278,7 @@ public class JDBCStatsPublisher implements StatsPublisher {
         conn = DriverManager.getConnection(connectionString);
 
         stmt = conn.createStatement();
-        stmt.setQueryTimeout(timeout);
+        Utilities.setQueryTimeout(stmt, timeout);
 
         // TODO: why is this not done using Hive db scripts?
         // Check if the table exists