Posted to commits@hive.apache.org by ha...@apache.org on 2014/10/15 23:01:48 UTC

svn commit: r1632177 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/parse/ test/queries/clientnegative/ test/queries/clientpositive/ test/results/clientnegative/ test/results/clientpositive/

Author: hashutosh
Date: Wed Oct 15 21:01:48 2014
New Revision: 1632177

URL: http://svn.apache.org/r1632177
Log:
HIVE-7733 : Ambiguous column reference error on query (Navis via Ashutosh Chauhan)
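
In short: this patch removes the per-expression RowResolver.checkColumn() calls
from SemanticAnalyzer and instead performs a single ambiguity check while
rewriting a subquery's RowResolver (the new rewriteRRForSubQ below). The check
is skipped when the subquery is wrapped in a plain top-level SELECT *, for
backward compatibility. A hypothetical sketch of the resulting behavior (src is
the standard test table):

    -- still rejected: subquery a exposes two columns named 'key'
    SELECT a.key FROM (SELECT key, value AS key FROM src) a;
    -- FAILED: SemanticException [Error 10007]: Ambiguous column reference key in a

    -- accepted: the wrapping top-level SELECT * skips the check, and the
    -- duplicate column falls back to its internal name
    SELECT * FROM (SELECT key, value AS key FROM src) a;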

Added:
    hive/trunk/ql/src/test/queries/clientpositive/complex_alias.q
    hive/trunk/ql/src/test/results/clientpositive/complex_alias.q.out
Removed:
    hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col0.q
    hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col1.q
    hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col2.q
    hive/trunk/ql/src/test/results/clientnegative/ambiguous_col0.q.out
    hive/trunk/ql/src/test/results/clientnegative/ambiguous_col1.q.out
    hive/trunk/ql/src/test/results/clientnegative/ambiguous_col2.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/test/results/clientnegative/ambiguous_col.q.out
    hive/trunk/ql/src/test/results/clientpositive/ambiguous_col.q.out

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=1632177&r1=1632176&r2=1632177&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Wed Oct 15 21:01:48 2014
@@ -130,6 +130,10 @@ public class QB {
     return (outer_id == null ? alias : outer_id + ":" + alias);
   }
 
+  public String getAlias() {
+    return qbp.getAlias();
+  }
+
   public QBParseInfo getParseInfo() {
     return qbp;
   }
@@ -248,6 +252,12 @@ public class QB {
     return isQuery;
   }
 
+  // decides whether the RowResolver (RR) of a subquery should be rewritten
+  public boolean isTopLevelSelectStarQuery() {
+    return !isCTAS() && qbp.isTopLevelSimpleSelectStarQuery();
+  }
+
+  // finds the target for the fetch task conversion optimizer (subqueries are not allowed)
   public boolean isSimpleSelectQuery() {
     return qbp.isSimpleSelectQuery() && aliasToSubq.isEmpty() && !isCTAS() &&
         !qbp.isAnalyzeCommand();
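
The new QB.isTopLevelSelectStarQuery() feeds the skipAmbiguityCheck flag in
genPlan() below, and QB.getAlias() exposes the parse-info alias used by the new
rewriteRRForSubQ(). For illustration, hypothetical queries against the
predicate:

    SELECT * FROM (SELECT key, value AS key FROM src) a;   -- qualifies
    CREATE TABLE t AS SELECT * FROM src;                    -- no: CTAS
    SELECT * FROM (SELECT key FROM src) a ORDER BY key;     -- no: not a simple select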

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java?rev=1632177&r1=1632176&r2=1632177&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java Wed Oct 15 21:01:48 2014
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.antlr.runtime.tree.Tree;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec;
@@ -449,39 +450,49 @@ public class QBParseInfo {
     this.outerQueryLimit = outerQueryLimit;
   }
 
+  public boolean isTopLevelSimpleSelectStarQuery() {
+    if (alias != null || destToSelExpr.size() != 1 || !isSimpleSelectQuery()) {
+      return false;
+    }
+    for (ASTNode selExprs : destToSelExpr.values()) {
+      if (selExprs.getChildCount() != 1) {
+        return false;
+      }
+      Tree sel = selExprs.getChild(0).getChild(0);
+      if (sel == null || sel.getType() != HiveParser.TOK_ALLCOLREF) {
+        return false;
+      }
+    }
+    return true;
+  }
+
   public boolean isSimpleSelectQuery() {
-    if (isSubQ || (joinExpr != null)
-        || (!destToGroupby.isEmpty()) || (!destToClusterby.isEmpty())
-        || (!aliasToLateralViews.isEmpty())) {
+    if (isSubQ || joinExpr != null || !destToOrderby.isEmpty() || !destToSortby.isEmpty()
+        || !destToGroupby.isEmpty() || !destToClusterby.isEmpty() || !destToDistributeby.isEmpty()
+        || !aliasToLateralViews.isEmpty() || !destToLateralView.isEmpty()) {
       return false;
     }
 
-    Iterator<Map.Entry<String, LinkedHashMap<String, ASTNode>>> aggrIter = destToAggregationExprs
-        .entrySet().iterator();
-    while (aggrIter.hasNext()) {
-      HashMap<String, ASTNode> h = aggrIter.next().getValue();
-      if ((h != null) && (!h.isEmpty())) {
+    for (Map<String, ASTNode> entry : destToAggregationExprs.values()) {
+      if (entry != null && !entry.isEmpty()) {
         return false;
       }
     }
 
-    if (!destToDistinctFuncExprs.isEmpty()) {
-      Iterator<Map.Entry<String, List<ASTNode>>> distn = destToDistinctFuncExprs
-          .entrySet().iterator();
-      while (distn.hasNext()) {
-        List<ASTNode> ct = distn.next().getValue();
-        if (!ct.isEmpty()) {
-          return false;
-        }
+    for (Map<String, ASTNode> entry : destToWindowingExprs.values()) {
+      if (entry != null && !entry.isEmpty()) {
+        return false;
+      }
+    }
+
+    for (List<ASTNode> ct : destToDistinctFuncExprs.values()) {
+      if (!ct.isEmpty()) {
+        return false;
       }
     }
 
-    Iterator<Map.Entry<String, ASTNode>> iter = nameToDest.entrySet()
-        .iterator();
-    while (iter.hasNext()) {
-      Map.Entry<String, ASTNode> entry = iter.next();
-      ASTNode v = entry.getValue();
-      if (!(((ASTNode)v.getChild(0)).getToken().getType() == HiveParser.TOK_TMP_FILE)) {
+    for (ASTNode v : nameToDest.values()) {
+      if (!(v.getChild(0).getType() == HiveParser.TOK_TMP_FILE)) {
         return false;
       }
     }
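
isSimpleSelectQuery() is also tightened: besides joins, group-by, cluster-by,
and lateral views, it now rejects order-by, sort-by, distribute-by, per-dest
lateral views, and windowing expressions; the iterator loops are rewritten as
for-each loops with no change in behavior. Hypothetical queries the stricter
predicate now excludes:

    SELECT key FROM src SORT BY key;
    SELECT key FROM src DISTRIBUTE BY key;
    SELECT rank() OVER (ORDER BY key) FROM src;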

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java?rev=1632177&r1=1632176&r2=1632177&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java Wed Oct 15 21:01:48 2014
@@ -29,7 +29,6 @@ import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
 
@@ -195,17 +194,6 @@ public class RowResolver implements Seri
     return ret;
   }
 
-  /**
-   * check if column name is already exist in RR
-   */
-  public void checkColumn(String tableAlias, String columnAlias) throws SemanticException {
-    ColumnInfo prev = get(null, columnAlias);
-    if (prev != null &&
-        (tableAlias == null || !tableAlias.equalsIgnoreCase(prev.getTabAlias()))) {
-      throw new SemanticException(ErrorMsg.AMBIGUOUS_COLUMN.getMsg(columnAlias));
-    }
-  }
-
   public ArrayList<ColumnInfo> getColumnInfos() {
     return rowSchema.getSignature();
   }
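
RowResolver.checkColumn() has no remaining callers after this patch; the
duplicate-name check now lives in SemanticAnalyzer.rewriteRRForSubQ() (below),
which performs it in the same pass as the table-alias rewrite.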

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1632177&r1=1632176&r2=1632177&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Oct 15 21:01:48 2014
@@ -37,7 +37,6 @@ import java.util.Set;
 import java.util.TreeSet;
 import java.util.UUID;
 import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 
@@ -70,7 +69,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryProperties;
 import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
@@ -2710,7 +2708,7 @@ public class SemanticAnalyzer extends Ba
   @SuppressWarnings("nls")
   private Integer genColListRegex(String colRegex, String tabAlias,
       ASTNode sel, ArrayList<ExprNodeDesc> col_list,
-      RowResolver input, Integer pos, RowResolver output, List<String> aliases, boolean subQuery)
+      RowResolver input, Integer pos, RowResolver output, List<String> aliases)
       throws SemanticException {
 
     // The table alias should exist
@@ -2768,9 +2766,6 @@ public class SemanticAnalyzer extends Ba
           continue;
         }
 
-        if (subQuery) {
-          output.checkColumn(tmp[0], tmp[1]);
-        }
         ColumnInfo oColInfo = inputColsProcessed.get(colInfo);
         if (oColInfo == null) {
           ExprNodeColumnDesc expr = new ExprNodeColumnDesc(colInfo.getType(),
@@ -3396,7 +3391,6 @@ public class SemanticAnalyzer extends Ba
       posn++;
     }
 
-    boolean subQuery = qb.getParseInfo().getIsSubQ();
     boolean isInTransform = (selExprList.getChild(posn).getChild(0).getType() ==
         HiveParser.TOK_TRANSFORM);
     if (isInTransform) {
@@ -3434,7 +3428,7 @@ public class SemanticAnalyzer extends Ba
       }
       if (isUDTF && (selectStar = udtfExprType == HiveParser.TOK_FUNCTIONSTAR)) {
         genColListRegex(".*", null, (ASTNode) udtfExpr.getChild(0),
-            col_list, inputRR, pos, out_rwsch, qb.getAliases(), subQuery);
+            col_list, inputRR, pos, out_rwsch, qb.getAliases());
       }
     }
 
@@ -3556,7 +3550,7 @@ public class SemanticAnalyzer extends Ba
       if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
         pos = genColListRegex(".*", expr.getChildCount() == 0 ? null
             : getUnescapedName((ASTNode) expr.getChild(0)).toLowerCase(),
-            expr, col_list, inputRR, pos, out_rwsch, qb.getAliases(), subQuery);
+            expr, col_list, inputRR, pos, out_rwsch, qb.getAliases());
         selectStar = true;
       } else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL && !hasAsClause
           && !inputRR.getIsExprResolver()
@@ -3565,7 +3559,7 @@ public class SemanticAnalyzer extends Ba
         // This can only happen without AS clause
         // We don't allow this for ExprResolver - the Group By case
         pos = genColListRegex(unescapeIdentifier(expr.getChild(0).getText()),
-            null, expr, col_list, inputRR, pos, out_rwsch, qb.getAliases(), subQuery);
+            null, expr, col_list, inputRR, pos, out_rwsch, qb.getAliases());
       } else if (expr.getType() == HiveParser.DOT
           && expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
           && inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0)
@@ -3578,7 +3572,7 @@ public class SemanticAnalyzer extends Ba
         pos = genColListRegex(unescapeIdentifier(expr.getChild(1).getText()),
             unescapeIdentifier(expr.getChild(0).getChild(0).getText()
                 .toLowerCase()), expr, col_list, inputRR, pos, out_rwsch,
-            qb.getAliases(), subQuery);
+            qb.getAliases());
       } else {
         // Case when this is an expression
         TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR);
@@ -3592,9 +3586,6 @@ public class SemanticAnalyzer extends Ba
           colAlias = recommended;
         }
         col_list.add(exp);
-        if (subQuery) {
-          out_rwsch.checkColumn(tabAlias, colAlias);
-        }
 
         ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(pos),
             exp.getWritableObjectInspector(), tabAlias, false);
@@ -8909,24 +8900,6 @@ public class SemanticAnalyzer extends Ba
       }
     }
 
-    // change curr ops row resolver's tab aliases to query alias if it
-    // exists
-    if (qb.getParseInfo().getAlias() != null) {
-      RowResolver rr = opParseCtx.get(curr).getRowResolver();
-      RowResolver newRR = new RowResolver();
-      String alias = qb.getParseInfo().getAlias();
-      for (ColumnInfo colInfo : rr.getColumnInfos()) {
-        String name = colInfo.getInternalName();
-        String[] tmp = rr.reverseLookup(name);
-        if ("".equals(tmp[0]) || tmp[1] == null) {
-          // ast expression is not a valid column name for table
-          tmp[1] = colInfo.getInternalName();
-        }
-        newRR.put(alias, tmp[1], colInfo);
-      }
-      opParseCtx.get(curr).setRowResolver(newRR);
-    }
-
     return curr;
   }
 
@@ -9511,13 +9484,14 @@ public class SemanticAnalyzer extends Ba
     }
   }
 
-  private Operator genPlan(QBExpr qbexpr) throws SemanticException {
+  private Operator genPlan(QB parent, QBExpr qbexpr) throws SemanticException {
     if (qbexpr.getOpcode() == QBExpr.Opcode.NULLOP) {
-      return genPlan(qbexpr.getQB());
+      boolean skipAmbiguityCheck = viewSelect == null && parent.isTopLevelSelectStarQuery();
+      return genPlan(qbexpr.getQB(), skipAmbiguityCheck);
     }
     if (qbexpr.getOpcode() == QBExpr.Opcode.UNION) {
-      Operator qbexpr1Ops = genPlan(qbexpr.getQBExpr1());
-      Operator qbexpr2Ops = genPlan(qbexpr.getQBExpr2());
+      Operator qbexpr1Ops = genPlan(parent, qbexpr.getQBExpr1());
+      Operator qbexpr2Ops = genPlan(parent, qbexpr.getQBExpr2());
 
       return genUnionPlan(qbexpr.getAlias(), qbexpr.getQBExpr1().getAlias(),
           qbexpr1Ops, qbexpr.getQBExpr2().getAlias(), qbexpr2Ops);
@@ -9525,8 +9499,13 @@ public class SemanticAnalyzer extends Ba
     return null;
   }
 
-  @SuppressWarnings("nls")
   public Operator genPlan(QB qb) throws SemanticException {
+    return genPlan(qb, false);
+  }
+
+  @SuppressWarnings("nls")
+  public Operator genPlan(QB qb, boolean skipAmbiguityCheck)
+      throws SemanticException {
 
     // First generate all the opInfos for the elements in the from clause
     Map<String, Operator> aliasToOpInfo = new HashMap<String, Operator>();
@@ -9534,8 +9513,7 @@ public class SemanticAnalyzer extends Ba
     // Recurse over the subqueries to fill the subquery part of the plan
     for (String alias : qb.getSubqAliases()) {
       QBExpr qbexpr = qb.getSubqForAlias(alias);
-      aliasToOpInfo.put(alias, genPlan(qbexpr));
-      qbexpr.setAlias(alias);
+      aliasToOpInfo.put(alias, genPlan(qb, qbexpr));
     }
 
     // Recurse over all the source tables
@@ -9634,10 +9612,38 @@ public class SemanticAnalyzer extends Ba
       LOG.debug("Created Plan for Query Block " + qb.getId());
     }
 
+    if (qb.getAlias() != null) {
+      rewriteRRForSubQ(qb.getAlias(), bodyOpInfo, skipAmbiguityCheck);
+    }
+
     this.qb = qb;
     return bodyOpInfo;
   }
 
+  // changes the current operator's row resolver table aliases to the subquery alias
+  private void rewriteRRForSubQ(String alias, Operator operator, boolean skipAmbiguityCheck)
+      throws SemanticException {
+    RowResolver rr = opParseCtx.get(operator).getRowResolver();
+    RowResolver newRR = new RowResolver();
+    for (ColumnInfo colInfo : rr.getColumnInfos()) {
+      String name = colInfo.getInternalName();
+      String[] tmp = rr.reverseLookup(name);
+      if ("".equals(tmp[0]) || tmp[1] == null) {
+        // the AST expression is not a valid column name for the table
+        tmp[1] = colInfo.getInternalName();
+      } else if (newRR.get(alias, tmp[1]) != null) {
+        // enforce uniqueness of column names
+        if (!skipAmbiguityCheck) {
+          throw new SemanticException(ErrorMsg.AMBIGUOUS_COLUMN.getMsg(tmp[1] + " in " + alias));
+        }
+        // if it is wrapped in a top-level select-star query, skip the ambiguity check (for backward compatibility)
+        tmp[1] = colInfo.getInternalName();
+      }
+      newRR.put(alias, tmp[1], colInfo);
+    }
+    opParseCtx.get(operator).setRowResolver(newRR);
+  }
+
   private Table getDummyTable() throws SemanticException {
     Path dummyPath = createDummyFile();
     Table desc = new Table(DUMMY_DATABASE, DUMMY_TABLE);
@@ -13686,7 +13692,6 @@ public class SemanticAnalyzer extends Ba
               ColumnInfo oColInfo = new ColumnInfo(
                   getColumnInternalName(projsForWindowSelOp.size()), wtp.getValue(), null, false);
               if (false) {
-                out_rwsch.checkColumn(null, wExprSpec.getAlias());
                 out_rwsch.put(null, wExprSpec.getAlias(), oColInfo);
               } else {
                 out_rwsch.putExpression(wExprSpec.getExpression(), oColInfo);
@@ -13823,9 +13828,6 @@ public class SemanticAnalyzer extends Ba
         throw new OptiqSemanticException(msg);
       }
 
-      // 4. Determine if select corresponds to a subquery
-      subQuery = qb.getParseInfo().getIsSubQ();
-
       // 4. Bailout if select involves Transform
       boolean isInTransform = (selExprList.getChild(posn).getChild(0).getType() == HiveParser.TOK_TRANSFORM);
       if (isInTransform) {
@@ -13883,8 +13885,7 @@ public class SemanticAnalyzer extends Ba
         if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
           pos = genColListRegex(".*",
               expr.getChildCount() == 0 ? null : getUnescapedName((ASTNode) expr.getChild(0))
-                  .toLowerCase(), expr, col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs,
-              subQuery);
+                  .toLowerCase(), expr, col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs);
           selectStar = true;
         } else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL && !hasAsClause
             && !inputRR.getIsExprResolver()
@@ -13893,7 +13894,7 @@ public class SemanticAnalyzer extends Ba
           // This can only happen without AS clause
           // We don't allow this for ExprResolver - the Group By case
           pos = genColListRegex(unescapeIdentifier(expr.getChild(0).getText()), null, expr,
-              col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs, subQuery);
+              col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs);
         } else if (expr.getType() == HiveParser.DOT
             && expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
             && inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0).getChild(0).getText()
@@ -13904,7 +13905,7 @@ public class SemanticAnalyzer extends Ba
           // We don't allow this for ExprResolver - the Group By case
           pos = genColListRegex(unescapeIdentifier(expr.getChild(1).getText()),
               unescapeIdentifier(expr.getChild(0).getChild(0).getText().toLowerCase()), expr,
-              col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs, subQuery);
+              col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs);
         } else if (expr.toStringTree().contains("TOK_FUNCTIONDI") && !(srcRel instanceof HiveAggregateRel)) {
           // Likely a malformed query eg, select hash(distinct c1) from t1;
           throw new OptiqSemanticException("Distinct without an aggreggation.");
@@ -13919,9 +13920,6 @@ public class SemanticAnalyzer extends Ba
             colAlias = recommended;
           }
           col_list.add(exp);
-          if (subQuery) {
-            out_rwsch.checkColumn(tabAlias, colAlias);
-          }
 
           ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(pos),
               exp.getWritableObjectInspector(), tabAlias, false);
@@ -13997,7 +13995,6 @@ public class SemanticAnalyzer extends Ba
       for (String subqAlias : qb.getSubqAliases()) {
         QBExpr qbexpr = qb.getSubqForAlias(subqAlias);
         aliasToRel.put(subqAlias, genLogicalPlan(qbexpr));
-        qbexpr.setAlias(subqAlias);
       }
 
       // 1.2 Recurse over all the source tables
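
The net effect in SemanticAnalyzer: the subquery RR rewrite moves out of
genBodyPlan() into rewriteRRForSubQ(), invoked from genPlan() for every aliased
QB, and it now enforces column-name uniqueness itself. A hypothetical sketch of
the rewrite for SELECT * FROM (SELECT key, key + 1 FROM src) a:

    -- (src, key)    becomes (a, key)     column name kept, table alias replaced
    -- ('', _col1)   becomes (a, _col1)   expressions fall back to internal names
    -- duplicate names throw AMBIGUOUS_COLUMN unless skipAmbiguityCheck is set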

Added: hive/trunk/ql/src/test/queries/clientpositive/complex_alias.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/complex_alias.q?rev=1632177&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/complex_alias.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/complex_alias.q Wed Oct 15 21:01:48 2014
@@ -0,0 +1,46 @@
+CREATE TABLE agg1 (col0 INT, col1 STRING, col2 DOUBLE);
+
+INSERT INTO TABLE agg1 select key,value,key from src tablesample (1 rows);
+
+EXPLAIN
+SELECT single_use_subq11.a1 AS a1,
+       single_use_subq11.a2 AS a2
+FROM   (SELECT Sum(agg1.col2) AS a1
+        FROM   agg1
+        GROUP  BY agg1.col0) single_use_subq12
+       JOIN (SELECT alias.a2 AS a0,
+                    alias.a1 AS a1,
+                    alias.a1 AS a2
+             FROM   (SELECT agg1.col1 AS a0,
+                            '42'      AS a1,
+                            agg1.col0 AS a2
+                     FROM   agg1
+                     UNION ALL
+                     SELECT agg1.col1 AS a0,
+                            '41'      AS a1,
+                            agg1.col0 AS a2
+                     FROM   agg1) alias
+             GROUP  BY alias.a2,
+                       alias.a1) single_use_subq11
+         ON ( single_use_subq11.a0 = single_use_subq11.a0 );
+
+SELECT single_use_subq11.a1 AS a1,
+       single_use_subq11.a2 AS a2
+FROM   (SELECT Sum(agg1.col2) AS a1
+        FROM   agg1
+        GROUP  BY agg1.col0) single_use_subq12
+       JOIN (SELECT alias.a2 AS a0,
+                    alias.a1 AS a1,
+                    alias.a1 AS a2
+             FROM   (SELECT agg1.col1 AS a0,
+                            '42'      AS a1,
+                            agg1.col0 AS a2
+                     FROM   agg1
+                     UNION ALL
+                     SELECT agg1.col1 AS a0,
+                            '41'      AS a1,
+                            agg1.col0 AS a2
+                     FROM   agg1) alias
+             GROUP  BY alias.a2,
+                       alias.a1) single_use_subq11
+         ON ( single_use_subq11.a0 = single_use_subq11.a0 );
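
Before this patch, this auto-generated join query (from the HIVE-7733 report)
failed with SemanticException "Ambiguous column reference"; it now compiles and
runs, as the new .q.out below shows.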

Modified: hive/trunk/ql/src/test/results/clientnegative/ambiguous_col.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/ambiguous_col.q.out?rev=1632177&r1=1632176&r2=1632177&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/ambiguous_col.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/ambiguous_col.q.out Wed Oct 15 21:01:48 2014
@@ -1 +1 @@
-FAILED: SemanticException [Error 10007]: Ambiguous column reference key
+FAILED: SemanticException [Error 10007]: Ambiguous column reference key in a

Modified: hive/trunk/ql/src/test/results/clientpositive/ambiguous_col.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ambiguous_col.q.out?rev=1632177&r1=1632176&r2=1632177&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ambiguous_col.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/ambiguous_col.q.out Wed Oct 15 21:01:48 2014
@@ -53,8 +53,8 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string), _col1 (type: string)
-            outputColumnNames: _col0, _col1
+            expressions: _col0 (type: string), _col0 (type: string), _col1 (type: string)
+            outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
@@ -124,8 +124,8 @@ STAGE PLANS:
           outputColumnNames: _col0
           Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string)
-            outputColumnNames: _col0
+            expressions: _col0 (type: string), _col0 (type: string)
+            outputColumnNames: _col0, _col1
             Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
@@ -195,8 +195,8 @@ STAGE PLANS:
           outputColumnNames: _col0
           Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string)
-            outputColumnNames: _col0
+            expressions: _col0 (type: string), _col0 (type: string)
+            outputColumnNames: _col0, _col1
             Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
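
Note the wider Select Operators: with the ambiguity check skipped under the
wrapping SELECT *, duplicated subquery columns now survive the RR rewrite under
distinct internal names instead of being collapsed, so they appear in the
expanded projection.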

Added: hive/trunk/ql/src/test/results/clientpositive/complex_alias.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/complex_alias.q.out?rev=1632177&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/complex_alias.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/complex_alias.q.out Wed Oct 15 21:01:48 2014
@@ -0,0 +1,269 @@
+PREHOOK: query: CREATE TABLE agg1 (col0 INT, col1 STRING, col2 DOUBLE)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@agg1
+POSTHOOK: query: CREATE TABLE agg1 (col0 INT, col1 STRING, col2 DOUBLE)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@agg1
+PREHOOK: query: INSERT INTO TABLE agg1 select key,value,key from src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@agg1
+POSTHOOK: query: INSERT INTO TABLE agg1 select key,value,key from src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@agg1
+POSTHOOK: Lineage: agg1.col0 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: agg1.col1 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: agg1.col2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+Warning: Shuffle Join JOIN[19][tables = [single_use_subq12, single_use_subq11]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: EXPLAIN
+SELECT single_use_subq11.a1 AS a1,
+       single_use_subq11.a2 AS a2
+FROM   (SELECT Sum(agg1.col2) AS a1
+        FROM   agg1
+        GROUP  BY agg1.col0) single_use_subq12
+       JOIN (SELECT alias.a2 AS a0,
+                    alias.a1 AS a1,
+                    alias.a1 AS a2
+             FROM   (SELECT agg1.col1 AS a0,
+                            '42'      AS a1,
+                            agg1.col0 AS a2
+                     FROM   agg1
+                     UNION ALL
+                     SELECT agg1.col1 AS a0,
+                            '41'      AS a1,
+                            agg1.col0 AS a2
+                     FROM   agg1) alias
+             GROUP  BY alias.a2,
+                       alias.a1) single_use_subq11
+         ON ( single_use_subq11.a0 = single_use_subq11.a0 )
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT single_use_subq11.a1 AS a1,
+       single_use_subq11.a2 AS a2
+FROM   (SELECT Sum(agg1.col2) AS a1
+        FROM   agg1
+        GROUP  BY agg1.col0) single_use_subq12
+       JOIN (SELECT alias.a2 AS a0,
+                    alias.a1 AS a1,
+                    alias.a1 AS a2
+             FROM   (SELECT agg1.col1 AS a0,
+                            '42'      AS a1,
+                            agg1.col0 AS a2
+                     FROM   agg1
+                     UNION ALL
+                     SELECT agg1.col1 AS a0,
+                            '41'      AS a1,
+                            agg1.col0 AS a2
+                     FROM   agg1) alias
+             GROUP  BY alias.a2,
+                       alias.a1) single_use_subq11
+         ON ( single_use_subq11.a0 = single_use_subq11.a0 )
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1, Stage-4
+  Stage-4 is a root stage
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: agg1
+            Statistics: Num rows: 1 Data size: 17 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (col0 = col0) (type: boolean)
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Select Operator
+                expressions: '42' (type: string), col0 (type: int)
+                outputColumnNames: _col1, _col2
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Union
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Select Operator
+                    expressions: _col2 (type: int), _col1 (type: string)
+                    outputColumnNames: _col2, _col1
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Group By Operator
+                      keys: _col2 (type: int), _col1 (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int), _col1 (type: string)
+                        sort order: ++
+                        Map-reduce partition columns: _col0 (type: int), _col1 (type: string)
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          TableScan
+            alias: agg1
+            Statistics: Num rows: 1 Data size: 17 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (col0 = col0) (type: boolean)
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Select Operator
+                expressions: '41' (type: string), col0 (type: int)
+                outputColumnNames: _col1, _col2
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Union
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Select Operator
+                    expressions: _col2 (type: int), _col1 (type: string)
+                    outputColumnNames: _col2, _col1
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Group By Operator
+                      keys: _col2 (type: int), _col1 (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int), _col1 (type: string)
+                        sort order: ++
+                        Map-reduce partition columns: _col0 (type: int), _col1 (type: string)
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+      Reduce Operator Tree:
+        Group By Operator
+          keys: KEY._col0 (type: int), KEY._col1 (type: string)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Select Operator
+            expressions: _col1 (type: string), _col1 (type: string)
+            outputColumnNames: _col1, _col2
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              value expressions: _col1 (type: string), _col2 (type: string)
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 
+            1 {VALUE._col1} {VALUE._col2}
+          outputColumnNames: _col2, _col3
+          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Select Operator
+            expressions: _col2 (type: string), _col3 (type: string)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: agg1
+            Statistics: Num rows: 1 Data size: 17 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: col0 (type: int), col2 (type: double)
+              outputColumnNames: col0, col2
+              Statistics: Num rows: 1 Data size: 17 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: sum(col2)
+                keys: col0 (type: int)
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 17 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: int)
+                  Statistics: Num rows: 1 Data size: 17 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: double)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: sum(VALUE._col0)
+          keys: KEY._col0 (type: int)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Select Operator
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[19][tables = [single_use_subq12, single_use_subq11]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: SELECT single_use_subq11.a1 AS a1,
+       single_use_subq11.a2 AS a2
+FROM   (SELECT Sum(agg1.col2) AS a1
+        FROM   agg1
+        GROUP  BY agg1.col0) single_use_subq12
+       JOIN (SELECT alias.a2 AS a0,
+                    alias.a1 AS a1,
+                    alias.a1 AS a2
+             FROM   (SELECT agg1.col1 AS a0,
+                            '42'      AS a1,
+                            agg1.col0 AS a2
+                     FROM   agg1
+                     UNION ALL
+                     SELECT agg1.col1 AS a0,
+                            '41'      AS a1,
+                            agg1.col0 AS a2
+                     FROM   agg1) alias
+             GROUP  BY alias.a2,
+                       alias.a1) single_use_subq11
+         ON ( single_use_subq11.a0 = single_use_subq11.a0 )
+PREHOOK: type: QUERY
+PREHOOK: Input: default@agg1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT single_use_subq11.a1 AS a1,
+       single_use_subq11.a2 AS a2
+FROM   (SELECT Sum(agg1.col2) AS a1
+        FROM   agg1
+        GROUP  BY agg1.col0) single_use_subq12
+       JOIN (SELECT alias.a2 AS a0,
+                    alias.a1 AS a1,
+                    alias.a1 AS a2
+             FROM   (SELECT agg1.col1 AS a0,
+                            '42'      AS a1,
+                            agg1.col0 AS a2
+                     FROM   agg1
+                     UNION ALL
+                     SELECT agg1.col1 AS a0,
+                            '41'      AS a1,
+                            agg1.col0 AS a2
+                     FROM   agg1) alias
+             GROUP  BY alias.a2,
+                       alias.a1) single_use_subq11
+         ON ( single_use_subq11.a0 = single_use_subq11.a0 )
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@agg1
+#### A masked pattern was here ####
+42	42
+41	41