Posted to commits@hive.apache.org by am...@apache.org on 2013/05/08 06:30:31 UTC

svn commit: r1480157 - in /hive/branches/HIVE-4115/ql/src: java/org/apache/hadoop/hive/ql/cube/parse/ test/org/apache/hadoop/hive/ql/cube/parse/ test/org/apache/hadoop/hive/ql/cube/processors/

Author: amareshwari
Date: Wed May  8 04:30:30 2013
New Revision: 1480157

URL: http://svn.apache.org/r1480157
Log:
Add support for joins with conditions specified
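
Join conditions given in the query are now carried through to the rewritten
HQL instead of being dropped. For example, the updated TestCubeDriver
exercises queries of this shape (time range bounds elided here):

    String hqlQuery = driver.compileCubeQuery(
        "select statetable.name, SUM(msr2) from testCube"
        + " join citytable on testCube.cityid = citytable.id"
        + " left outer join statetable on statetable.id = citytable.stateid"
        + " right outer join ziptable on citytable.zipcode = ziptable.code"
        + " where time_range_in('<from>', '<to>')");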

Modified:
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java?rev=1480157&r1=1480156&r2=1480157&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java Wed May  8 04:30:30 2013
@@ -3,8 +3,6 @@ package org.apache.hadoop.hive.ql.cube.p
 import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
 
-import java.util.Arrays;
-
 import org.antlr.runtime.CommonToken;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -45,7 +43,6 @@ public class AggregateResolver implement
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
     if (cubeql.getCube() == null) {
-      LOG.warn("AggregateResolver called without a cube setup. Returning");
       return;
     }
 
@@ -154,12 +151,9 @@ public class AggregateResolver implement
       return "";
     }
 
-    System.out.println("resolveAggregatesForExpr - " + exprTree);
-
     String exprTokens[] = StringUtils.split(exprTree, ",");
     for (int i = 0; i < exprTokens.length; i++) {
       String token = exprTokens[i].trim();
-      System.out.println("_NEW_TOKEN_[" + token + "]");
       String tokenAlias = cubeql.getAlias(token);
       boolean hasAlias = false;
       if (StringUtils.isNotBlank(tokenAlias)) {
@@ -179,14 +173,12 @@ public class AggregateResolver implement
             splits[j] = splits[j].trim();
           }
 
-          System.out.println(">> TOKEN:" + token + " SPLITS:" + Arrays.toString(splits));
           String msrName = (splits.length <= 1) ? splits[0] : splits[1];
           CubeMeasure measure = cubeql.getCube().getMeasureByName(msrName);
           if (measure != null) {
             String msrAggregate = measure.getAggregate();
 
             if (StringUtils.isNotBlank(msrAggregate)) {
-              System.out.println("#replace msrName:["+msrName+"] "+ " with msrAggregate["+msrAggregate+"]");
               exprTokens[i] = msrAggregate + "( " + token + ")" + (hasAlias ? " " + tokenAlias : "");
               exprTokens[i] = exprTokens[i].toLowerCase();
               // Add this expression to aggregate expr set so that group by resolver can skip
@@ -197,8 +189,6 @@ public class AggregateResolver implement
             }
           }
         }
-      } else {
-        System.out.println("Aggregate already specified: " + token);
       }
     }
 

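For reference, resolveAggregatesForExpr splits the select expression on
commas and wraps each bare measure in that measure's default aggregate
(per CubeMeasure.getAggregate()). As a rough illustration, assuming msr2's
default aggregate is SUM (an assumption for illustration only):

    // select expression before   ->  after resolution (lowercased)
    // "cityid, testCube.msr2"    ->  "cityid, sum( testcube.msr2)"
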
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java?rev=1480157&r1=1480156&r2=1480157&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java Wed May  8 04:30:30 2013
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.ql.cube.pa
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.JoinCond;
+import org.apache.hadoop.hive.ql.parse.JoinType;
 import org.apache.hadoop.hive.ql.parse.QB;
 import org.apache.hadoop.hive.ql.parse.QBJoinTree;
 import org.apache.hadoop.hive.ql.parse.QBParseInfo;
@@ -59,6 +60,7 @@ public class CubeQueryContext {
   private List<String> cubeMeasureNames;
   private List<String> cubeDimNames;
   protected Set<CubeDimensionTable> dimensions = new HashSet<CubeDimensionTable>();
+  private final Map<String, AbstractCubeTable> cubeTbls = new HashMap<String, AbstractCubeTable>();
   private final Map<AbstractCubeTable, List<String>> cubeTabToCols =
       new HashMap<AbstractCubeTable, List<String>>();
   protected Set<CubeFactTable> candidateFactTables = new HashSet<CubeFactTable>();
@@ -72,6 +74,7 @@ public class CubeQueryContext {
   private final Map<String, String> exprToAlias = new HashMap<String, String>();
   private final Set<String> aggregateCols = new HashSet<String>();
   private final Set<String> aggregateExprs = new HashSet<String>();
+  private final Map<QBJoinTree, String> joinConds = new HashMap<QBJoinTree, String>();
 
   // storage specific
   protected Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartitionMap =
@@ -164,10 +167,12 @@ public class CubeQueryContext {
           cubeCols.addAll(cubeMeasureNames);
           cubeCols.addAll(cubeDimNames);
           cubeTabToCols.put(cube, cubeCols);
+          cubeTbls.put(tblName.toLowerCase(), cube);
         } else if (client.isDimensionTable(tblName)) {
           CubeDimensionTable dim = client.getDimensionTable(tblName);
           dimensions.add(dim);
           cubeTabToCols.put(dim, MetastoreUtil.getColumnNames(dim));
+          cubeTbls.put(tblName.toLowerCase(), dim);
         }
       }
       if (cube == null && dimensions.size() == 0) {
@@ -562,52 +567,6 @@ public class CubeQueryContext {
             " preserved:" + cond.getPreserved());
       }
     }
-    if (joinTree.getExpressions() != null) {
-      builder.append("\n join expressions:");
-      for (ArrayList<ASTNode> exprs: joinTree.getExpressions()) {
-        builder.append("\n\t exprs:");
-        for (ASTNode expr : exprs) {
-          builder.append("\n\t\t expr:" + expr.dump());
-        }
-      }
-    }
-    if (joinTree.getFilters() != null) {
-      builder.append("\n join filters:");
-      for (ArrayList<ASTNode> exprs: joinTree.getFilters()) {
-        builder.append("\n\t filters:");
-        for (ASTNode expr : exprs) {
-          builder.append("\n\t\t expr:" + expr.dump());
-        }
-      }
-    }
-    if (joinTree.getFiltersForPushing() != null) {
-      builder.append("\n join filtersForPushing: ");
-      for (ArrayList<ASTNode> exprs: joinTree.getFiltersForPushing()) {
-        builder.append("\n\t filters:");
-        for (ASTNode expr : exprs) {
-          builder.append("\n\t\t expr:" + expr.dump());
-        }
-      }
-    }
-
-    if (joinTree.getNullSafes() != null) {
-      builder.append("\n join nullsafes: ");
-      for (Boolean bool: joinTree.getNullSafes()) {
-        builder.append("\n\t " + bool);
-      }
-    }
-    if (joinTree.getMapAliases() != null) {
-      builder.append("\n join mapaliases: ");
-      for (String alias : joinTree.getMapAliases()) {
-        builder.append("\n\t " + alias);
-      }
-    }
-    if (joinTree.getStreamAliases() != null) {
-      builder.append("\n join streamaliases: ");
-      for (String alias : joinTree.getStreamAliases()) {
-        builder.append("\n\t " + alias);
-      }
-    }
   }
 
   public String getSelectTree() {
@@ -668,9 +627,6 @@ public class CubeQueryContext {
   String getQueryFormat() {
     StringBuilder queryFormat = new StringBuilder();
     queryFormat.append(baseQueryFormat);
-    if (joinTree != null) {
-      queryFormat.append(" JOIN %s");
-    }
     if (getWhereTree() != null || hasPartitions()) {
       queryFormat.append(" WHERE %s");
     }
@@ -689,22 +645,11 @@ public class CubeQueryContext {
     return queryFormat.toString();
   }
 
-  private Object[] getQueryTreeStrings(String factStorageTable) {
+  private Object[] getQueryTreeStrings(String factStorageTable) throws SemanticException {
     List<String> qstrs = new ArrayList<String>();
     qstrs.add(getSelectTree());
-    String fromString = HQLParser.getString(getFromTree()).toLowerCase();
     String whereString = getWhereTree(factStorageTable);
-    for (Map.Entry<AbstractCubeTable, String> entry :
-      storageTableToQuery.entrySet()) {
-      String src = entry.getKey().getName().toLowerCase();
-      String alias = getAliasForTabName(src);
-      System.out.println("From string:" + fromString + " src:" + src + " value:" + entry.getValue());
-      fromString = fromString.replaceAll(src, entry.getValue() + " " + alias);
-    }
-    qstrs.add(fromString);
-    if (joinTree != null) {
-      qstrs.add(HQLParser.getString(joinTree));
-    }
+    qstrs.add(getFromString());
     if (whereString != null) {
       qstrs.add(whereString);
     }
@@ -723,7 +668,70 @@ public class CubeQueryContext {
     return qstrs.toArray(new String[0]);
   }
 
-  private String toHQL(String tableName) {
+  private String getFromString() throws SemanticException {
+    String fromString = null;
+    if (joinTree == null) {
+      if (cube != null) {
+        fromString = storageTableToQuery.get(cube) + " " + getAliasForTabName(cube.getName());
+      } else {
+        CubeDimensionTable dim = dimensions.iterator().next();
+        fromString = storageTableToQuery.get(dim) + " " + getAliasForTabName(dim.getName());
+      }
+    } else {
+      StringBuilder builder = new StringBuilder();
+      getQLString(qb.getQbJoinTree(), builder);
+      fromString = builder.toString();
+    }
+    return fromString;
+  }
+
+  private void getQLString(QBJoinTree joinTree, StringBuilder builder) throws SemanticException {
+    if (joinTree.getBaseSrc()[0] == null) {
+      if (joinTree.getJoinSrc() != null) {
+        getQLString(joinTree.getJoinSrc(), builder);
+      }
+    } else { // joinTree.getBaseSrc()[0] != null
+      String tblName = joinTree.getBaseSrc()[0].toLowerCase();
+      builder.append(storageTableToQuery.get(cubeTbls.get(tblName)) + " " + getAliasForTabName(tblName));
+    }
+    if (joinTree.getJoinCond() != null) {
+      builder.append(getString(joinTree.getJoinCond()[0].getJoinType()));
+      builder.append("JOIN ");
+    }
+    if (joinTree.getBaseSrc()[1] == null) {
+      if (joinTree.getJoinSrc() != null) {
+        getQLString(joinTree.getJoinSrc(), builder);
+      }
+    } else { // joinTree.getBaseSrc()[1] != null
+      String tblName = joinTree.getBaseSrc()[1].toLowerCase();
+      builder.append(storageTableToQuery.get(cubeTbls.get(tblName)) + " " + getAliasForTabName(tblName));
+    }
+
+    String joinCond = joinConds.get(joinTree);
+    if (joinCond != null) {
+      builder.append(" ON ");
+      builder.append(joinCond);
+    } else {
+      throw new SemanticException("No join condition available");
+    }
+  }
+
+  private String getString(JoinType joinType) {
+    switch (joinType) {
+    case INNER: return " INNER ";
+    case LEFTOUTER: return " LEFT OUTER ";
+    case RIGHTOUTER: return " RIGHT OUTER ";
+    case FULLOUTER: return " FULL OUTER ";
+    case UNIQUE: return " UNIQUE ";
+    case LEFTSEMI: return " LEFT SEMI ";
+    }
+    return null;
+  }
+
+  private String toHQL(String tableName) throws SemanticException {
     String qfmt = getQueryFormat();
     System.out.println("qfmt:" + qfmt);
     return String.format(qfmt, getQueryTreeStrings(tableName));
@@ -798,7 +806,6 @@ public class CubeQueryContext {
     if (fact == null && !hasDimensionInQuery()) {
       throw new SemanticException("No valid fact table available");
     }
-    //print();
 
     if (fact != null) {
       Map<UpdatePeriod, List<String>> storageTableMap = factStorageMap.get(fact);
@@ -898,7 +905,6 @@ public class CubeQueryContext {
     }
 
     String[] split = StringUtils.split(col, ".");
-    System.out.println("Looking for col [" + col + "] split: " + split.length);
     if (split.length <= 1) {
       return cubeMeasureNames.contains(col);
     } else {
@@ -907,7 +913,6 @@ public class CubeQueryContext {
       if (cubeName.equalsIgnoreCase(cube.getName()) ||
           cubeName.equalsIgnoreCase(getAliasForTabName(cube.getName()))) {
         boolean ismeasure = cubeMeasureNames.contains(colName);
-        System.out.println(colName + " IS MEASURE? " + ismeasure);
         return cubeMeasureNames.contains(colName);
       } else {
         return false;
@@ -954,4 +959,12 @@ public class CubeQueryContext {
     return selectAST;
   }
 
+  public Map<QBJoinTree, String> getJoinConds() {
+    return joinConds;
+  }
+
+  public void setJoinCond(QBJoinTree qb, String cond) {
+    joinConds.put(qb, cond);
+  }
+
 }

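For reference, getQLString walks the QBJoinTree recursively: it emits the
left source (recursing into the nested join tree when baseSrc[0] is null),
then the join type and the JOIN keyword, then the right source, then the ON
condition recorded by JoinResolver. For a left-deep tree the generated FROM
string looks roughly like the following (storage table names here are
hypothetical):

    c1_testfact testcube INNER JOIN c1_citytable citytable
        ON testcube.cityid = citytable.id
    LEFT OUTER JOIN c1_statetable statetable
        ON statetable.id = citytable.stateid
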
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java?rev=1480157&r1=1480156&r2=1480157&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java Wed May  8 04:30:30 2013
@@ -1,13 +1,8 @@
 package org.apache.hadoop.hive.ql.cube.parse;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.JoinCond;
@@ -33,13 +28,13 @@ public class JoinResolver implements Con
     QB cubeQB = cubeql.getQB();
     if (cubeQB.getParseInfo().getJoinExpr() != null) {
       cubeQB.setQbJoinTree(genJoinTree(cubeQB,
-          cubeQB.getParseInfo().getJoinExpr()));
+          cubeQB.getParseInfo().getJoinExpr(), cubeql));
     } else {
       LOG.info("No join expr available");
     }
   }
 
-  private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree)
+  private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree, CubeQueryContext cubeql)
       throws SemanticException {
     QBJoinTree joinTree = new QBJoinTree();
     JoinCond[] condn = new JoinCond[1];
@@ -89,7 +84,7 @@ public class JoinResolver implements Con
       children[0] = alias;
       joinTree.setBaseSrc(children);
     } else if (isJoinToken(left)) {
-      QBJoinTree leftTree = genJoinTree(qb, left);
+      QBJoinTree leftTree = genJoinTree(qb, left, cubeql);
       joinTree.setJoinSrc(leftTree);
       String[] leftChildAliases = leftTree.getLeftAliases();
       String leftAliases[] = new String[leftChildAliases.length + 1];
@@ -126,91 +121,13 @@ public class JoinResolver implements Con
       assert false;
     }
 
-    ArrayList<ArrayList<ASTNode>> expressions = new ArrayList<
-        ArrayList<ASTNode>>();
-    expressions.add(new ArrayList<ASTNode>());
-    expressions.add(new ArrayList<ASTNode>());
-    joinTree.setExpressions(expressions);
-
-    ArrayList<Boolean> nullsafes = new ArrayList<Boolean>();
-    joinTree.setNullSafes(nullsafes);
-
-    ArrayList<ArrayList<ASTNode>> filters = new ArrayList<ArrayList<ASTNode>>();
-    filters.add(new ArrayList<ASTNode>());
-    filters.add(new ArrayList<ASTNode>());
-    joinTree.setFilters(filters);
-
-    ArrayList<ArrayList<ASTNode>> filtersForPushing =
-        new ArrayList<ArrayList<ASTNode>>();
-    filtersForPushing.add(new ArrayList<ASTNode>());
-    filtersForPushing.add(new ArrayList<ASTNode>());
-    joinTree.setFiltersForPushing(filtersForPushing);
-
     ASTNode joinCond = (ASTNode) joinParseTree.getChild(2);
-    ArrayList<String> leftSrc = new ArrayList<String>();
-    parseJoinCondition(joinTree, joinCond, leftSrc);
-    if (leftSrc.size() == 1) {
-      joinTree.setLeftAlias(leftSrc.get(0));
+    if (joinCond != null) {
+      cubeql.setJoinCond(joinTree, HQLParser.getString(joinCond));
     }
-
-    // check the hints to see if the user has specified a map-side join. This
-    // will be removed later on, once the cost-based
-    // infrastructure is in place
-    if (qb.getParseInfo().getHints() != null) {
-      List<String> mapSideTables = getMapSideJoinTables(qb);
-      List<String> mapAliases = joinTree.getMapAliases();
-
-      for (String mapTbl : mapSideTables) {
-        boolean mapTable = false;
-        for (String leftAlias : joinTree.getLeftAliases()) {
-          if (mapTbl.equalsIgnoreCase(leftAlias)) {
-            mapTable = true;
-          }
-        }
-        for (String rightAlias : joinTree.getRightAliases()) {
-          if (mapTbl.equalsIgnoreCase(rightAlias)) {
-            mapTable = true;
-          }
-        }
-
-        if (mapTable) {
-          if (mapAliases == null) {
-            mapAliases = new ArrayList<String>();
-          }
-          mapAliases.add(mapTbl);
-          joinTree.setMapSideJoin(true);
-        }
-      }
-
-      joinTree.setMapAliases(mapAliases);
-
-      parseStreamTables(joinTree, qb);
-    }
-
     return joinTree;
   }
 
-  private List<String> getMapSideJoinTables(QB qb) {
-    List<String> cols = new ArrayList<String>();
-    ASTNode hints = qb.getParseInfo().getHints();
-    for (int pos = 0; pos < hints.getChildCount(); pos++) {
-      ASTNode hint = (ASTNode) hints.getChild(pos);
-      if (((ASTNode) hint.getChild(0)).getToken().getType()
-          == HiveParser.TOK_MAPJOIN) {
-        ASTNode hintTblNames = (ASTNode) hint.getChild(1);
-        int numCh = hintTblNames.getChildCount();
-        for (int tblPos = 0; tblPos < numCh; tblPos++) {
-          String tblName = ((ASTNode) hintTblNames.getChild(tblPos)).getText()
-              .toLowerCase();
-          if (!cols.contains(tblName)) {
-            cols.add(tblName);
-          }
-        }
-      }
-    }
-    return cols;
-  }
-
   private boolean isJoinToken(ASTNode node) {
     if ((node.getToken().getType() == HiveParser.TOK_JOIN)
         || (node.getToken().getType() == HiveParser.TOK_LEFTOUTERJOIN)
@@ -222,302 +139,4 @@ public class JoinResolver implements Con
     }
     return false;
   }
-
-  private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond,
-      ArrayList<String> leftSrc) throws SemanticException {
-    if (joinCond == null) {
-      return;
-    }
-
-    JoinType type = joinTree.getJoinCond()[0].getJoinType();
-    switch (joinCond.getToken().getType()) {
-    case HiveParser.KW_OR:
-      throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_3
-          .getMsg(joinCond));
-
-    case HiveParser.KW_AND:
-      parseJoinCondition(joinTree, (ASTNode) joinCond.getChild(0), leftSrc);
-      parseJoinCondition(joinTree, (ASTNode) joinCond.getChild(1), leftSrc);
-      break;
-
-    case HiveParser.EQUAL_NS:
-    case HiveParser.EQUAL:
-      ASTNode leftCondn = (ASTNode) joinCond.getChild(0);
-      ArrayList<String> leftCondAl1 = new ArrayList<String>();
-      ArrayList<String> leftCondAl2 = new ArrayList<String>();
-      parseJoinCondPopulateAlias(joinTree, leftCondn, leftCondAl1, leftCondAl2,
-          null);
-
-      ASTNode rightCondn = (ASTNode) joinCond.getChild(1);
-      ArrayList<String> rightCondAl1 = new ArrayList<String>();
-      ArrayList<String> rightCondAl2 = new ArrayList<String>();
-      parseJoinCondPopulateAlias(joinTree, rightCondn, rightCondAl1,
-          rightCondAl2, null);
-
-      // is it a filter or a join condition
-      // if it is filter see if it can be pushed above the join
-      // filter cannot be pushed if
-      // * join is full outer or
-      // * join is left outer and filter is on left alias or
-      // * join is right outer and filter is on right alias
-      if (((leftCondAl1.size() != 0) && (leftCondAl2.size() != 0))
-          || ((rightCondAl1.size() != 0) && (rightCondAl2.size() != 0))) {
-        throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
-            .getMsg(joinCond));
-      }
-
-      if (leftCondAl1.size() != 0) {
-        if ((rightCondAl1.size() != 0)
-            || ((rightCondAl1.size() == 0) && (rightCondAl2.size() == 0))) {
-          if (type.equals(JoinType.LEFTOUTER) ||
-              type.equals(JoinType.FULLOUTER)) {
-            joinTree.getFilters().get(0).add(joinCond);
-          } else {
-            joinTree.getFiltersForPushing().get(0).add(joinCond);
-          }
-        } else if (rightCondAl2.size() != 0) {
-          populateAliases(leftCondAl1, leftCondAl2, leftCondn, joinTree,
-              leftSrc);
-          populateAliases(rightCondAl1, rightCondAl2, rightCondn, joinTree,
-              leftSrc);
-          boolean nullsafe = joinCond.getToken().getType() == HiveParser.EQUAL_NS;
-          joinTree.getNullSafes().add(nullsafe);
-        }
-      } else if (leftCondAl2.size() != 0) {
-        if ((rightCondAl2.size() != 0)
-            || ((rightCondAl1.size() == 0) && (rightCondAl2.size() == 0))) {
-          if (type.equals(JoinType.RIGHTOUTER)
-              || type.equals(JoinType.FULLOUTER)) {
-            joinTree.getFilters().get(1).add(joinCond);
-          } else {
-            joinTree.getFiltersForPushing().get(1).add(joinCond);
-          }
-        } else if (rightCondAl1.size() != 0) {
-          populateAliases(leftCondAl1, leftCondAl2, leftCondn, joinTree,
-              leftSrc);
-          populateAliases(rightCondAl1, rightCondAl2, rightCondn, joinTree,
-              leftSrc);
-          boolean nullsafe = joinCond.getToken().getType() ==
-              HiveParser.EQUAL_NS;
-          joinTree.getNullSafes().add(nullsafe);
-        }
-      } else if (rightCondAl1.size() != 0) {
-        if (type.equals(JoinType.LEFTOUTER)
-            || type.equals(JoinType.FULLOUTER)) {
-          joinTree.getFilters().get(0).add(joinCond);
-        } else {
-          joinTree.getFiltersForPushing().get(0).add(joinCond);
-        }
-      } else {
-        if (type.equals(JoinType.RIGHTOUTER)
-            || type.equals(JoinType.FULLOUTER)) {
-          joinTree.getFilters().get(1).add(joinCond);
-        } else {
-          joinTree.getFiltersForPushing().get(1).add(joinCond);
-        }
-      }
-
-      break;
-
-    default:
-      boolean isFunction = (joinCond.getType() == HiveParser.TOK_FUNCTION);
-
-      // Create all children
-      int childrenBegin = (isFunction ? 1 : 0);
-      ArrayList<ArrayList<String>> leftAlias = new ArrayList<ArrayList<String>>(
-          joinCond.getChildCount() - childrenBegin);
-      ArrayList<ArrayList<String>> rightAlias = new ArrayList<ArrayList<String>>(
-          joinCond.getChildCount() - childrenBegin);
-      for (int ci = 0; ci < joinCond.getChildCount() - childrenBegin; ci++) {
-        ArrayList<String> left = new ArrayList<String>();
-        ArrayList<String> right = new ArrayList<String>();
-        leftAlias.add(left);
-        rightAlias.add(right);
-      }
-
-      for (int ci = childrenBegin; ci < joinCond.getChildCount(); ci++) {
-        parseJoinCondPopulateAlias(joinTree, (ASTNode) joinCond.getChild(ci),
-            leftAlias.get(ci - childrenBegin), rightAlias.get(ci
-                - childrenBegin), null);
-      }
-
-      boolean leftAliasNull = true;
-      for (ArrayList<String> left : leftAlias) {
-        if (left.size() != 0) {
-          leftAliasNull = false;
-          break;
-        }
-      }
-
-      boolean rightAliasNull = true;
-      for (ArrayList<String> right : rightAlias) {
-        if (right.size() != 0) {
-          rightAliasNull = false;
-          break;
-        }
-      }
-
-      if (!leftAliasNull && !rightAliasNull) {
-        throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
-            .getMsg(joinCond));
-      }
-
-      if (!leftAliasNull) {
-        if (type.equals(JoinType.LEFTOUTER)
-            || type.equals(JoinType.FULLOUTER)) {
-           joinTree.getFilters().get(0).add(joinCond);
-        } else {
-          joinTree.getFiltersForPushing().get(0).add(joinCond);
-        }
-      } else {
-        if (type.equals(JoinType.RIGHTOUTER)
-            || type.equals(JoinType.FULLOUTER)) {
-          joinTree.getFilters().get(1).add(joinCond);
-        } else {
-          joinTree.getFiltersForPushing().get(1).add(joinCond);
-        }
-      }
-
-      break;
-    }
-  }
-
-  private boolean isPresent(String[] list, String elem) {
-    for (String s : list) {
-      if (s.toLowerCase().equals(elem)) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  private void parseJoinCondPopulateAlias(QBJoinTree joinTree, ASTNode condn,
-      ArrayList<String> leftAliases, ArrayList<String> rightAliases,
-      ArrayList<String> fields) throws SemanticException {
-    // String[] allAliases = joinTree.getAllAliases();
-    switch (condn.getToken().getType()) {
-    case HiveParser.TOK_TABLE_OR_COL:
-      String tableOrCol = SemanticAnalyzer.unescapeIdentifier(
-          condn.getChild(0).getText().toLowerCase());
-      if (isPresent(joinTree.getLeftAliases(), tableOrCol)) {
-        if (!leftAliases.contains(tableOrCol)) {
-          leftAliases.add(tableOrCol);
-        }
-      } else if (isPresent(joinTree.getRightAliases(), tableOrCol)) {
-        if (!rightAliases.contains(tableOrCol)) {
-          rightAliases.add(tableOrCol);
-        }
-      } else {
-        // We don't support columns without table prefix in JOIN condition right
-        // now.
-        // We need to pass Metadata here to know which table the column belongs
-        // to.
-        throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(condn
-            .getChild(0)));
-      }
-      break;
-
-    case HiveParser.Identifier:
-      // it may be a field name, return the identifier and let the caller decide
-      // whether it is or not
-      if (fields != null) {
-        fields.add(SemanticAnalyzer.unescapeIdentifier(
-            condn.getToken().getText().toLowerCase()));
-      }
-      break;
-    case HiveParser.Number:
-    case HiveParser.StringLiteral:
-    case HiveParser.TOK_STRINGLITERALSEQUENCE:
-    case HiveParser.TOK_CHARSETLITERAL:
-    case HiveParser.KW_TRUE:
-    case HiveParser.KW_FALSE:
-      break;
-
-    case HiveParser.TOK_FUNCTION:
-      // check all the arguments
-      for (int i = 1; i < condn.getChildCount(); i++) {
-        parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(i),
-            leftAliases, rightAliases, null);
-      }
-      break;
-
-    default:
-      // This is an operator - so check whether it is unary or binary operator
-      if (condn.getChildCount() == 1) {
-        parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(0),
-            leftAliases, rightAliases, null);
-      } else if (condn.getChildCount() == 2) {
-
-        ArrayList<String> fields1 = null;
-        // if it is a dot operator, remember the field name of the rhs of the
-        // left semijoin
-        if (joinTree.getNoSemiJoin() == false
-            && condn.getToken().getType() == HiveParser.DOT) {
-          // get the semijoin rhs table name and field name
-          fields1 = new ArrayList<String>();
-          int rhssize = rightAliases.size();
-          parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(0),
-              leftAliases, rightAliases, null);
-          String rhsAlias = null;
-
-          if (rightAliases.size() > rhssize) { // the new table is rhs table
-            rhsAlias = rightAliases.get(rightAliases.size() - 1);
-          }
-          parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(1),
-              leftAliases, rightAliases, fields1);
-          if (rhsAlias != null && fields1.size() > 0) {
-            joinTree.addRHSSemijoinColumns(rhsAlias, condn);
-          }
-        } else {
-          parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(0),
-              leftAliases, rightAliases, null);
-          parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(1),
-              leftAliases, rightAliases, fields1);
-        }
-      } else {
-        throw new SemanticException(condn.toStringTree() + " encountered with "
-            + condn.getChildCount() + " children");
-      }
-      break;
-    }
-  }
-
-  private void populateAliases(ArrayList<String> leftAliases,
-      ArrayList<String> rightAliases, ASTNode condn, QBJoinTree joinTree,
-      ArrayList<String> leftSrc) throws SemanticException {
-    if ((leftAliases.size() != 0) && (rightAliases.size() != 0)) {
-      throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
-          .getMsg(condn));
-    }
-    if (rightAliases.size() != 0) {
-      assert rightAliases.size() == 1;
-      joinTree.getExpressions().get(1).add(condn);
-    } else if (leftAliases.size() != 0) {
-      joinTree.getExpressions().get(0).add(condn);
-      for (String s : leftAliases) {
-        if (!leftSrc.contains(s)) {
-          leftSrc.add(s);
-        }
-      }
-    } else {
-      throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_2
-          .getMsg(condn));
-    }
-  }
-
-  private void parseStreamTables(QBJoinTree joinTree, QB qb) {
-    List<String> streamAliases = joinTree.getStreamAliases();
-    for (Node hintNode : qb.getParseInfo().getHints().getChildren()) {
-      ASTNode hint = (ASTNode) hintNode;
-      if (hint.getChild(0).getType() == HiveParser.TOK_STREAMTABLE) {
-        for (int i = 0; i < hint.getChild(1).getChildCount(); i++) {
-          if (streamAliases == null) {
-            streamAliases = new ArrayList<String>();
-          }
-          streamAliases.add(hint.getChild(1).getChild(i).getText());
-        }
-      }
-    }
-    joinTree.setStreamAliases(streamAliases);
-  }
 }

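With the generic condition parsing gone, JoinResolver only builds the alias
structure of the QBJoinTree; the raw ON expression is stringified via
HQLParser.getString and handed to CubeQueryContext.setJoinCond, where
getQLString later reads it back. A join written without an ON clause is
rejected at rewrite time, e.g. (time range elided):

    // Throws SemanticException("No join condition available"):
    driver.compileCubeQuery("select name, SUM(msr2) from testCube"
        + " join citytable"
        + " where time_range_in('<from>', '<to>')"
        + " group by name");
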
Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java?rev=1480157&r1=1480156&r2=1480157&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java Wed May  8 04:30:30 2013
@@ -163,7 +163,8 @@ public class CubeTestSetup {
         storageAggregatePeriods);
   }
 
-  private void createDimWithTwoStorages(CubeMetastoreClient client)
+  // formerly createDimWithTwoStorages
+  private void createCityTable(CubeMetastoreClient client)
       throws HiveException {
     String dimName = "citytable";
 
@@ -171,10 +172,12 @@ public class CubeTestSetup {
     dimColumns.add(new FieldSchema("id", "int", "code"));
     dimColumns.add(new FieldSchema("name", "string", "field1"));
     dimColumns.add(new FieldSchema("stateid", "int", "state id"));
+    dimColumns.add(new FieldSchema("zipcode", "int", "zip code"));
 
     Map<String, TableReference> dimensionReferences =
         new HashMap<String, TableReference>();
     dimensionReferences.put("stateid", new TableReference("statetable", "id"));
+    dimensionReferences.put("zipcode", new TableReference("ziptable", "code"));
 
     Storage hdfsStorage1 = new HDFSStorage("C1",
         TextInputFormat.class.getCanonicalName(),
@@ -190,14 +193,83 @@ public class CubeTestSetup {
         snapshotDumpPeriods);
   }
 
+  private void createZiptable(CubeMetastoreClient client) throws Exception {
+    String dimName = "ziptable";
+
+    List<FieldSchema>  dimColumns = new ArrayList<FieldSchema>();
+    dimColumns.add(new FieldSchema("code", "int", "code"));
+    dimColumns.add(new FieldSchema("f1", "string", "field1"));
+    dimColumns.add(new FieldSchema("f2", "string", "field2"));
+
+    Map<String, TableReference> dimensionReferences =
+        new HashMap<String, TableReference>();
+    Map<Storage, UpdatePeriod> snapshotDumpPeriods =
+        new HashMap<Storage, UpdatePeriod>();
+    Storage hdfsStorage = new HDFSStorage("C1",
+        TextInputFormat.class.getCanonicalName(),
+        HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    snapshotDumpPeriods.put(hdfsStorage, UpdatePeriod.HOURLY);
+    client.createCubeDimensionTable(dimName, dimColumns, dimensionReferences,
+        snapshotDumpPeriods);
+  }
+
+  private void createCountryTable(CubeMetastoreClient client) throws Exception {
+    String dimName = "countrytable";
+
+    List<FieldSchema>  dimColumns = new ArrayList<FieldSchema>();
+    dimColumns.add(new FieldSchema("id", "int", "code"));
+    dimColumns.add(new FieldSchema("name", "string", "field1"));
+    dimColumns.add(new FieldSchema("capital", "string", "field2"));
+    dimColumns.add(new FieldSchema("region", "string", "region name"));
+
+    Storage hdfsStorage = new HDFSStorage("C1",
+        TextInputFormat.class.getCanonicalName(),
+        HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    Map<String, TableReference> dimensionReferences =
+        new HashMap<String, TableReference>();
+    Map<Storage, UpdatePeriod> snapshotDumpPeriods =
+        new HashMap<Storage, UpdatePeriod>();
+    snapshotDumpPeriods.put(hdfsStorage, null);
+    client.createCubeDimensionTable(dimName, dimColumns, dimensionReferences,
+        snapshotDumpPeriods);
+  }
+
+  private void createStateTable(CubeMetastoreClient client) throws Exception {
+    String dimName = "statetable";
+
+    List<FieldSchema>  dimColumns = new ArrayList<FieldSchema>();
+    dimColumns.add(new FieldSchema("id", "int", "code"));
+    dimColumns.add(new FieldSchema("name", "string", "field1"));
+    dimColumns.add(new FieldSchema("capital", "string", "field2"));
+    dimColumns.add(new FieldSchema("countryid", "string", "region name"));
+
+    Map<String, TableReference> dimensionReferences =
+        new HashMap<String, TableReference>();
+    dimensionReferences.put("countryid", new TableReference("countrytable", "id"));
+
+    Storage hdfsStorage = new HDFSStorage("C1",
+        TextInputFormat.class.getCanonicalName(),
+        HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    Map<Storage, UpdatePeriod> snapshotDumpPeriods =
+        new HashMap<Storage, UpdatePeriod>();
+    snapshotDumpPeriods.put(hdfsStorage, UpdatePeriod.HOURLY);
+    client.createCubeDimensionTable(dimName, dimColumns, dimensionReferences,
+        snapshotDumpPeriods);
+  }
+
   public void createSources() throws Exception {
     CubeMetastoreClient client =  CubeMetastoreClient.getInstance(
         new HiveConf(this.getClass()));
     createCube(client);
     createCubeFact(client);
     createCubeFactOnlyHourly(client);
-    createDimWithTwoStorages(client);
+    createCityTable(client);
     createCubeFactMonthly(client);
+    createZiptable(client);
+    createCountryTable(client);
+    createStateTable(client);
   }
 
 }

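The dimension tables created above form a small reference chain that the
join tests traverse:

    citytable.stateid    -> statetable.id
    statetable.countryid -> countrytable.id
    citytable.zipcode    -> ziptable.code
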
Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java?rev=1480157&r1=1480156&r2=1480157&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java Wed May  8 04:30:30 2013
@@ -1,7 +1,6 @@
 package org.apache.hadoop.hive.ql.cube.processors;
 
 import java.text.SimpleDateFormat;
-import java.util.Arrays;
 import java.util.Calendar;
 import java.util.Date;
 
@@ -60,7 +59,7 @@ public class TestCubeDriver {
     Throwable th = null;
     try {
       String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-    		" where time_range_in('NOW - 2DAYS', 'NOW')");
+          " where time_range_in('NOW - 2DAYS', 'NOW')");
     } catch (SemanticException e) {
       th = e;
       e.printStackTrace();
@@ -75,9 +74,9 @@ public class TestCubeDriver {
     Throwable th = null;
     try {
       String hqlQuery = driver.compileCubeQuery("select dim1, SUM(msr2)" +
-        " from testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+          " from testCube" +
+          " where time_range_in('" + getDateUptoHours(twodaysBack)
+          + "','" + getDateUptoHours(now) + "')");
     } catch (SemanticException e) {
       th = e;
       e.printStackTrace();
@@ -111,6 +110,20 @@ public class TestCubeDriver {
         + "','" + getDateUptoHours(now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
+    hqlQuery = driver.compileCubeQuery("select statetable.name, SUM(msr2) from testCube"
+        + " join citytable on testCube.cityid = citytable.id"
+        + " left outer join statetable on statetable.id = citytable.stateid"
+        + " right outer join ziptable on citytable.zipcode = ziptable.code"
+        + " where time_range_in('" + getDateUptoHours(twodaysBack)
+        + "','" + getDateUptoHours(now) + "')");
+    System.out.println("cube hql:" + hqlQuery);
+
+    hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube"
+        + " join countrytable on testCube.countryid = countrytable.id"
+        + " where time_range_in('" + getDateUptoMonth(twoMonthsBack)
+        + "','" + getDateUptoMonth(now) + "')");
+    System.out.println("cube hql:" + hqlQuery);
+
   }
 
   @Test
@@ -171,7 +184,7 @@ public class TestCubeDriver {
         + "','" + getDateUptoHours(now) + "')"
         + " group by round(zipcode)");
     System.out.println("cube hql:" + hqlQuery);
-   */
+     */
   }
 
   @Test
@@ -186,13 +199,16 @@ public class TestCubeDriver {
         " where time_range_in('" + getDateUptoHours(twodaysBack)
         + "','" + getDateUptoHours(now) + "')");
     System.out.println("cube hql:" + hqlQuery);
+    try {
     hqlQuery = driver.compileCubeQuery("select name, SUM(msr2) from testCube" +
-    		" join citytable" +
+        " join citytable" +
         " where time_range_in('" + getDateUptoHours(twodaysBack)
         + "','" + getDateUptoHours(now) + "')" +
         " group by name");
     System.out.println("cube hql:" + hqlQuery);
-
+    } catch (SemanticException e) {
+      e.printStackTrace();
+    }
     hqlQuery = driver.compileCubeQuery("select SUM(mycube.msr2) from testCube mycube" +
         " where time_range_in('" + getDateUptoHours(twodaysBack)
         + "','" + getDateUptoHours(now) + "')");
@@ -241,6 +257,9 @@ public class TestCubeDriver {
     String hqlQuery = driver.compileCubeQuery("select name, stateid from citytable");
     System.out.println("cube hql:" + hqlQuery);
 
+    hqlQuery = driver.compileCubeQuery("select name, c.stateid from citytable c");
+    System.out.println("cube hql:" + hqlQuery);
+
     conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C2");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     hqlQuery = driver.compileCubeQuery("select name, stateid from citytable");
@@ -282,33 +301,28 @@ public class TestCubeDriver {
 
     String timeRange = " where  time_range_in('2013-05-01', '2013-05-03')";
     System.out.println("#$AGGREGATE_RESOLVER_ TIME_RANGE:" + timeRange);
-    String q1 = "SELECT countryid, testCube.msr2 from testCube " + timeRange;
-    String q2 = "SELECT countryid, testCube.msr2 * testCube.msr2 from testCube " + timeRange;
-    String q3 = "SELECT countryid, sum(testCube.msr2) from testCube " + timeRange;
-    String q4 = "SELECT countryid, sum(testCube.msr2) from testCube "  + timeRange
+    String q1 = "SELECT cityid, testCube.msr2 from testCube " + timeRange;
+    String q2 = "SELECT cityid, testCube.msr2 * testCube.msr2 from testCube " + timeRange;
+    String q3 = "SELECT cityid, sum(testCube.msr2) from testCube " + timeRange;
+    String q4 = "SELECT cityid, sum(testCube.msr2) from testCube "  + timeRange
         + " having testCube.msr2 > 100";
-    String q5 = "SELECT countryid, testCube.msr2 from testCube " + timeRange
+    String q5 = "SELECT cityid, testCube.msr2 from testCube " + timeRange
         + " having testCube.msr2 + testCube.msr2 > 100";
-    String q6 = "SELECT countryid, testCube.msr2 from testCube " + timeRange
+    String q6 = "SELECT cityid, testCube.msr2 from testCube " + timeRange
         + " having testCube.msr2 > 100 AND testCube.msr2 < 100";
-    String q7 = "SELECT countryid, sum(testCube.msr2) from testCube " + timeRange
+    String q7 = "SELECT cityid, sum(testCube.msr2) from testCube " + timeRange
         + " having (testCube.msr2 > 100) OR (testcube.msr2 < 100 AND SUM(testcube.msr3) > 1000)";
 
     String tests[] = {q1, q2, q3, q4, q5, q6, q7};
 
-    int exceptions[] = new int[tests.length];
-    for (int i = 0; i < tests.length; i++) {
-      String hql = null;
-      try {
-        hql = driver.compileCubeQuery(tests[i]);
-      } catch (SemanticException exc) {
-        exceptions[i] = i;
-        exc.printStackTrace();
+    try {
+      for (int i = 0; i < tests.length; i++) {
+        String hql = driver.compileCubeQuery(tests[i]);
+        System.out.println("cube hql:" + hql);
       }
-      System.out.println("##----AGGREGATE_RESOLVER_CUBEQL----#" + i + " [" + tests[i] + " ]");
-      System.out.println("##----AGGREGATE_RESOLVER_HQL-----#" + i + " [ " + hql + " ]");
+    } catch (SemanticException e) {
+      e.printStackTrace();
     }
-    System.out.println("##---AGGREGATE_RESOLVER_ exceptions=" + Arrays.toString(exceptions) );
   }
 
 }