Posted to commits@hive.apache.org by am...@apache.org on 2013/03/14 09:16:25 UTC

svn commit: r1456361 - in /hive/branches/HIVE-4115/ql/src: java/org/apache/hadoop/hive/ql/ java/org/apache/hadoop/hive/ql/cube/parse/ java/org/apache/hadoop/hive/ql/cube/processors/ java/org/apache/hadoop/hive/ql/processors/ test/org/apache/hadoop/hive...

Author: amareshwari
Date: Thu Mar 14 08:16:24 2013
New Revision: 1456361

URL: http://svn.apache.org/r1456361
Log:
Add cube query processing

Added:
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AliasReplacer.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CandidateTableResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckColumnMapping.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckDateRange.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckTableNames.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ContextRewriter.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeSemanticAnalyzer.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtils.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/HQLParser.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastDimensionResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastPartitionResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ValidationRule.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java
Modified:
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1456361&r1=1456360&r2=1456361&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Thu Mar 14 08:16:24 2013
@@ -110,7 +110,7 @@ import org.apache.hadoop.util.Reflection
 public class Driver implements CommandProcessor {
 
   static final private Log LOG = LogFactory.getLog(Driver.class.getName());
-  static final private LogHelper console = new LogHelper(LOG);
+  static final protected LogHelper console = new LogHelper(LOG);
 
   private static final Object compileMonitor = new Object();
   
@@ -124,8 +124,8 @@ public class Driver implements CommandPr
   private Schema schema;
   private HiveLockManager hiveLockMgr;
 
-  private String errorMessage;
-  private String SQLState;
+  protected String errorMessage;
+  protected String SQLState;
 
   // A limit on the number of threads that can be launched
   private int maxthreads;
@@ -327,6 +327,10 @@ public class Driver implements CommandPr
     }
   }
 
+  protected HiveConf getConf() {
+    return conf;
+  }
+
   /**
    * Compile a new query. Any currently-planned query associated with this Driver is discarded.
    *

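The visibility changes above (console, errorMessage and SQLState widened from private to protected, plus the new getConf() accessor) prepare Driver for subclassing; the CubeDriver added in this commit builds on them. A minimal sketch of how a subclass can use these members follows; the class body, method name, and error codes are illustrative only, not the actual CubeDriver code:

    package org.apache.hadoop.hive.ql.cube.processors;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;

    public class CubeDriver extends Driver {

      // Hypothetical entry point showing the newly-exposed members
      public int compileCubeQuery(String command) {
        try {
          HiveConf conf = getConf(); // accessor added in this revision
          console.printInfo("Rewriting cube query: " + command);
          // ... rewrite the cube query to plain HQL here, then delegate ...
          return compile(command);
        } catch (Exception e) {
          // errorMessage and SQLState are protected now, so subclasses can set them
          errorMessage = "FAILED: " + e.getMessage();
          SQLState = "42000"; // illustrative value
          return 40000;       // illustrative error code
        }
      }
    }
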
Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,18 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import org.apache.hadoop.conf.Configuration;
+
+public class AggregateResolver implements ContextRewriter {
+
+  public AggregateResolver(Configuration conf) {
+    // TODO Auto-generated constructor stub
+  }
+
+  @Override
+  public void rewriteContext(CubeQueryContext cubeql) {
+    // TODO
+    // replace select and having columns with default aggregate functions on
+    // them, if default aggregate is defined
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AliasReplacer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AliasReplacer.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AliasReplacer.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AliasReplacer.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,26 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import org.apache.hadoop.conf.Configuration;
+
+public class AliasReplacer implements ContextRewriter {
+
+  public AliasReplacer(Configuration conf) {
+    // TODO Auto-generated constructor stub
+  }
+
+  @Override
+  public void rewriteContext(CubeQueryContext cubeql) {
+
+    // Rewrite all the columns in the query with the table alias prefixed.
+    // If col1 of table tab1 is accessed, it is changed to tab1.col1.
+    // If tab1 is already aliased, say as t1, col1 is changed to t1.col1.
+    // Replace the columns in select, groupby, having and orderby clauses by
+    // prepending the table alias to the column.
+    // Sample select trees:
+    // 1: (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key))
+    // (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL value))))
+    // 2: (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key))
+    // (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) value))))
+    // 3: (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) srckey))))
+  }
+}

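For example, once this rewriter is implemented, the query "select col1, count(col2) from tab1" would be rewritten to "select tab1.col1, count(tab1.col2) from tab1", and with an alias in play ("from tab1 t1") it would become "select t1.col1, count(t1.col2) from tab1 t1".
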
Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CandidateTableResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CandidateTableResolver.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CandidateTableResolver.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CandidateTableResolver.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,19 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+public class CandidateTableResolver implements ContextRewriter {
+
+  public CandidateTableResolver(Configuration conf) {
+    // TODO Auto-generated constructor stub
+  }
+
+  @Override
+  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+    // TODO Auto-generated method stub
+    // Find the candidate fact tables that can answer the query and
+    // add them to cubeql
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckColumnMapping.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckColumnMapping.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckColumnMapping.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckColumnMapping.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,18 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+public class CheckColumnMapping extends ValidationRule {
+
+  public CheckColumnMapping(Configuration conf) {
+    super(conf);
+  }
+
+  @Override
+  public boolean validate(CubeQueryContext ctx) throws SemanticException {
+    // TODO
+    return true;
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckDateRange.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckDateRange.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckDateRange.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckDateRange.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,31 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import java.util.Date;
+
+import org.apache.hadoop.conf.Configuration;
+
+public class CheckDateRange extends ValidationRule {
+
+  public CheckDateRange(Configuration conf) {
+    super(conf);
+  }
+
+  @Override
+  public boolean validate(CubeQueryContext ctx) {
+    Date from = ctx.getFromDate();
+    Date to = ctx.getToDate();
+
+    if (from == null || to == null) {
+      error = "From or to date is missing";
+      return false;
+    }
+
+    if (from.compareTo(to) > 0) {
+      error = "From date is after the to date";
+      return false;
+    }
+
+    return true;
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckTableNames.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckTableNames.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckTableNames.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CheckTableNames.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,18 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+public class CheckTableNames extends ValidationRule {
+
+  public CheckTableNames(Configuration conf) {
+    super(conf);
+  }
+
+  @Override
+  public boolean validate(CubeQueryContext ctx) throws SemanticException {
+    // TODO
+    return true;
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ContextRewriter.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ContextRewriter.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ContextRewriter.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ContextRewriter.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,7 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+public interface ContextRewriter {
+  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException;
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,437 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.KW_AND;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_FUNCTION;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.cube.metadata.Cube;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeDimensionTable;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeFactTable;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeMetastoreClient;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.JoinCond;
+import org.apache.hadoop.hive.ql.parse.QB;
+import org.apache.hadoop.hive.ql.parse.QBJoinTree;
+import org.apache.hadoop.hive.ql.parse.QBParseInfo;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
+
+public class CubeQueryContext {
+  public static final String TIME_RANGE_FUNC = "time_range_in";
+  public static final String NOW = "now";
+  public static final String DEFAULT_TABLE = "_default_";
+
+  private final ASTNode ast;
+  private final QB qb;
+  private final HiveConf conf;
+  private String fromDateRaw;
+  private String toDateRaw;
+  private Cube cube;
+  private Set<CubeDimensionTable> dimensions = new HashSet<CubeDimensionTable>();
+  private Set<CubeFactTable> candidateFactTables = new HashSet<CubeFactTable>();
+  private final Map<String, List<String>> tblToColumns = new HashMap<String, List<String>>();
+  private Date timeFrom;
+  private Date timeTo;
+  private String clauseName = null;
+  private Map<String, List<String>> partitionCols;
+
+  public CubeQueryContext(ASTNode ast, QB qb, HiveConf conf)
+      throws SemanticException {
+    this.ast = ast;
+    this.qb = qb;
+    this.conf = conf;
+    //extractTimeRange();
+    //extractMetaTables();
+  }
+
+  public CubeQueryContext(CubeQueryContext other) {
+    this.ast = other.ast;
+    this.qb = other.cloneqb();
+    this.conf = other.conf;
+    this.fromDateRaw = other.fromDateRaw;
+    this.toDateRaw = other.toDateRaw;
+    this.dimensions = other.dimensions;
+    this.candidateFactTables = other.candidateFactTables;
+    this.timeFrom = other.timeFrom;
+    this.timeTo = other.timeTo;
+    this.partitionCols = other.partitionCols;
+  }
+
+  private QB cloneqb() {
+    //TODO do deep copy of QB
+    return qb;
+  }
+
+  private void extractMetaTables() throws SemanticException {
+    try {
+      CubeMetastoreClient client = CubeMetastoreClient.getInstance(conf);
+      List<String> tabAliases = new ArrayList<String>(qb.getTabAliases());
+      for (String alias :  tabAliases) {
+        String tblName = qb.getTabNameForAlias(alias);
+        if (client.isCube(tblName)) {
+          if (cube != null) {
+            if (!cube.getName().equals(tblName)) {
+              throw new SemanticException("More than one cube accessed in query");
+            }
+          }
+          cube = client.getCube(tblName);
+        } else if (client.isDimensionTable(tblName)) {
+          dimensions.add(client.getDimensionTable(tblName));
+        }
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(e);
+    }
+  }
+
+  private String getClause() {
+    if (clauseName == null) {
+      TreeSet<String> ks = new TreeSet<String>(qb.getParseInfo().getClauseNames());
+      clauseName = ks.first();
+    }
+    return clauseName;
+  }
+
+  private void extractTimeRange() throws SemanticException {
+    // Get the time range.
+    // The time range should be a direct child of the where condition
+    // (TOK_WHERE.TOK_FUNCTION.Identifier), or the right hand child of an
+    // AND condition (TOK_WHERE.KW_AND.TOK_FUNCTION.Identifier).
+    ASTNode whereTree = qb.getParseInfo().getWhrForClause(clauseName);
+    if (whereTree == null || whereTree.getChildCount() < 1) {
+      throw new SemanticException("No filter specified");
+    }
+    ASTNode timenode = null;
+    if (TOK_FUNCTION == whereTree.getChild(0).getType()) {
+      // expect only time range
+      timenode = HQLParser.findNodeByPath(whereTree, TOK_FUNCTION);
+    } else if (KW_AND == whereTree.getChild(0).getType()){
+      // expect time condition as the right sibling of KW_AND
+      timenode = HQLParser.findNodeByPath(whereTree, KW_AND, TOK_FUNCTION);
+    }
+
+    if (timenode == null) {
+      throw new SemanticException("Unable to get time range");
+    }
+
+    ASTNode fname = HQLParser.findNodeByPath(timenode, Identifier);
+    if (!TIME_RANGE_FUNC.equalsIgnoreCase(fname.getText())) {
+      throw new SemanticException("Expected time range as " + TIME_RANGE_FUNC);
+    }
+
+    fromDateRaw =
+        PlanUtils.stripQuotes(timenode.getChild(1).getText());
+    if (timenode.getChildCount() > 2) {
+      toDateRaw = PlanUtils.stripQuotes(
+          timenode.getChild(2).getText());
+    }
+    Date now = new Date();
+
+    try {
+      timeFrom = DateUtils.resolveDate(fromDateRaw, now);
+      timeTo = DateUtils.resolveDate(toDateRaw, now);
+    } catch (HiveException e) {
+      throw new SemanticException(e);
+    }
+  }
+
+/*  private void extractColumns() {
+    //columnAliases = new ArrayList<String>();
+    tblToColumns = new LinkedHashMap<String, List<String>>();
+    ASTNode selectTree = qb.getParseInfo().getSelForClause(clauseName);
+
+    // Check if its 'select *  from...'
+    if (selectTree.getChildCount() == 1) {
+      ASTNode star = HQLParser.findNodeByPath(selectTree, TOK_SELEXPR,
+          TOK_ALLCOLREF);
+      if (star == null) {
+        star = HQLParser.findNodeByPath(selectTree, TOK_SELEXPR,
+            TOK_FUNCTIONSTAR);
+      }
+
+      if (star != null) {
+        int starType = star.getToken().getType();
+        if (TOK_FUNCTIONSTAR == starType || TOK_ALLCOLREF == starType) {
+          selectAllColumns = true;
+        }
+      }
+    }
+
+    // Traverse select, where, groupby, having and orderby trees to get column
+    // names
+    ASTNode trees[] = { selectTree, whereTree, groupbyTree,
+        havingTree, orderByTree};
+
+    for (ASTNode tree : trees) {
+      if (tree == null) {
+        continue;
+      }
+      // Traverse the tree to get column names
+      // We are doing a complete traversal so that expressions of columns
+      // are also captured ex: f(cola + colb/tab1.colc)
+      HQLParser.bft(tree, new ASTNodeVisitor() {
+        @Override
+        public void visit(TreeNode visited) {
+          ASTNode node = visited.getNode();
+          ASTNode parent = null;
+          if (visited.getParent() != null) {
+            parent = visited.getParent().getNode();
+          }
+
+          if (node.getToken().getType() == TOK_TABLE_OR_COL
+              && (parent != null && parent.getToken().getType() != DOT)) {
+            // Take child ident.totext
+            ASTNode ident = (ASTNode) node.getChild(0);
+            List<String> colList = tblToColumns.get(DEFAULT_TABLE);
+            if (colList == null) {
+              colList = new ArrayList<String>();
+              tblToColumns.put(DEFAULT_TABLE, colList);
+            }
+            if (!colList.contains(ident.getText())) {
+              colList.add(ident.getText());
+            }
+          } else if (node.getToken().getType() == DOT) {
+            // This is for the case where column name is prefixed by table name
+            // or table alias
+            // For example 'select fact.id, dim2.id ...'
+            // Right child is the column name, left child.ident is table name
+            ASTNode tabident = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL,
+                Identifier);
+            ASTNode colIdent = (ASTNode) node.getChild(1);
+
+            String column = colIdent.getText();
+            String table = tabident.getText();
+            List<String> colList = tblToColumns.get(table);
+            if (colList == null) {
+              colList = new ArrayList<String>();
+              tblToColumns.put(table, colList);
+            }
+            if (!colList.contains(column)) {
+              colList.add(column);
+            }
+          } else if (node.getToken().getType() == TOK_SELEXPR) {
+            // Extract column aliases for the result set, only applies to select
+            // trees
+            ASTNode alias = HQLParser.findNodeByPath(node, Identifier);
+            if (alias != null) {
+              columnAliases.add(alias.getText());
+            }
+          }
+        }
+      });
+    }
+  }
+*/
+  public String getFromDateRaw() {
+    return fromDateRaw;
+  }
+
+  public Cube getCube() {
+    return cube;
+  }
+
+  public String getToDateRaw() {
+    return toDateRaw;
+  }
+
+  public Date getFromDate() {
+    return timeFrom;
+  }
+
+  public Date getToDate() {
+    return timeTo;
+  }
+
+  public QB getQB() {
+    return qb;
+  }
+
+  public Set<CubeFactTable> getFactTables() {
+    return candidateFactTables;
+  }
+
+  public Set<CubeDimensionTable> getDimensionTables() {
+    return dimensions;
+  }
+
+  public void print() {
+    StringBuilder builder = new StringBuilder();
+    builder.append("ASTNode:" + ast.dump() + "\n");
+    builder.append("QB:");
+    builder.append("\n numJoins:" + qb.getNumJoins());
+    builder.append("\n numGbys:" + qb.getNumGbys());
+    builder.append("\n numSels:" + qb.getNumSels());
+    builder.append("\n numSelDis:" + qb.getNumSelDi());
+    builder.append("\n aliasToTabs:");
+    Set<String> tabAliases = qb.getTabAliases();
+    for (String alias : tabAliases) {
+      builder.append("\n\t" + alias + ":" + qb.getTabNameForAlias(alias));
+    }
+    builder.append("\n aliases:");
+    for (String alias : qb.getAliases()) {
+      builder.append(alias);
+      builder.append(", ");
+    }
+    builder.append("id:" + qb.getId());
+    builder.append("isQuery:" + qb.getIsQuery());
+    builder.append("\n QBParseInfo");
+    QBParseInfo parseInfo = qb.getParseInfo();
+    builder.append("\n isSubQ: " + parseInfo.getIsSubQ());
+    builder.append("\n alias: " + parseInfo.getAlias());
+    if (parseInfo.getJoinExpr() != null) {
+      builder.append("\n joinExpr: " + parseInfo.getJoinExpr().dump());
+    }
+    builder.append("\n hints: " + parseInfo.getHints());
+    builder.append("\n aliasToSrc: ");
+    for (String alias : tabAliases) {
+      builder.append("\n\t" + alias +": " + parseInfo.getSrcForAlias(alias).dump());
+    }
+    TreeSet<String> clauses = new TreeSet<String>(parseInfo.getClauseNames());
+    for (String clause : clauses) {
+      builder.append("\n\t" + clause + ": " + parseInfo.getClauseNamesForDest());
+    }
+    String clause = clauses.first();
+    if (parseInfo.getWhrForClause(clause) != null) {
+      builder.append("\n whereexpr: " + parseInfo.getWhrForClause(clause).dump());
+    }
+    if (parseInfo.getGroupByForClause(clause) != null) {
+      builder.append("\n groupby expr: " + parseInfo.getGroupByForClause(clause).dump());
+    }
+    if (parseInfo.getSelForClause(clause) != null) {
+      builder.append("\n sel expr: " + parseInfo.getSelForClause(clause).dump());
+    }
+    if (parseInfo.getHavingForClause(clause) != null) {
+      builder.append("\n having expr: " + parseInfo.getHavingForClause(clause).dump());
+    }
+    if (parseInfo.getDestLimit(clause) != null) {
+      builder.append("\n limit: " + parseInfo.getDestLimit(clause));
+    }
+    if (parseInfo.getAllExprToColumnAlias() != null && !parseInfo.getAllExprToColumnAlias().isEmpty()) {
+      builder.append("\n exprToColumnAlias:");
+      for (Map.Entry<ASTNode, String> entry : parseInfo.getAllExprToColumnAlias().entrySet()) {
+        builder.append("\n\t expr: " + entry.getKey().dump() + " ColumnAlias: " + entry.getValue());
+      }
+    }
+    //builder.append("\n selectStar: " + parseInfo.isSelectStarQuery());
+    if (parseInfo.getAggregationExprsForClause(clause) != null) {
+      builder.append("\n aggregateexprs:");
+      for (Map.Entry<String, ASTNode> entry : parseInfo.getAggregationExprsForClause(clause).entrySet()) {
+        builder.append("\n\t key: " + entry.getKey() + " expr: " + entry.getValue().dump());
+      }
+    }
+    if (parseInfo.getDistinctFuncExprsForClause(clause) != null) {
+      builder.append("\n distinctFuncExprs:");
+      for (ASTNode entry : parseInfo.getDistinctFuncExprsForClause(clause)) {
+        builder.append("\n\t expr: " + entry.dump());
+      }
+    }
+
+    if(qb.getQbJoinTree() != null) {
+      builder.append("\n\n JoinTree");
+      QBJoinTree joinTree = qb.getQbJoinTree();
+      printJoinTree(joinTree, builder);
+    }
+    System.out.println(builder.toString());
+  }
+
+  void printJoinTree(QBJoinTree joinTree, StringBuilder builder) {
+    builder.append("leftAlias:" + joinTree.getLeftAlias());
+    if (joinTree.getLeftAliases() != null) {
+      builder.append("\n leftAliases:");
+      for (String alias: joinTree.getLeftAliases()) {
+        builder.append("\n\t " + alias);
+      }
+    }
+    if (joinTree.getRightAliases() != null) {
+      builder.append("\n rightAliases:");
+      for (String alias: joinTree.getRightAliases()) {
+        builder.append("\n\t " + alias);
+      }
+    }
+    if (joinTree.getJoinSrc() != null) {
+      builder.append("\n JoinSrc: {");
+      printJoinTree(joinTree.getJoinSrc(), builder);
+      builder.append("\n }");
+    }
+    if (joinTree.getBaseSrc() != null) {
+      builder.append("\n baseSrcs:");
+      for (String src: joinTree.getBaseSrc()) {
+        builder.append("\n\t " + src);
+      }
+    }
+    builder.append("\n noOuterJoin: " + joinTree.getNoOuterJoin());
+    builder.append("\n noSemiJoin: " + joinTree.getNoSemiJoin());
+    builder.append("\n mapSideJoin: " + joinTree.isMapSideJoin());
+    if (joinTree.getJoinCond() != null) {
+      builder.append("\n joinConds:");
+      for (JoinCond cond: joinTree.getJoinCond()) {
+        builder.append("\n\t left: " + cond.getLeft() + " right: "
+            + cond.getRight() + " type:" + cond.getJoinType()
+            + " preserved:" + cond.getPreserved());
+      }
+    }
+    if (joinTree.getExpressions() != null) {
+      builder.append("\n join expressions:");
+      for (ArrayList<ASTNode> exprs: joinTree.getExpressions()) {
+        builder.append("\n\t exprs:");
+        for (ASTNode expr : exprs) {
+          builder.append("\n\t\t expr:" + expr.dump());
+        }
+      }
+    }
+    if (joinTree.getFilters() != null) {
+      builder.append("\n join filters:");
+      for (ArrayList<ASTNode> exprs: joinTree.getFilters()) {
+        builder.append("\n\t filters:");
+        for (ASTNode expr : exprs) {
+          builder.append("\n\t\t expr:" + expr.dump());
+        }
+      }
+    }
+    if (joinTree.getFiltersForPushing() != null) {
+      builder.append("\n join filtersForPushing: ");
+      for (ArrayList<ASTNode> exprs: joinTree.getFiltersForPushing()) {
+        builder.append("\n\t filters:");
+        for (ASTNode expr : exprs) {
+          builder.append("\n\t\t expr:" + expr.dump());
+        }
+      }
+    }
+
+    if (joinTree.getNullSafes() != null) {
+      builder.append("\n join nullsafes: ");
+      for (Boolean bool: joinTree.getNullSafes()) {
+        builder.append("\n\t " + bool);
+      }
+    }
+    if (joinTree.getMapAliases() != null) {
+      builder.append("\n join mapaliases: ");
+      for (String alias : joinTree.getMapAliases()) {
+        builder.append("\n\t " + alias);
+      }
+    }
+    if (joinTree.getStreamAliases() != null) {
+      builder.append("\n join streamaliases: ");
+      for (String alias : joinTree.getStreamAliases()) {
+        builder.append("\n\t " + alias);
+      }
+    }
+  }
+
+  public String toHQL() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+}

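As the extractTimeRange comments above describe, the time_range_in function is accepted either as the entire where clause or as the right hand child of a top-level AND, with relative or absolute dates as arguments (resolved by DateUtils). Illustrative query shapes, with made-up cube, column, and filter names:

    select dim1, sum(msr1) from samplecube
    where time_range_in('NOW-7DAY', 'NOW')

    select dim1, sum(msr1) from samplecube
    where dim2 = 'abc' and time_range_in('NOW-1MONTH', 'NOW')
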
Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,29 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.QB;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+public class CubeQueryContextWithStorage extends CubeQueryContext {
+
+  private final List<String> supportedStorages;
+
+  public CubeQueryContextWithStorage(ASTNode ast, QB qb,
+      List<String> supportedStorages) throws SemanticException {
+    super(ast, qb, null);
+    this.supportedStorages = supportedStorages;
+  }
+
+  public CubeQueryContextWithStorage(CubeQueryContext cubeql,
+      List<String> supportedStorages) {
+    super(cubeql);
+    this.supportedStorages = supportedStorages;
+  }
+
+  public List<String> getStorageNames() {
+    return supportedStorages;
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,74 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+public class CubeQueryRewriter {
+  private final Configuration conf;
+  private final List<ContextRewriter> phase1Rewriters =
+      new ArrayList<ContextRewriter>();
+  private final List<ContextRewriter> phase2Rewriters =
+      new ArrayList<ContextRewriter>();
+
+  public CubeQueryRewriter(Configuration conf) {
+    this.conf = conf;
+    setupPhase1Rewriters();
+    setupPhase2Rewriters();
+  }
+
+  private void setupPhase1Rewriters() {
+    //Resolve joins and generate base join tree
+    phase1Rewriters.add(new JoinResolver(conf));
+    //Resolve aggregations and generate base select tree
+    phase1Rewriters.add(new AggregateResolver(conf));
+    phase1Rewriters.add(new GroupbyResolver(conf));
+    //Rewrite base trees (groupby, having, orderby, limit) using aliases
+    phase1Rewriters.add(new AliasReplacer(conf));
+    phase1Rewriters.add(new CandidateTableResolver(conf));
+    //Resolve partition columns and table names
+    phase1Rewriters.add(new PartitionResolver(conf));
+  }
+
+  private void setupPhase2Rewriters() {
+    phase2Rewriters.add(new StorageTableResolver(conf));
+    phase2Rewriters.add(new LeastPartitionResolver(conf));
+    phase2Rewriters.add(new LeastDimensionResolver(conf));
+  }
+
+  public CubeQueryContext rewritePhase1(String cubeql)
+      throws SemanticException, ParseException {
+    CubeSemanticAnalyzer analyzer = new CubeSemanticAnalyzer(
+        new HiveConf(conf, HiveConf.class));
+    analyzer.analyzeInternal(HQLParser.parseHQL(cubeql));
+    CubeQueryContext ctx = analyzer.getQueryContext();
+    rewrite(phase1Rewriters, ctx);
+    return ctx;
+  }
+
+  public CubeQueryContext rewritePhase2(CubeQueryContext cubeql,
+      List<String> storages) throws SemanticException {
+    CubeQueryContextWithStorage ctx = new CubeQueryContextWithStorage(
+        cubeql, storages);
+    rewrite(phase2Rewriters, ctx);
+    return ctx;
+  }
+
+  private void rewrite(List<ContextRewriter> rewriters, CubeQueryContext ctx)
+      throws SemanticException {
+    for (ContextRewriter rewriter : rewriters) {
+      rewriter.rewriteContext(ctx);
+    }
+  }
+
+  public static void main(String[] args) throws SemanticException, ParseException {
+    CubeQueryRewriter writer = new CubeQueryRewriter(new Configuration());
+    writer.rewritePhase1("select * from cube");
+  }
+}

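The main() above exercises phase 1 only. A sketch of how the two phases are intended to compose follows; the class and storage names are hypothetical, and since most resolvers are still stubs at this revision the pipeline does not yet produce a rewritten query:

    package org.apache.hadoop.hive.ql.cube.parse;

    import java.util.Arrays;

    import org.apache.hadoop.conf.Configuration;

    public class CubeRewriteExample { // hypothetical example class
      public static void main(String[] args) throws Exception {
        CubeQueryRewriter rewriter = new CubeQueryRewriter(new Configuration());

        // Phase 1: parse and semantically analyze the query, then run the
        // phase 1 rewriters (joins, aggregates, groupby, aliases, partitions)
        CubeQueryContext ctx = rewriter.rewritePhase1(
            "select * from cube where time_range_in('NOW-7DAY', 'NOW')");

        // Phase 2: resolve storage tables against the supported storages
        // (the storage names here are illustrative)
        CubeQueryContext resolved = rewriter.rewritePhase2(ctx,
            Arrays.asList("storage1", "storage2"));

        // toHQL() is still a stub at this revision and returns null
        String hql = resolved.toHQL();
      }
    }
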
Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeSemanticAnalyzer.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeSemanticAnalyzer.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeSemanticAnalyzer.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,72 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.QB;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+public class CubeSemanticAnalyzer extends SemanticAnalyzer {
+  private final HiveConf conf;
+  private final List<ValidationRule> validationRules = new ArrayList<ValidationRule>();
+  private CubeQueryContext cubeQl;
+
+  public CubeSemanticAnalyzer(HiveConf conf) throws SemanticException {
+    super(conf);
+    this.conf = conf;
+    setupRules();
+  }
+
+  private void setupRules() {
+    validationRules.add(new CheckTableNames(conf));
+    validationRules.add(new CheckDateRange(conf));
+    validationRules.add(new CheckColumnMapping(conf));
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode ast) throws SemanticException {
+    reset();
+    QB qb = new QB(null, null, false);
+    // do not allow create table/view commands
+    // TODO Move this to a validation rule
+    if (ast.getToken().getType() == HiveParser.TOK_CREATETABLE ||
+        ast.getToken().getType() == HiveParser.TOK_CREATEVIEW) {
+      throw new SemanticException("Create table/view is not allowed");
+    }
+
+    // Analyze from the ASTNode.
+    if (!doPhase1(ast, qb, initPhase1Ctx())) {
+      // if the phase 1 result is false, return
+      return;
+    }
+    cubeQl = new CubeQueryContext(ast, qb, conf);
+    //cubeQl.init();
+    //validate();
+
+    // TODO Move this to a validation Rule
+    //QBParseInfo qbp = qb.getParseInfo();
+    //TreeSet<String> ks = new TreeSet<String>(qbp.getClauseNames());
+    //if (ks.size() > 1) {
+    //  throw new SemanticException("nested/sub queries not allowed yet");
+    //}
+    //Operator sinkOp = genPlan(qb);
+    //System.out.println(sinkOp.toString());
+  }
+
+  @Override
+  public void validate() throws SemanticException {
+    for (ValidationRule rule : validationRules) {
+      if (!rule.validate(cubeQl)) {
+        break;
+      }
+    }
+  }
+
+  public CubeQueryContext getQueryContext() {
+    return cubeQl;
+  }
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtils.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtils.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtils.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,123 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.log4j.Logger;
+
+import com.google.common.base.Strings;
+
+public class DateUtils {
+  public static final Logger LOG = Logger.getLogger(DateUtils.class);
+
+  /*
+   * NOW -> new java.util.Date()
+   * NOW-7DAY -> a date one week earlier
+   * NOW (+/-) <NUM><UNIT>
+   * or hardcoded dates in the ABSDATE_FMT format: dd-MMM-yyyy HH:mm:ss,SSS Z
+   */
+  public static final String RELATIVE = "(now){1}";
+  public static final Pattern P_RELATIVE = Pattern.compile(RELATIVE, Pattern.CASE_INSENSITIVE);
+
+  public static final String WSPACE = "\\s+";
+  public static final Pattern P_WSPACE = Pattern.compile(WSPACE);
+
+  public static final String SIGNAGE = "\\+|\\-";
+  public static final Pattern P_SIGNAGE = Pattern.compile(SIGNAGE);
+
+  public static final String QUANTITY = "\\d+";
+  public static final Pattern P_QUANTITY = Pattern.compile(QUANTITY);
+
+  public static final String UNIT = "year|month|week|day|hour|minute|second";
+  public static final Pattern P_UNIT = Pattern.compile(UNIT, Pattern.CASE_INSENSITIVE);
+
+
+  public static final String RELDATE_VALIDATOR_STR = RELATIVE
+      + "(" + WSPACE + ")?"
+      + "((" + SIGNAGE +")"
+      + "(" + WSPACE + ")?"
+      + "(" + QUANTITY + ")(" + UNIT + ")){0,1}"
+      +"(s?)";
+
+  public static final Pattern RELDATE_VALIDATOR = Pattern.compile(RELDATE_VALIDATOR_STR, Pattern.CASE_INSENSITIVE);
+
+  public static final String ABSDATE_FMT = "dd-MMM-yyyy HH:mm:ss,SSS Z";
+  public static final SimpleDateFormat ABSDATE_PARSER = new SimpleDateFormat(ABSDATE_FMT);
+
+  public static String formatDate(Date dt) {
+    return ABSDATE_PARSER.format(dt);
+  }
+
+  public static Date resolveDate(String str, Date now) throws HiveException {
+    if (RELDATE_VALIDATOR.matcher(str).matches()) {
+      return resolveRelativeDate(str, now);
+    } else {
+      try {
+        return ABSDATE_PARSER.parse(str);
+      } catch (ParseException e) {
+        LOG.error("Invalid date format. Expected only " + ABSDATE_FMT + ", date provided: " + str, e);
+        throw new HiveException("Date parsing error. Expected format " + ABSDATE_FMT
+            + ", date provided: " + str
+            + ", failed because: " + e.getMessage());
+      }
+    }
+  }
+
+  private static Date resolveRelativeDate(String str, Date now) throws HiveException {
+    if (Strings.isNullOrEmpty(str)) {
+      throw new HiveException("date value cannot be null or empty:" + str);
+    }
+    // Get rid of whitespace
+    String raw = str.replaceAll(WSPACE, "").replaceAll(RELATIVE, "");
+
+    if (raw.isEmpty()) { // String is just "now"
+      return now;
+    }
+
+    Matcher qtyMatcher = P_QUANTITY.matcher(raw);
+    int qty = 1;
+    if (qtyMatcher.find()) {
+      qty = Integer.parseInt(qtyMatcher.group());
+    }
+
+    Matcher signageMatcher = P_SIGNAGE.matcher(raw);
+    if (signageMatcher.find()) {
+      String sign = signageMatcher.group();
+      if ("-".equals(sign)) {
+        qty = -qty;
+      }
+    }
+
+    Matcher unitMatcher = P_UNIT.matcher(raw);
+    Calendar calendar = Calendar.getInstance();
+    calendar.setTime(now);
+
+    if (unitMatcher.find()) {
+      String unit = unitMatcher.group().toLowerCase();
+      if ("year".equals(unit)) {
+        calendar.add(Calendar.YEAR, qty);
+      } else if ("month".equals(unit)) {
+        calendar.add(Calendar.MONTH, qty);
+      } else if ("week".equals(unit)) {
+        calendar.add(Calendar.DAY_OF_MONTH, 7 * qty);
+      } else if ("day".equals(unit)) {
+        calendar.add(Calendar.DAY_OF_MONTH, qty);
+      } else if ("hour".equals(unit)) {
+        calendar.add(Calendar.HOUR_OF_DAY, qty);
+      } else if ("minute".equals(unit)) {
+        calendar.add(Calendar.MINUTE, qty);
+      } else if ("second".equals(unit)) {
+        calendar.add(Calendar.SECOND, qty);
+      } else {
+        throw new HiveException("invalid time unit: "+ unit);
+      }
+    }
+
+    return calendar.getTime();
+  }
+}

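A short usage sketch for the date resolution above; the example values are arbitrary, and resolveDate falls back to ABSDATE_PARSER for any string that does not match the relative-date pattern:

    package org.apache.hadoop.hive.ql.cube.parse;

    import java.util.Date;

    public class DateUtilsExample { // hypothetical example class
      public static void main(String[] args) throws Exception {
        Date now = new Date();

        // Relative dates, anchored at the supplied 'now'
        Date weekAgo = DateUtils.resolveDate("NOW-7DAY", now);
        Date inTwoHours = DateUtils.resolveDate("now + 2hour", now);

        // Absolute dates must match ABSDATE_FMT: dd-MMM-yyyy HH:mm:ss,SSS Z
        Date fixed = DateUtils.resolveDate("01-Jan-2013 00:00:00,000 +0000", now);

        System.out.println(DateUtils.formatDate(weekAgo));
      }
    }
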
Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,19 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+public class GroupbyResolver implements ContextRewriter {
+
+  public GroupbyResolver(Configuration conf) {
+    // TODO Auto-generated constructor stub
+  }
+
+  @Override
+  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+    // TODO Auto-generated method stub
+    // Process aggregations by making sure that all group by keys are
+    // projected, and all projection fields are added to the group by key list.
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/HQLParser.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/HQLParser.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/HQLParser.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/HQLParser.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,264 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import java.lang.reflect.Field;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.Queue;
+import java.util.Set;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
+
+import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.parse.ParseUtils;
+
+public class HQLParser {
+  
+  public static interface ASTNodeVisitor {
+    public void visit(TreeNode node);
+  }
+  
+  public static class TreeNode {
+    final TreeNode parent;
+    final ASTNode node;
+    public TreeNode(TreeNode parent, ASTNode node) {
+      this.parent = parent;
+      this.node = node;
+    }
+    
+    public TreeNode getParent() {
+      return parent;
+    }
+    
+    public ASTNode getNode() {
+      return node;
+    }
+  }
+  
+  public static final Set<Integer> BINARY_OPERATORS;
+  
+  static {
+    HashSet<Integer> ops = new HashSet<Integer>();
+    ops.add(DOT);
+    ops.add(KW_AND);
+    ops.add(KW_OR);
+    ops.add(EQUAL);
+    ops.add(NOTEQUAL);
+    ops.add(GREATERTHAN);
+    ops.add(GREATERTHANOREQUALTO);
+    ops.add(LESSTHAN);
+    ops.add(LESSTHANOREQUALTO);
+    ops.add(PLUS);
+    ops.add(MINUS);
+    ops.add(STAR);
+    ops.add(DIVIDE);
+    ops.add(MOD);
+    ops.add(KW_LIKE);
+    BINARY_OPERATORS = Collections.unmodifiableSet(ops);
+  }
+  
+  public static ASTNode parseHQL(String query) throws ParseException {
+    ParseDriver driver = new ParseDriver();
+    ASTNode tree = driver.parse(query);
+    tree = ParseUtils.findRootNonNullToken(tree);
+    printAST(tree);
+    return tree;
+  }
+  
+  public static void printAST(ASTNode node) {
+    try {
+      printAST(getHiveTokenMapping(), node, 0, 0);
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+  }
+  
+  /**
+   * Debug function for printing a query AST to stdout.
+   * @param tokenMapping mapping from token type id to token name
+   * @param node node to print
+   * @param level current depth in the tree
+   * @param child index of this node among its siblings
+   */
+  public static void printAST(Map<Integer, String> tokenMapping, ASTNode node,
+      int level, int child) {
+    if (node == null || node.isNil()) {
+      return;
+    }
+    
+    for (int i = 0; i < level; i++) {
+      System.out.print("  ");
+    }
+
+    System.out.print(node.getText() + " [" + tokenMapping.get(
+        node.getToken().getType()) + "]");
+    System.out.print(" (l"+level + "c" + child + ")");
+    
+    if (node.getChildCount() > 0) {
+      System.out.println(" {");
+      
+      for (int i = 0; i < node.getChildCount(); i++) {
+        Tree tree = node.getChild(i);
+        if (tree instanceof ASTNode) {
+          printAST(tokenMapping, (ASTNode) tree, level + 1, i+1);
+        } else {
+          System.out.println("NON ASTNode");
+        }
+        System.out.println();
+      }
+      
+      for (int i = 0; i < level; i++) {
+        System.out.print("  ");
+      }
+      
+      System.out.print("}");
+      
+    } else {
+      System.out.print('$');
+    }
+  }
+  
+  public static Map<Integer, String> getHiveTokenMapping() throws Exception {
+    Map<Integer, String> mapping = new HashMap<Integer, String>();
+    
+    for (Field f : HiveParser.class.getFields()) {
+      if (f.getType() == int.class) {
+        Integer tokenId = f.getInt(null);
+        String token = f.getName();
+        mapping.put(tokenId, token);
+      }
+    }
+    
+    return mapping;
+  }
+  
+
+  /**
+   * Find a node in the tree rooted at root, given the path of token types
+   * from the root's children to the desired node.
+   *
+   * @param root root of the tree to search
+   * @param path token types, starting at the level of root's children
+   * @return the matching node, or null if no node matches the path
+   */
+  public static ASTNode findNodeByPath(ASTNode root, int... path) {
+    for (int i = 0; i < path.length; i++) {
+      int type = path[i];
+      boolean hasChildWithType = false;
+      
+      for (int j = 0; j < root.getChildCount(); j++) {
+        ASTNode node = (ASTNode) root.getChild(j);
+        if (node.getToken().getType() == type) {
+          hasChildWithType = true;
+          root = node;
+          // If this is the last type in path, return this node
+          if (i == path.length - 1) {
+            return root;
+          } else {
+            // Go to next level
+            break;
+          }
+        } else {
+          // Go to next sibling.
+          continue;
+        }
+      }
+      
+      if (!hasChildWithType) {
+        // No path from this level
+        break;
+      }
+    }
+    
+    return null;
+  }
+  
+  /**
+   * Breadth first traversal of AST
+   * @param root
+   * @param visitor
+   */
+  public static void bft(ASTNode root, ASTNodeVisitor visitor) {
+    if (root == null) {
+      throw new NullPointerException("Root cannot be null");
+    }
+    
+    if (visitor == null) {
+      throw new NullPointerException("Visitor cannot be null");
+    }
+    Queue<TreeNode> queue = new LinkedList<TreeNode>();
+    queue.add(new TreeNode(null, root));
+    
+    while (!queue.isEmpty()) {
+      TreeNode node = queue.poll();
+      visitor.visit(node);
+      ASTNode astNode = node.getNode();
+      for (int i = 0; i < astNode.getChildCount(); i++) {
+        queue.offer(new TreeNode (node, (ASTNode)astNode.getChild(i)) );
+      }
+    }
+  }
+  
+  /**
+   * Recursively reconstruct query string given a query AST
+   * @param root
+   * @param buf preallocated builder where the reconstructed string will
+   *  be written
+   */
+  public static void toInfixString(ASTNode root, StringBuilder buf) {
+    if (root == null) {
+      return;
+    }
+    int rootType = root.getToken().getType();
+    // Operand, print contents
+    if (Identifier == rootType || Number == rootType ||
+        StringLiteral == rootType) {
+      buf.append(' ').append(root.getText()).append(' ');
+    } else if (BINARY_OPERATORS.contains(
+          Integer.valueOf(root.getToken().getType()))) {
+        buf.append("(");
+        toInfixString((ASTNode)root.getChild(0), buf);
+        buf.append(' ').append(root.getText()).append(' ');
+        toInfixString((ASTNode) root.getChild(1), buf);
+        buf.append(")");
+    } else if (TOK_FUNCTION == root.getToken().getType()) {
+        String fname = ((ASTNode) root.getChild(0)).getText();
+        buf.append(fname).append("(");
+        for (int i = 1; i < root.getChildCount(); i++) {
+          toInfixString((ASTNode) root.getChild(i), buf);
+          if (i != root.getChildCount() -1) {
+            buf.append(", ");
+          }
+        }
+        buf.append(")");
+    } else if (TOK_SELECT == rootType) {
+      for (int i = 0; i < root.getChildCount(); i++) {
+        toInfixString((ASTNode) root.getChild(i), buf);
+        if (i != root.getChildCount() -1) {
+          buf.append(", ");
+        }
+      } 
+    } else {
+      for (int i = 0; i < root.getChildCount(); i++) {
+        toInfixString((ASTNode) root.getChild(i), buf);
+      }
+    }
+  }
+  
+  public static void main(String[] args) throws Exception {
+    ASTNode ast = parseHQL("select * from default_table");
+    
+    printAST(getHiveTokenMapping(), ast, 0, 0);
+  }
+
+  public static String getString(ASTNode tree) {
+    StringBuilder buf = new StringBuilder();
+    toInfixString(tree, buf);
+    return buf.toString();
+  }
+}
\ No newline at end of file

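A usage sketch for the parser helpers above. The token-type path assumes Hive's standard AST layout, where the select and where clauses hang off TOK_INSERT under the TOK_QUERY root; the example class and query are illustrative:

    package org.apache.hadoop.hive.ql.cube.parse;

    import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_INSERT;
    import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_SELECT;
    import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_WHERE;

    import org.apache.hadoop.hive.ql.parse.ASTNode;

    public class HQLParserExample { // hypothetical example class
      public static void main(String[] args) throws Exception {
        ASTNode root = HQLParser.parseHQL(
            "select key, count(value) from src where key > 10");

        // Navigate by token-type path from the root's children
        ASTNode select = HQLParser.findNodeByPath(root, TOK_INSERT, TOK_SELECT);
        ASTNode where = HQLParser.findNodeByPath(root, TOK_INSERT, TOK_WHERE);

        // Reconstruct infix strings from the subtrees
        System.out.println(HQLParser.getString(select));
        System.out.println(HQLParser.getString(where));
      }
    }
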
Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,523 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.JoinCond;
+import org.apache.hadoop.hive.ql.parse.JoinType;
+import org.apache.hadoop.hive.ql.parse.QB;
+import org.apache.hadoop.hive.ql.parse.QBJoinTree;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+public class JoinResolver implements ContextRewriter {
+
+  private static final Log LOG = LogFactory.getLog(JoinResolver.class);
+
+  public JoinResolver(Configuration conf) {
+  }
+
+  @Override
+  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+    resolveJoins(cubeql);
+  }
+
+  public void resolveJoins(CubeQueryContext cubeql) throws SemanticException {
+    QB cubeQB = cubeql.getQB();
+    if (cubeQB.getParseInfo().getJoinExpr() != null) {
+      cubeQB.setQbJoinTree(genJoinTree(cubeQB,
+          cubeQB.getParseInfo().getJoinExpr()));
+    } else {
+      LOG.info("No join expr available");
+    }
+  }
+
+  private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree)
+      throws SemanticException {
+    QBJoinTree joinTree = new QBJoinTree();
+    JoinCond[] condn = new JoinCond[1];
+
+    switch (joinParseTree.getToken().getType()) {
+    case HiveParser.TOK_LEFTOUTERJOIN:
+      joinTree.setNoOuterJoin(false);
+      condn[0] = new JoinCond(0, 1, JoinType.LEFTOUTER);
+      break;
+    case HiveParser.TOK_RIGHTOUTERJOIN:
+      joinTree.setNoOuterJoin(false);
+      condn[0] = new JoinCond(0, 1, JoinType.RIGHTOUTER);
+      break;
+    case HiveParser.TOK_FULLOUTERJOIN:
+      joinTree.setNoOuterJoin(false);
+      condn[0] = new JoinCond(0, 1, JoinType.FULLOUTER);
+      break;
+    case HiveParser.TOK_LEFTSEMIJOIN:
+      joinTree.setNoSemiJoin(false);
+      condn[0] = new JoinCond(0, 1, JoinType.LEFTSEMI);
+      break;
+    default:
+      condn[0] = new JoinCond(0, 1, JoinType.INNER);
+      joinTree.setNoOuterJoin(true);
+      break;
+    }
+
+    joinTree.setJoinCond(condn);
+
+    ASTNode left = (ASTNode) joinParseTree.getChild(0);
+    ASTNode right = (ASTNode) joinParseTree.getChild(1);
+
+    if ((left.getToken().getType() == HiveParser.TOK_TABREF)
+        || (left.getToken().getType() == HiveParser.TOK_SUBQUERY)) {
+      String tableName = SemanticAnalyzer.getUnescapedUnqualifiedTableName(
+          (ASTNode) left.getChild(0))
+          .toLowerCase();
+      String alias = left.getChildCount() == 1 ? tableName
+          : SemanticAnalyzer.unescapeIdentifier(
+              left.getChild(left.getChildCount() - 1)
+              .getText().toLowerCase());
+      joinTree.setLeftAlias(alias);
+      String[] leftAliases = new String[1];
+      leftAliases[0] = alias;
+      joinTree.setLeftAliases(leftAliases);
+      String[] children = new String[2];
+      children[0] = alias;
+      joinTree.setBaseSrc(children);
+    } else if (isJoinToken(left)) {
+      QBJoinTree leftTree = genJoinTree(qb, left);
+      joinTree.setJoinSrc(leftTree);
+      String[] leftChildAliases = leftTree.getLeftAliases();
+      String leftAliases[] = new String[leftChildAliases.length + 1];
+      for (int i = 0; i < leftChildAliases.length; i++) {
+        leftAliases[i] = leftChildAliases[i];
+      }
+      leftAliases[leftChildAliases.length] = leftTree.getRightAliases()[0];
+      joinTree.setLeftAliases(leftAliases);
+    } else {
+      assert (false);
+    }
+
+    if ((right.getToken().getType() == HiveParser.TOK_TABREF)
+        || (right.getToken().getType() == HiveParser.TOK_SUBQUERY)) {
+      String tableName = SemanticAnalyzer.getUnescapedUnqualifiedTableName(
+          (ASTNode) right.getChild(0)).toLowerCase();
+      String alias = right.getChildCount() == 1 ? tableName
+          : SemanticAnalyzer.unescapeIdentifier(right.getChild(
+              right.getChildCount() - 1).getText().toLowerCase());
+      String[] rightAliases = new String[1];
+      rightAliases[0] = alias;
+      joinTree.setRightAliases(rightAliases);
+      String[] children = joinTree.getBaseSrc();
+      if (children == null) {
+        children = new String[2];
+      }
+      children[1] = alias;
+      joinTree.setBaseSrc(children);
+      // remember rhs table for semijoin
+      if (joinTree.getNoSemiJoin() == false) {
+        joinTree.addRHSSemijoin(alias);
+      }
+    } else {
+      assert false;
+    }
+
+    ArrayList<ArrayList<ASTNode>> expressions = new ArrayList<
+        ArrayList<ASTNode>>();
+    expressions.add(new ArrayList<ASTNode>());
+    expressions.add(new ArrayList<ASTNode>());
+    joinTree.setExpressions(expressions);
+
+    ArrayList<Boolean> nullsafes = new ArrayList<Boolean>();
+    joinTree.setNullSafes(nullsafes);
+
+    ArrayList<ArrayList<ASTNode>> filters = new ArrayList<ArrayList<ASTNode>>();
+    filters.add(new ArrayList<ASTNode>());
+    filters.add(new ArrayList<ASTNode>());
+    joinTree.setFilters(filters);
+
+    ArrayList<ArrayList<ASTNode>> filtersForPushing =
+        new ArrayList<ArrayList<ASTNode>>();
+    filtersForPushing.add(new ArrayList<ASTNode>());
+    filtersForPushing.add(new ArrayList<ASTNode>());
+    joinTree.setFiltersForPushing(filtersForPushing);
+
+    ASTNode joinCond = (ASTNode) joinParseTree.getChild(2);
+    ArrayList<String> leftSrc = new ArrayList<String>();
+    parseJoinCondition(joinTree, joinCond, leftSrc);
+    if (leftSrc.size() == 1) {
+      joinTree.setLeftAlias(leftSrc.get(0));
+    }
+
+    // Check the hints to see if the user has specified a map-side join. This
+    // check will be removed once the cost-based optimizer infrastructure is
+    // in place.
+    if (qb.getParseInfo().getHints() != null) {
+      List<String> mapSideTables = getMapSideJoinTables(qb);
+      List<String> mapAliases = joinTree.getMapAliases();
+
+      for (String mapTbl : mapSideTables) {
+        boolean mapTable = false;
+        for (String leftAlias : joinTree.getLeftAliases()) {
+          if (mapTbl.equalsIgnoreCase(leftAlias)) {
+            mapTable = true;
+          }
+        }
+        for (String rightAlias : joinTree.getRightAliases()) {
+          if (mapTbl.equalsIgnoreCase(rightAlias)) {
+            mapTable = true;
+          }
+        }
+
+        if (mapTable) {
+          if (mapAliases == null) {
+            mapAliases = new ArrayList<String>();
+          }
+          mapAliases.add(mapTbl);
+          joinTree.setMapSideJoin(true);
+        }
+      }
+
+      joinTree.setMapAliases(mapAliases);
+
+      parseStreamTables(joinTree, qb);
+    }
+
+    return joinTree;
+  }
+
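+  // Returns the lower-cased, de-duplicated table names listed in MAPJOIN
+  // hints.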
+  private List<String> getMapSideJoinTables(QB qb) {
+    List<String> cols = new ArrayList<String>();
+    ASTNode hints = qb.getParseInfo().getHints();
+    for (int pos = 0; pos < hints.getChildCount(); pos++) {
+      ASTNode hint = (ASTNode) hints.getChild(pos);
+      if (((ASTNode) hint.getChild(0)).getToken().getType()
+          == HiveParser.TOK_MAPJOIN) {
+        ASTNode hintTblNames = (ASTNode) hint.getChild(1);
+        int numCh = hintTblNames.getChildCount();
+        for (int tblPos = 0; tblPos < numCh; tblPos++) {
+          String tblName = ((ASTNode) hintTblNames.getChild(tblPos)).getText()
+              .toLowerCase();
+          if (!cols.contains(tblName)) {
+            cols.add(tblName);
+          }
+        }
+      }
+    }
+    return cols;
+  }
+
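+  // Whether the node is one of the join tokens handled by genJoinTree.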
+  private boolean isJoinToken(ASTNode node) {
+    if ((node.getToken().getType() == HiveParser.TOK_JOIN)
+        || (node.getToken().getType() == HiveParser.TOK_LEFTOUTERJOIN)
+        || (node.getToken().getType() == HiveParser.TOK_RIGHTOUTERJOIN)
+        || (node.getToken().getType() == HiveParser.TOK_FULLOUTERJOIN)
+        || (node.getToken().getType() == HiveParser.TOK_LEFTSEMIJOIN)
+        || (node.getToken().getType() == HiveParser.TOK_UNIQUEJOIN)) {
+      return true;
+    }
+    return false;
+  }
+
+  private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond,
+      ArrayList<String> leftSrc) throws SemanticException {
+    if (joinCond == null) {
+      return;
+    }
+
+    JoinType type = joinTree.getJoinCond()[0].getJoinType();
+    switch (joinCond.getToken().getType()) {
+    case HiveParser.KW_OR:
+      throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_3
+          .getMsg(joinCond));
+
+    case HiveParser.KW_AND:
+      parseJoinCondition(joinTree, (ASTNode) joinCond.getChild(0), leftSrc);
+      parseJoinCondition(joinTree, (ASTNode) joinCond.getChild(1), leftSrc);
+      break;
+
+    case HiveParser.EQUAL_NS:
+    case HiveParser.EQUAL:
+      ASTNode leftCondn = (ASTNode) joinCond.getChild(0);
+      ArrayList<String> leftCondAl1 = new ArrayList<String>();
+      ArrayList<String> leftCondAl2 = new ArrayList<String>();
+      parseJoinCondPopulateAlias(joinTree, leftCondn, leftCondAl1, leftCondAl2,
+          null);
+
+      ASTNode rightCondn = (ASTNode) joinCond.getChild(1);
+      ArrayList<String> rightCondAl1 = new ArrayList<String>();
+      ArrayList<String> rightCondAl2 = new ArrayList<String>();
+      parseJoinCondPopulateAlias(joinTree, rightCondn, rightCondAl1,
+          rightCondAl2, null);
+
+      // Is this a filter or a join condition? If it is a filter, see whether
+      // it can be pushed above the join. A filter cannot be pushed if:
+      // * the join is a full outer join, or
+      // * the join is a left outer join and the filter is on the left alias, or
+      // * the join is a right outer join and the filter is on the right alias
+      if (((leftCondAl1.size() != 0) && (leftCondAl2.size() != 0))
+          || ((rightCondAl1.size() != 0) && (rightCondAl2.size() != 0))) {
+        throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
+            .getMsg(joinCond));
+      }
+
+      if (leftCondAl1.size() != 0) {
+        if ((rightCondAl1.size() != 0)
+            || ((rightCondAl1.size() == 0) && (rightCondAl2.size() == 0))) {
+          if (type.equals(JoinType.LEFTOUTER) ||
+              type.equals(JoinType.FULLOUTER)) {
+            joinTree.getFilters().get(0).add(joinCond);
+          } else {
+            joinTree.getFiltersForPushing().get(0).add(joinCond);
+          }
+        } else if (rightCondAl2.size() != 0) {
+          populateAliases(leftCondAl1, leftCondAl2, leftCondn, joinTree,
+              leftSrc);
+          populateAliases(rightCondAl1, rightCondAl2, rightCondn, joinTree,
+              leftSrc);
+          boolean nullsafe = joinCond.getToken().getType() == HiveParser.EQUAL_NS;
+          joinTree.getNullSafes().add(nullsafe);
+        }
+      } else if (leftCondAl2.size() != 0) {
+        if ((rightCondAl2.size() != 0)
+            || ((rightCondAl1.size() == 0) && (rightCondAl2.size() == 0))) {
+          if (type.equals(JoinType.RIGHTOUTER)
+              || type.equals(JoinType.FULLOUTER)) {
+            joinTree.getFilters().get(1).add(joinCond);
+          } else {
+            joinTree.getFiltersForPushing().get(1).add(joinCond);
+          }
+        } else if (rightCondAl1.size() != 0) {
+          populateAliases(leftCondAl1, leftCondAl2, leftCondn, joinTree,
+              leftSrc);
+          populateAliases(rightCondAl1, rightCondAl2, rightCondn, joinTree,
+              leftSrc);
+          boolean nullsafe = joinCond.getToken().getType() ==
+              HiveParser.EQUAL_NS;
+          joinTree.getNullSafes().add(nullsafe);
+        }
+      } else if (rightCondAl1.size() != 0) {
+        if (type.equals(JoinType.LEFTOUTER)
+            || type.equals(JoinType.FULLOUTER)) {
+          joinTree.getFilters().get(0).add(joinCond);
+        } else {
+          joinTree.getFiltersForPushing().get(0).add(joinCond);
+        }
+      } else {
+        if (type.equals(JoinType.RIGHTOUTER)
+            || type.equals(JoinType.FULLOUTER)) {
+          joinTree.getFilters().get(1).add(joinCond);
+        } else {
+          joinTree.getFiltersForPushing().get(1).add(joinCond);
+        }
+      }
+
+      break;
+
+    default:
+      boolean isFunction = (joinCond.getType() == HiveParser.TOK_FUNCTION);
+
+      // Create all children
+      int childrenBegin = (isFunction ? 1 : 0);
+      ArrayList<ArrayList<String>> leftAlias = new ArrayList<ArrayList<String>>(
+          joinCond.getChildCount() - childrenBegin);
+      ArrayList<ArrayList<String>> rightAlias = new ArrayList<ArrayList<String>>(
+          joinCond.getChildCount() - childrenBegin);
+      for (int ci = 0; ci < joinCond.getChildCount() - childrenBegin; ci++) {
+        ArrayList<String> left = new ArrayList<String>();
+        ArrayList<String> right = new ArrayList<String>();
+        leftAlias.add(left);
+        rightAlias.add(right);
+      }
+
+      for (int ci = childrenBegin; ci < joinCond.getChildCount(); ci++) {
+        parseJoinCondPopulateAlias(joinTree, (ASTNode) joinCond.getChild(ci),
+            leftAlias.get(ci - childrenBegin), rightAlias.get(ci
+                - childrenBegin), null);
+      }
+
+      boolean leftAliasNull = true;
+      for (ArrayList<String> left : leftAlias) {
+        if (left.size() != 0) {
+          leftAliasNull = false;
+          break;
+        }
+      }
+
+      boolean rightAliasNull = true;
+      for (ArrayList<String> right : rightAlias) {
+        if (right.size() != 0) {
+          rightAliasNull = false;
+          break;
+        }
+      }
+
+      if (!leftAliasNull && !rightAliasNull) {
+        throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
+            .getMsg(joinCond));
+      }
+
+      if (!leftAliasNull) {
+        if (type.equals(JoinType.LEFTOUTER)
+            || type.equals(JoinType.FULLOUTER)) {
+          joinTree.getFilters().get(0).add(joinCond);
+        } else {
+          joinTree.getFiltersForPushing().get(0).add(joinCond);
+        }
+      } else {
+        if (type.equals(JoinType.RIGHTOUTER)
+            || type.equals(JoinType.FULLOUTER)) {
+          joinTree.getFilters().get(1).add(joinCond);
+        } else {
+          joinTree.getFiltersForPushing().get(1).add(joinCond);
+        }
+      }
+
+      break;
+    }
+  }
+
+  private boolean isPresent(String[] list, String elem) {
+    for (String s : list) {
+      if (s.toLowerCase().equals(elem)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
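+  // Walks a join-condition expression tree, recording each referenced table
+  // alias against the side (left or right) of the join it belongs to. When
+  // 'fields' is non-null, bare identifiers (possible field names) are
+  // collected there for the caller to interpret.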
+  private void parseJoinCondPopulateAlias(QBJoinTree joinTree, ASTNode condn,
+      ArrayList<String> leftAliases, ArrayList<String> rightAliases,
+      ArrayList<String> fields) throws SemanticException {
+    switch (condn.getToken().getType()) {
+    case HiveParser.TOK_TABLE_OR_COL:
+      String tableOrCol = SemanticAnalyzer.unescapeIdentifier(
+          condn.getChild(0).getText().toLowerCase());
+      if (isPresent(joinTree.getLeftAliases(), tableOrCol)) {
+        if (!leftAliases.contains(tableOrCol)) {
+          leftAliases.add(tableOrCol);
+        }
+      } else if (isPresent(joinTree.getRightAliases(), tableOrCol)) {
+        if (!rightAliases.contains(tableOrCol)) {
+          rightAliases.add(tableOrCol);
+        }
+      } else {
+        // Columns without a table prefix are not supported in a JOIN
+        // condition right now; we would need metadata here to know which
+        // table the column belongs to.
+        throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(condn
+            .getChild(0)));
+      }
+      break;
+
+    case HiveParser.Identifier:
+      // It may be a field name; return the identifier and let the caller
+      // decide whether it is one.
+      if (fields != null) {
+        fields.add(SemanticAnalyzer.unescapeIdentifier(
+            condn.getToken().getText().toLowerCase()));
+      }
+      break;
+    case HiveParser.Number:
+    case HiveParser.StringLiteral:
+    case HiveParser.TOK_STRINGLITERALSEQUENCE:
+    case HiveParser.TOK_CHARSETLITERAL:
+    case HiveParser.KW_TRUE:
+    case HiveParser.KW_FALSE:
+      break;
+
+    case HiveParser.TOK_FUNCTION:
+      // check all the arguments
+      for (int i = 1; i < condn.getChildCount(); i++) {
+        parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(i),
+            leftAliases, rightAliases, null);
+      }
+      break;
+
+    default:
+      // This is an operator - so check whether it is unary or binary operator
+      if (condn.getChildCount() == 1) {
+        parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(0),
+            leftAliases, rightAliases, null);
+      } else if (condn.getChildCount() == 2) {
+
+        ArrayList<String> fields1 = null;
+        // If this is a dot operator, remember the field name on the rhs for
+        // the left semijoin.
+        if (!joinTree.getNoSemiJoin()
+            && condn.getToken().getType() == HiveParser.DOT) {
+          // get the semijoin rhs table name and field name
+          fields1 = new ArrayList<String>();
+          int rhssize = rightAliases.size();
+          parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(0),
+              leftAliases, rightAliases, null);
+          String rhsAlias = null;
+
+          if (rightAliases.size() > rhssize) { // the new table is rhs table
+            rhsAlias = rightAliases.get(rightAliases.size() - 1);
+          }
+          parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(1),
+              leftAliases, rightAliases, fields1);
+          if (rhsAlias != null && fields1.size() > 0) {
+            joinTree.addRHSSemijoinColumns(rhsAlias, condn);
+          }
+        } else {
+          parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(0),
+              leftAliases, rightAliases, null);
+          parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(1),
+              leftAliases, rightAliases, fields1);
+        }
+      } else {
+        throw new SemanticException(condn.toStringTree() + " encountered with "
+            + condn.getChildCount() + " children");
+      }
+      break;
+    }
+  }
+
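+  // Adds the equality expression to the join expressions of whichever side
+  // its aliases resolve to, and tracks left-side aliases in leftSrc.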
+  private void populateAliases(ArrayList<String> leftAliases,
+      ArrayList<String> rightAliases, ASTNode condn, QBJoinTree joinTree,
+      ArrayList<String> leftSrc) throws SemanticException {
+    if ((leftAliases.size() != 0) && (rightAliases.size() != 0)) {
+      throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
+          .getMsg(condn));
+    }
+    if (rightAliases.size() != 0) {
+      assert rightAliases.size() == 1;
+      joinTree.getExpressions().get(1).add(condn);
+    } else if (leftAliases.size() != 0) {
+      joinTree.getExpressions().get(0).add(condn);
+      for (String s : leftAliases) {
+        if (!leftSrc.contains(s)) {
+          leftSrc.add(s);
+        }
+      }
+    } else {
+      throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_2
+          .getMsg(condn));
+    }
+  }
+
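+  // Collects the aliases named in STREAMTABLE hints and records them on the
+  // join tree.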
+  private void parseStreamTables(QBJoinTree joinTree, QB qb) {
+    List<String> streamAliases = joinTree.getStreamAliases();
+    for (Node hintNode : qb.getParseInfo().getHints().getChildren()) {
+      ASTNode hint = (ASTNode) hintNode;
+      if (hint.getChild(0).getType() == HiveParser.TOK_STREAMTABLE) {
+        for (int i = 0; i < hint.getChild(1).getChildCount(); i++) {
+          if (streamAliases == null) {
+            streamAliases = new ArrayList<String>();
+          }
+          streamAliases.add(hint.getChild(1).getChild(i).getText());
+        }
+      }
+    }
+    joinTree.setStreamAliases(streamAliases);
+  }
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastDimensionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastDimensionResolver.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastDimensionResolver.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastDimensionResolver.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,19 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
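+/**
+ * Placeholder rewriter; presumably intended to pick the candidate tables
+ * that bring in the fewest dimension tables. Not implemented yet.
+ */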
+public class LeastDimensionResolver implements ContextRewriter {
+
+  public LeastDimensionResolver(Configuration conf) {
+    // TODO Auto-generated constructor stub
+  }
+
+  @Override
+  public void rewriteContext(CubeQueryContext cubeql)
+      throws SemanticException {
+    // TODO Auto-generated method stub
+
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastPartitionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastPartitionResolver.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastPartitionResolver.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastPartitionResolver.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,19 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
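+/**
+ * Placeholder rewriter; presumably intended to pick the candidate tables
+ * that require scanning the fewest partitions. Not implemented yet.
+ */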
+public class LeastPartitionResolver implements ContextRewriter {
+
+  public LeastPartitionResolver(Configuration conf) {
+    // TODO Auto-generated constructor stub
+  }
+
+  @Override
+  public void rewriteContext(CubeQueryContext cubeql)
+      throws SemanticException {
+    // TODO Auto-generated method stub
+
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,54 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+
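+/**
+ * Resolves which partitions each candidate table must cover for the query's
+ * time range. The commented-out sketch below walks from fromDate to toDate,
+ * taking the coarsest update period available at each step.
+ */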
+public class PartitionResolver implements ContextRewriter {
+
+  public PartitionResolver(Configuration conf) {
+    // TODO Auto-generated constructor stub
+  }
+
+  @Override
+  public void rewriteContext(CubeQueryContext cubeql) {
+    Map<String, List<String>> partitionColMap = new HashMap<String,
+        List<String>>();
+    /* Date fromDate = cubeql.getFromDate();
+    Date toDate = cubeql.getToDate();
+
+    //resolve summary table names, applicable only if query is on fact table
+
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(fromDate);
+
+    UpdatePeriod interval = null;
+    for (CubeFactTable fact : cubeql.getFactTables()) {
+      while ((interval = CubeFactTable.maxIntervalInRange(fromDate, toDate,
+          fact.getUpdatePeriods())) != null) {
+        List<String> partitions = fact.getPartitions(fromDate, toDate,
+            interval);
+        if (partitions != null) {
+          partitionColMap.put(MetastoreUtil.getVirtualFactTableName(
+              fact.getName(), interval), partitions);
+          // Advance from date
+          cal.setTime(fromDate);
+          cal.roll(interval.calendarField(), partitions.size());
+          fromDate = cal.getTime();
+        }
+      }
+    }
+    for (CubeDimensionTable dim : cubeql.getDimensionTables()) {
+      partitionColMap.put(MetastoreUtil.getVirtualDimTableName(
+          dim.getName()), dim.getPartitions());
+    }
+
+    // set partition cols map in cubeql
+    //TODO
+     *
+     */
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,19 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
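+/**
+ * Placeholder rewriter; per the TODO below, it is meant to narrow the
+ * candidate tables to those available on the supported storages.
+ */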
+public class StorageTableResolver implements ContextRewriter {
+
+  public StorageTableResolver(Configuration conf) {
+    // TODO Auto-generated constructor stub
+  }
+
+  @Override
+  public void rewriteContext(CubeQueryContext cubeql)
+      throws SemanticException {
+    // TODO
+    //Find candidate tables wrt supported storages
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ValidationRule.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ValidationRule.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ValidationRule.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ValidationRule.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,19 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
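+/**
+ * Base class for validation rules run over a CubeQueryContext;
+ * implementations are expected to set 'error' when validate() fails.
+ */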
+public abstract class ValidationRule {
+  Configuration conf;
+  String error;
+
+  public ValidationRule(Configuration conf) {
+    this.conf = conf;
+  }
+
+  public abstract boolean validate(CubeQueryContext ctx) throws SemanticException;
+
+  public String getErrorMessage() {
+    return error;
+  }
+}

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,51 @@
+package org.apache.hadoop.hive.ql.cube.processors;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.cube.parse.CubeQueryContext;
+import org.apache.hadoop.hive.ql.cube.parse.CubeQueryRewriter;
+
+public class CubeDriver extends Driver {
+
+  public CubeDriver(HiveConf conf) {
+    super(conf);
+  }
+
+  public CubeDriver() {
+    super();
+  }
+
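+  /**
+   * Rewrites the cube query to HQL using CubeQueryRewriter, then hands the
+   * rewritten query to the regular Driver for compilation.
+   */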
+  @Override
+  public int compile(String command) {
+    // compile the cube query and rewrite it to HQL query
+    CubeQueryRewriter rewriter = new CubeQueryRewriter(getConf());
+    CubeQueryContext finalQuery;
+    try {
+      // Phase 1: rewrite the query to resolve summary tables and joins
+      CubeQueryContext phase1Query = rewriter.rewritePhase1(command);
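+      // Phase 2: rewrite against the supported storages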
+      finalQuery = rewriter.rewritePhase2(phase1Query,
+          getSupportedStorages(getConf()));
+    } catch (Exception e) {
+      ErrorMsg error = ErrorMsg.getErrorMsg(e.getMessage());
+      errorMessage = "FAILED: " + e.getClass().getSimpleName();
+      if (error != ErrorMsg.GENERIC_ERROR) {
+        errorMessage += " [Error " + error.getErrorCode() + "]:";
+      }
+      errorMessage += " " + e.getMessage();
+      SQLState = error.getSQLState();
+      console.printError(errorMessage, "\n"
+          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      return error.getErrorCode();
+    }
+    return super.compile(finalQuery.toHQL());
+  }
+
+  private List<String> getSupportedStorages(HiveConf conf) {
+    // TODO Auto-generated method stub
+    return null;
+  }
+}

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java?rev=1456361&r1=1456360&r2=1456361&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java Thu Mar 14 08:16:24 2013
@@ -19,12 +19,14 @@
 package org.apache.hadoop.hive.ql.processors;
 
 import static org.apache.commons.lang.StringUtils.isBlank;
-import java.util.Map;
+
 import java.util.HashMap;
+import java.util.Map;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.cube.processors.CubeDriver;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.conf.HiveConf;
 
 /**
  * CommandProcessorFactory.
@@ -55,6 +57,8 @@ public final class CommandProcessorFacto
       return new AddResourceProcessor();
     } else if ("delete".equals(cmdl)) {
       return new DeleteResourceProcessor();
+    } else if ("cube".equals(cmdl)) {
+      return new CubeDriver(conf);
     } else if (!isBlank(cmd)) {
       if (conf == null) {
         return new Driver();

Added: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java?rev=1456361&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java (added)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java Thu Mar 14 08:16:24 2013
@@ -0,0 +1,32 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.junit.Test;
+
+public class TestCubeSemanticAnalyzer {
+  Configuration conf = new Configuration();
+
+  CubeSemanticAnalyzer analyzer;
+
+  ASTNode astRoot;
+
+  String[] queries = { "SELECT t1.c1 rsalias0, f(t1.c2) rsalias1," +
+      " (t2.c3 + t2.c4) rsalias2, avg(fc5/fc6) * fc7 " +
+      " FROM facttab t1" +
+      " WHERE ( fc1='foo' and fc2 = 250 or sin(fc3)=1.0 ) " +
+      " and time_range_in('NOW-7DAYS', 'NOW')" +
+      " GROUP BY t1.ca, t1.cb" +
+      " HAVING t2.c3 > 100" +
+      " ORDER BY t3.ca, t4.cb" +
+      " LIMIT 100"
+  };
+
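+  // Smoke test: analysis of a cube query using time_range_in should
+  // complete without throwing.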
+  @Test
+  public void testSemanticAnalyzer() throws Exception {
+    analyzer = new CubeSemanticAnalyzer(new HiveConf(conf, HiveConf.class));
+    astRoot = HQLParser.parseHQL(queries[0]);
+    analyzer.analyzeInternal(astRoot);
+  }
+}