Posted to commits@lens.apache.org by pu...@apache.org on 2017/02/08 02:23:38 UTC

[6/7] lens git commit: feature update 2 with query writing flow completed (few test cases still need to be fixed)

http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index d8f1ab4..646dbd6 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -169,14 +169,14 @@ public class DenormalizationResolver implements ContextRewriter {
       return null;
     }
 
-    public Set<Dimension> rewriteDenormctx(CandidateFact cfact, Map<Dimension, CandidateDim> dimsToQuery,
+    public Set<Dimension> rewriteDenormctx(StorageCandidate sc, Map<Dimension, CandidateDim> dimsToQuery,
       boolean replaceFact) throws LensException {
       Set<Dimension> refTbls = new HashSet<>();
 
       if (!tableToRefCols.isEmpty()) {
         // pick referenced columns for fact
-        if (cfact != null) {
-          pickColumnsForTable(cfact.getName());
+        if (sc != null) {
+          pickColumnsForTable(sc.getName());
         }
         // pick referenced columns for dimensions
         if (dimsToQuery != null && !dimsToQuery.isEmpty()) {
@@ -185,11 +185,11 @@ public class DenormalizationResolver implements ContextRewriter {
           }
         }
         // Replace picked reference in all the base trees
-        replaceReferencedColumns(cfact, replaceFact);
+        replaceReferencedColumns(sc, replaceFact);
 
         // Add the picked references to dimsToQuery
         for (PickedReference picked : pickedRefs) {
-          if (isPickedFor(picked, cfact, dimsToQuery)) {
+          if (isPickedFor(picked, sc, dimsToQuery)) {
             refTbls.add((Dimension) cubeql.getCubeTableForAlias(picked.getChainRef().getChainName()));
             cubeql.addColumnsQueried(picked.getChainRef().getChainName(), picked.getChainRef().getRefColumn());
           }
@@ -199,8 +199,8 @@ public class DenormalizationResolver implements ContextRewriter {
     }
 
    // checks if the reference is picked for the fact and dimsToQuery passed
-    private boolean isPickedFor(PickedReference picked, CandidateFact cfact, Map<Dimension, CandidateDim> dimsToQuery) {
-      if (cfact != null && picked.pickedFor.equalsIgnoreCase(cfact.getName())) {
+    private boolean isPickedFor(PickedReference picked, StorageCandidate sc, Map<Dimension, CandidateDim> dimsToQuery) {
+      if (sc != null && picked.pickedFor.equalsIgnoreCase(sc.getName())) {
         return true;
       }
       if (dimsToQuery != null) {
@@ -237,18 +237,16 @@ public class DenormalizationResolver implements ContextRewriter {
       }
     }
 
-    private void replaceReferencedColumns(CandidateFact cfact, boolean replaceFact) throws LensException {
+    private void replaceReferencedColumns(StorageCandidate sc, boolean replaceFact) throws LensException {
       QueryAST ast = cubeql;
-      boolean factRefExists = cfact != null && tableToRefCols.get(cfact.getName()) != null && !tableToRefCols.get(cfact
+      boolean factRefExists = sc != null && tableToRefCols.get(sc.getName()) != null && !tableToRefCols.get(sc
         .getName()).isEmpty();
       if (replaceFact && factRefExists) {
-        ast = cfact;
+        ast = sc.getQueryAst();
       }
       resolveClause(cubeql, ast.getSelectAST());
       if (factRefExists) {
-        for (ASTNode storageWhereClauseAST : cfact.getStorgeWhereClauseMap().values()) {
-          resolveClause(cubeql, storageWhereClauseAST);
-        }
+        resolveClause(cubeql, sc.getQueryAst().getWhereAST());
       } else {
         resolveClause(cubeql, ast.getWhereAST());
       }
@@ -346,30 +344,28 @@ public class DenormalizationResolver implements ContextRewriter {
      // candidate tables which require denorm fields and whose references are no
      // longer valid will be pruned
       if (cubeql.getCube() != null && !cubeql.getCandidates().isEmpty()) {
-        for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); i.hasNext();) {
-          Candidate cand = i.next();
+        for (Iterator<StorageCandidate> i =
+             CandidateUtil.getStorageCandidates(cubeql.getCandidates()).iterator(); i.hasNext();) {
+          StorageCandidate sc = i.next();
          //TODO union : is this happening in phase 1 or 2?
-          //TODO Union : If phase 2, the below code will not work. Move to phase1 in that case
-          if (cand instanceof StorageCandidate) {
-            StorageCandidate sc = (StorageCandidate) cand;
+          //TODO union : If phase 2, the below code will not work. Move to phase1 in that case
             if (denormCtx.tableToRefCols.containsKey(sc.getFact().getName())) {
               for (ReferencedQueriedColumn refcol : denormCtx.tableToRefCols.get(sc.getFact().getName())) {
                 if (denormCtx.getReferencedCols().get(refcol.col.getName()).isEmpty()) {
                   log.info("Not considering storage candidate :{} as column {} is not available", sc, refcol.col);
                   cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(refcol.col.getName()));
-                  i.remove();
+                  Collection<Candidate> prunedCandidates = CandidateUtil.filterCandidates(cubeql.getCandidates(), sc);
+                  cubeql.addCandidatePruningMsg(prunedCandidates,
+                      new CandidateTablePruneCause(CandidateTablePruneCode.ELEMENT_IN_SET_PRUNED));
                 }
               }
-            }
-          } else {
-            throw new LensException("Not a storage candidate!!");
           }
         }
         if (cubeql.getCandidates().size() == 0) {
           throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getLensErrorInfo(),
               cubeql.getColumnsQueriedForTable(cubeql.getCube().getName()).toString());
         }
-        cubeql.pruneCandidateFactSet(CandidateTablePruneCode.COLUMN_NOT_FOUND);
+
       }
       if (cubeql.getDimensions() != null && !cubeql.getDimensions().isEmpty()) {
         for (Dimension dim : cubeql.getDimensions()) {

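For readers following the refactor: the hunk above moves DenormalizationResolver from CandidateFact to StorageCandidate and, instead of removing a bad candidate directly, propagates the pruning to every candidate built on top of it. A minimal sketch of that flow, using simplified stand-ins (these are not the real Lens classes) for Candidate, StorageCandidate, and CandidateUtil.filterCandidates:

import java.util.*;

public class DenormPruneSketch {
  interface Candidate { boolean contains(Candidate other); }

  static final class StorageCandidate implements Candidate {
    final String name; final Set<String> columns;
    StorageCandidate(String name, Set<String> columns) { this.name = name; this.columns = columns; }
    public boolean contains(Candidate other) { return this == other; }
  }

  // stand-in for CandidateUtil.filterCandidates: removes and returns every
  // candidate that contains the pruned storage candidate
  static List<Candidate> filterCandidates(Collection<Candidate> all, StorageCandidate sc) {
    List<Candidate> pruned = new ArrayList<>();
    for (Iterator<Candidate> it = all.iterator(); it.hasNext();) {
      Candidate c = it.next();
      if (c.contains(sc)) { it.remove(); pruned.add(c); }
    }
    return pruned;
  }

  public static void main(String[] args) {
    StorageCandidate sc = new StorageCandidate("daily_fact", new HashSet<>(Arrays.asList("msr1")));
    Collection<Candidate> candidates = new LinkedHashSet<>(Collections.singletonList(sc));
    if (!sc.columns.contains("cityname")) { // referenced denorm column not reachable
      // real code logs columnNotFound for sc, then ELEMENT_IN_SET_PRUNED for the rest
      System.out.println("pruned: " + filterCandidates(candidates, sc).size());
    }
  }
}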
http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 1b8c560..0cf4b1c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -419,13 +419,13 @@ class ExpressionResolver implements ContextRewriter {
       return true;
     }
 
-    public Set<Dimension> rewriteExprCtx(CandidateFact cfact, Map<Dimension, CandidateDim> dimsToQuery,
+    public Set<Dimension> rewriteExprCtx(StorageCandidate sc, Map<Dimension, CandidateDim> dimsToQuery,
       QueryAST queryAST) throws LensException {
       Set<Dimension> exprDims = new HashSet<Dimension>();
       if (!allExprsQueried.isEmpty()) {
         // pick expressions for fact
-        if (cfact != null) {
-          pickExpressionsForTable(cfact);
+        if (sc != null) {
+          pickExpressionsForTable(sc);
         }
         // pick expressions for dimensions
         if (dimsToQuery != null && !dimsToQuery.isEmpty()) {
@@ -434,7 +434,7 @@ class ExpressionResolver implements ContextRewriter {
           }
         }
         // Replace picked expressions in all the base trees
-        replacePickedExpressions(cfact, queryAST);
+        replacePickedExpressions(sc, queryAST);
         log.debug("Picked expressions: {}", pickedExpressions);
         for (Set<PickedExpression> peSet : pickedExpressions.values()) {
           for (PickedExpression pe : peSet) {
@@ -446,13 +446,11 @@ class ExpressionResolver implements ContextRewriter {
       return exprDims;
     }
 
-    private void replacePickedExpressions(CandidateFact cfact, QueryAST queryAST)
+    private void replacePickedExpressions(StorageCandidate sc, QueryAST queryAST)
       throws LensException {
       replaceAST(cubeql, queryAST.getSelectAST());
-      if (cfact != null) {
-        for (ASTNode storageWhereClauseAST : cfact.getStorgeWhereClauseMap().values()) {
-          replaceAST(cubeql, storageWhereClauseAST);
-        }
+      if (sc != null) {
+        replaceAST(cubeql, sc.getQueryAst().getWhereAST());
       } else {
         replaceAST(cubeql, queryAST.getWhereAST());
       }

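The ExpressionResolver change mirrors the one above: a StorageCandidate owns a single query AST, so picked expressions are replaced in one WHERE tree rather than looped over a per-storage-table where-clause map. A compact sketch with simplified stand-in types (Ast, QueryAst, and StorageCandidate here are assumptions, not the real interfaces):

import java.util.function.Consumer;

class ExprRewriteSketch {
  interface Ast { }
  interface QueryAst { Ast getSelectAST(); Ast getWhereAST(); }
  interface StorageCandidate { QueryAst getQueryAst(); }

  // With a storage candidate present, its single WHERE tree is rewritten;
  // otherwise the query-level WHERE tree is used.
  static void replacePicked(StorageCandidate sc, QueryAst queryAst, Consumer<Ast> replaceAst) {
    replaceAst.accept(queryAst.getSelectAST());
    replaceAst.accept(sc != null ? sc.getQueryAst().getWhereAST() : queryAst.getWhereAST());
  }
}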
http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
index 216ae52..6ccf3d8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *   http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
@@ -19,9 +19,12 @@
 package org.apache.lens.cube.parse;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
+import static org.apache.lens.cube.parse.ColumnResolver.addColumnsForSelectExpr;
 
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.lens.cube.metadata.AbstractBaseTable;
 import org.apache.lens.server.api.error.LensException;
@@ -42,19 +45,19 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 class GroupbyResolver implements ContextRewriter {
 
+  private static final String SELECT_ALIAS_PREFIX = "select_expr";
   private final boolean selectPromotionEnabled;
   private final boolean groupbyPromotionEnabled;
 
   public GroupbyResolver(Configuration conf) {
-    selectPromotionEnabled =
-      conf.getBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, CubeQueryConfUtil.DEFAULT_ENABLE_SELECT_TO_GROUPBY);
-    groupbyPromotionEnabled =
-      conf.getBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT,
-        CubeQueryConfUtil.DEFAULT_ENABLE_GROUP_BY_TO_SELECT);
+    selectPromotionEnabled = conf
+      .getBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, CubeQueryConfUtil.DEFAULT_ENABLE_SELECT_TO_GROUPBY);
+    groupbyPromotionEnabled = conf
+      .getBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, CubeQueryConfUtil.DEFAULT_ENABLE_GROUP_BY_TO_SELECT);
   }
 
-  private void promoteSelect(CubeQueryContext cubeql, List<SelectPhraseContext> selectExprs,
-    List<String> groupByExprs) throws LensException {
+  private void promoteSelect(CubeQueryContext cubeql, List<SelectPhraseContext> selectExprs, List<String> groupByExprs)
+    throws LensException {
     if (!selectPromotionEnabled) {
       return;
     }
@@ -79,7 +82,7 @@ class GroupbyResolver implements ContextRewriter {
                 groupbyAST.addChild(exprAST);
               } else {
                 // no group by ast exist, create one
-                ASTNode newAST = new ASTNode(new CommonToken(TOK_GROUPBY));
+                ASTNode newAST = new ASTNode(new CommonToken(TOK_GROUPBY, "TOK_GROUPBY"));
                 newAST.addChild(exprAST);
                 cubeql.setGroupByAST(newAST);
               }
@@ -97,7 +100,6 @@ class GroupbyResolver implements ContextRewriter {
     return node != null && node.getToken() != null && !hasTableOrColumn(node);
   }
 
-
   /*
    * Check if table or column used in node
    */
@@ -115,8 +117,7 @@ class GroupbyResolver implements ContextRewriter {
     return false;
   }
 
-  private void promoteGroupby(CubeQueryContext cubeql, List<SelectPhraseContext> selectExprs,
-                              List<String> groupByExprs)
+  private void promoteGroupby(CubeQueryContext cubeql, List<SelectPhraseContext> selectExprs, List<String> groupByExprs)
     throws LensException {
     if (!groupbyPromotionEnabled) {
       return;
@@ -131,12 +132,44 @@ class GroupbyResolver implements ContextRewriter {
     for (String expr : groupByExprs) {
       if (!contains(selectExprs, expr)) {
         ASTNode exprAST = HQLParser.parseExpr(expr, cubeql.getConf());
-        addChildAtIndex(index, cubeql.getSelectAST(), exprAST);
+        ASTNode parent = new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR, "TOK_SELEXPR"));
+        parent.addChild(exprAST);
+        exprAST.setParent(parent);
+        addChildAtIndex(index, cubeql.getSelectAST(), parent);
+        updateSelectPhrase(cubeql, index, parent);
         index++;
       }
     }
   }
 
+  private void updateSelectPhrase(CubeQueryContext cubeql, int index, ASTNode selectExpr) {
+    int exprInd = index;
+    ASTNode selectExprChild = (ASTNode) selectExpr.getChild(0);
+    Set<String> cols = new HashSet<>();
+    SelectPhraseContext sel = new SelectPhraseContext(selectExpr);
+    addColumnsForSelectExpr(sel, selectExpr, cubeql.getSelectAST(), cols);
+    String alias = selectExpr.getChildCount() > 1 ? selectExpr.getChild(1).getText() : null;
+    String selectAlias;
+    String selectFinalAlias = null;
+    if (alias != null) {
+      selectFinalAlias = alias;
+      selectAlias = SELECT_ALIAS_PREFIX + exprInd;
+    } else if (cols.size() == 1 && (selectExprChild.getToken().getType() == TOK_TABLE_OR_COL
+      || selectExprChild.getToken().getType() == DOT)) {
+      // select expression is same as the column
+      selectAlias = cols.iterator().next().toLowerCase();
+    } else {
+      selectAlias = SELECT_ALIAS_PREFIX + exprInd;
+      selectFinalAlias = HQLParser.getString(selectExprChild);
+    }
+    cubeql.addColumnsQueried(sel.getTblAliasToColumns());
+    sel.setSelectAlias(selectAlias);
+    sel.setFinalAlias(!StringUtils.isBlank(selectFinalAlias) ? "`" + selectFinalAlias + "`" : selectAlias);
+    sel.setActualAlias(alias != null ? alias.toLowerCase() : null);
+    cubeql.getSelectPhrases().add(exprInd, sel);
+    //cubeql.addSelectPhrase(sel);
+  }
+
   private void addChildAtIndex(int index, ASTNode parent, ASTNode child) {
     // add the last child
     int count = parent.getChildCount();
@@ -158,7 +191,7 @@ class GroupbyResolver implements ContextRewriter {
     List<SelectPhraseContext> selectExprs = getSelectNonAggregateNonMeasureExpressions(cubeql);
     List<String> groupByExprs = new ArrayList<>();
     if (cubeql.getGroupByString() != null) {
-      String[] gby = getGroupbyExpressions(cubeql.getGroupByAST()).toArray(new String[]{});
+      String[] gby = getGroupbyExpressions(cubeql.getGroupByAST()).toArray(new String[] {});
       for (String g : gby) {
         groupByExprs.add(g.trim());
       }
@@ -228,7 +261,7 @@ class GroupbyResolver implements ContextRewriter {
       // by the time Groupby resolver is looking for aggregate, all columns should be aliased with correct
       // alias name.
       if (cubeql.getCubeTableForAlias(alias) instanceof AbstractBaseTable) {
-        if (((AbstractBaseTable)cubeql.getCubeTableForAlias(alias)).getExpressionByName(colname) != null) {
+        if (((AbstractBaseTable) cubeql.getCubeTableForAlias(alias)).getExpressionByName(colname) != null) {
           return cubeql.getExprCtx().getExpressionContext(colname, alias).hasAggregates();
         }
       }

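The new updateSelectPhrase registers each promoted group-by expression as a select phrase with a chosen alias. A hedged sketch of just the alias decision, with the single-column token test reduced to a boolean parameter (chooseAlias and isBareColumn are illustrative names, not Lens API):

import java.util.Set;

class SelectAliasSketch {
  static final String SELECT_ALIAS_PREFIX = "select_expr";

  // An explicit alias keeps its name as the final alias but still gets a
  // generated internal alias; a bare column keeps its own (lower-cased) name;
  // any other expression falls back to select_expr<N>.
  static String chooseAlias(String explicitAlias, Set<String> cols, boolean isBareColumn, int index) {
    if (explicitAlias != null) {
      return SELECT_ALIAS_PREFIX + index;
    } else if (cols.size() == 1 && isBareColumn) {
      return cols.iterator().next().toLowerCase();
    }
    return SELECT_ALIAS_PREFIX + index;
  }
}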
http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
index 7781ba6..d89e7b4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
@@ -1,16 +1,11 @@
 package org.apache.lens.cube.parse;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.Set;
+import java.util.*;
 
 import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
-import lombok.Getter;
-
 /**
  * Represents a join of two candidates
  */
@@ -22,46 +17,33 @@ public class JoinCandidate implements Candidate {
   private Candidate childCandidate1;
   private Candidate childCandidate2;
   private String toStr;
-  @Getter
-  private String alias;
+  private QueryAST queryAST;
+  private CubeQueryContext cubeql;
 
-  public JoinCandidate(Candidate childCandidate1, Candidate childCandidate2, String alias) {
+  public JoinCandidate(Candidate childCandidate1, Candidate childCandidate2, CubeQueryContext cubeql) {
     this.childCandidate1 = childCandidate1;
     this.childCandidate2 = childCandidate2;
-    this.alias = alias;
-  }
-
-  private String getJoinCondition() {
-    return null;
-  }
-
-  @Override
-  public String toHQL() {
-    return null;
-  }
-
-  @Override
-  public QueryAST getQueryAst() {
-    return null;
+    this.cubeql = cubeql;
   }
 
   @Override
   public Collection<String> getColumns() {
-    return null;
+    Set<String> columns = new HashSet<>();
+    columns.addAll(childCandidate1.getColumns());
+    columns.addAll(childCandidate2.getColumns());
+    return columns;
   }
 
   @Override
   public Date getStartTime() {
     return childCandidate1.getStartTime().after(childCandidate2.getStartTime())
-           ? childCandidate1.getStartTime()
-           : childCandidate2.getStartTime();
+        ? childCandidate1.getStartTime() : childCandidate2.getStartTime();
   }
 
   @Override
   public Date getEndTime() {
     return childCandidate1.getEndTime().before(childCandidate2.getEndTime())
-           ? childCandidate1.getEndTime()
-           : childCandidate2.getEndTime();
+        ? childCandidate1.getEndTime() : childCandidate2.getEndTime();
   }
 
   @Override
@@ -90,19 +72,35 @@ public class JoinCandidate implements Candidate {
    * @return
    */
   @Override
-  public boolean evaluateCompleteness(TimeRange timeRange, boolean failOnPartialData) throws LensException {
-    return this.childCandidate1.evaluateCompleteness(timeRange, failOnPartialData) && this.childCandidate2
-      .evaluateCompleteness(timeRange, failOnPartialData);
+  public boolean evaluateCompleteness(TimeRange timeRange, TimeRange parentTimeRange, boolean failOnPartialData)
+      throws LensException {
+    return this.childCandidate1.evaluateCompleteness(timeRange, parentTimeRange, failOnPartialData)
+        && this.childCandidate2.evaluateCompleteness(timeRange, parentTimeRange, failOnPartialData);
   }
 
+  /**
+   * @return all the partitions from the children
+   */
   @Override
   public Set<FactPartition> getParticipatingPartitions() {
-    return null;
+    Set<FactPartition> factPartitionsSet = new HashSet<>();
+    factPartitionsSet.addAll(childCandidate1.getParticipatingPartitions());
+    factPartitionsSet.addAll(childCandidate2.getParticipatingPartitions());
+    return factPartitionsSet;
   }
 
   @Override
   public boolean isExpressionEvaluable(ExpressionResolver.ExpressionContext expr) {
-    return childCandidate1.isExpressionEvaluable(expr) || childCandidate1.isExpressionEvaluable(expr);
+    return childCandidate1.isExpressionEvaluable(expr) || childCandidate2.isExpressionEvaluable(expr);
+  }
+
+  @Override
+  public Set<Integer> getAnswerableMeasurePhraseIndices() {
+    Set<Integer> measureIndices = new HashSet<>();
+    for (Candidate cand : getChildren()) {
+      measureIndices.addAll(cand.getAnswerableMeasurePhraseIndices());
+    }
+    return measureIndices;
   }
 
   @Override

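JoinCandidate now delegates to its children instead of returning null: columns and participating partitions are the union of the children's, while the valid time window is the intersection (later start, earlier end); the hunk also fixes isExpressionEvaluable, which previously tested childCandidate1 twice. A minimal sketch of the delegation, with Candidate trimmed to the members used here:

import java.util.*;

class JoinCandidateSketch {
  interface Candidate {
    Collection<String> getColumns();
    Date getStartTime();
    Date getEndTime();
  }

  // columns answerable by the join = union of the children's columns
  static Collection<String> columns(Candidate c1, Candidate c2) {
    Set<String> cols = new HashSet<>(c1.getColumns());
    cols.addAll(c2.getColumns());
    return cols;
  }

  // valid window = intersection: the later of the two starts ...
  static Date startTime(Candidate c1, Candidate c2) {
    return c1.getStartTime().after(c2.getStartTime()) ? c1.getStartTime() : c2.getStartTime();
  }

  // ... and the earlier of the two ends
  static Date endTime(Candidate c1, Candidate c2) {
    return c1.getEndTime().before(c2.getEndTime()) ? c1.getEndTime() : c2.getEndTime();
  }
}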
http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
index a53e994..cb1cd65 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *   http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
@@ -18,9 +18,11 @@
  */
 package org.apache.lens.cube.parse;
 
-import java.util.*;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
 
-import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
@@ -37,35 +39,36 @@ class LeastPartitionResolver implements ContextRewriter {
 
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws LensException {
-    if (cubeql.getCube() != null && !cubeql.getCandidateFactSets().isEmpty()) {
-      Map<Set<CandidateFact>, Integer> factPartCount = new HashMap<Set<CandidateFact>, Integer>();
+    if (cubeql.getCube() != null && !cubeql.getCandidates().isEmpty()) {
+      Map<Candidate, Integer> factPartCount = new HashMap<>();
 
       //The number of partitions being calculated is not the actual number of partitions,
       // they are number of time values now instead of partitions.
       // This seems fine, as the less number of time values actually represent the rollups on time. And with
       // MaxCoveringFactResolver facts with less partitions which are not covering the range would be removed.
-      for (Set<CandidateFact> facts : cubeql.getCandidateFactSets()) {
-        factPartCount.put(facts, getPartCount(facts));
+      for (Candidate candidate : cubeql.getCandidates()) {
+        factPartCount.put(candidate, getPartCount(candidate));
       }
 
       double minPartitions = Collections.min(factPartCount.values());
 
-      for (Iterator<Set<CandidateFact>> i = cubeql.getCandidateFactSets().iterator(); i.hasNext();) {
-        Set<CandidateFact> facts = i.next();
-        if (factPartCount.get(facts) > minPartitions) {
-          log.info("Not considering facts:{} from candidate fact tables as it requires more partitions to be"
-            + " queried:{} minimum:{}", facts, factPartCount.get(facts), minPartitions);
+      for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); i.hasNext(); ) {
+        Candidate candidate = i.next();
+        if (factPartCount.get(candidate) > minPartitions) {
+          log.info("Not considering Candidate:{} as it requires more partitions to be" + " queried:{} minimum:{}",
+            candidate, factPartCount.get(candidate), minPartitions);
           i.remove();
+          cubeql.addCandidatePruningMsg(candidate,
+            new CandidateTablePruneCause(CandidateTablePruneCause.CandidateTablePruneCode.MORE_PARTITIONS));
         }
       }
-      cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCode.MORE_PARTITIONS);
     }
   }
 
-  private int getPartCount(Set<CandidateFact> set) {
+  private int getPartCount(Candidate candidate) {
     int parts = 0;
-    for (CandidateFact f : set) {
-      parts += f.getNumQueriedParts();
+    for (StorageCandidate sc : CandidateUtil.getStorageCandidates(candidate)) {
+      parts += sc.getNumQueriedParts();
     }
     return parts;
   }

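LeastPartitionResolver keeps only the candidates that need the minimum number of partitions; the rest are removed and tagged MORE_PARTITIONS. A runnable sketch of the same prune, with plain strings standing in for candidates and partCount standing in for getPartCount (the sum of getNumQueriedParts over a candidate's storage candidates):

import java.util.*;

class LeastPartitionSketch {
  static void prune(Map<String, Integer> partCount, List<String> candidates) {
    int min = Collections.min(partCount.values());
    candidates.removeIf(c -> partCount.get(c) > min); // real code adds a MORE_PARTITIONS cause
  }

  public static void main(String[] args) {
    Map<String, Integer> counts = new HashMap<>();
    counts.put("daily_storage", 30);
    counts.put("monthly_storage", 1);
    List<String> candidates = new ArrayList<>(counts.keySet());
    prune(counts, candidates);
    System.out.println(candidates); // [monthly_storage]
  }
}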
http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
index 57c9c44..2522d92 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *   http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
@@ -21,7 +21,6 @@ package org.apache.lens.cube.parse;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.Map;
-import java.util.Set;
 
 import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.metadata.UpdatePeriod;
@@ -31,11 +30,10 @@ import org.apache.lens.server.api.error.LensException;
 import org.apache.hadoop.conf.Configuration;
 
 import com.google.common.collect.Maps;
-
 import lombok.extern.slf4j.Slf4j;
 
 /**
- * Prune candidate fact sets so that the facts except the ones that are covering maximum of range are pruned
+ * Prune candidates, keeping only the ones that cover the maximum of the range
  */
 @Slf4j
 class MaxCoveringFactResolver implements ContextRewriter {
@@ -53,7 +51,7 @@ class MaxCoveringFactResolver implements ContextRewriter {
       // redundant computation.
       return;
     }
-    if (cubeql.getCube() == null || cubeql.getCandidateFactSets().size() <= 1) {
+    if (cubeql.getCube() == null || cubeql.getCandidates().size() <= 1) {
       // nothing to prune.
       return;
     }
@@ -66,15 +64,13 @@ class MaxCoveringFactResolver implements ContextRewriter {
   private void resolveByTimeCovered(CubeQueryContext cubeql) {
     // For each part column, which candidate fact sets are covering how much amount.
     // Later, we'll maximize coverage for each queried part column.
-    Map<String, Map<Set<CandidateFact>, Long>> partCountsPerPartCol = Maps.newHashMap();
-    //TODO union: max covering set will be calculated based on List<Candidate>
-    //TODO union: Each candidate will provide Set<FactPartion> using {@link Candidate.getParticipatingPartitions}
-    for (Set<CandidateFact> facts : cubeql.getCandidateFactSets()) {
-      for (Map.Entry<String, Long> entry : getTimeCoveredForEachPartCol(facts).entrySet()) {
+    Map<String, Map<Candidate, Long>> partCountsPerPartCol = Maps.newHashMap();
+    for (Candidate cand : cubeql.getCandidates()) {
+      for (Map.Entry<String, Long> entry : getTimeCoveredForEachPartCol(cand).entrySet()) {
         if (!partCountsPerPartCol.containsKey(entry.getKey())) {
-          partCountsPerPartCol.put(entry.getKey(), Maps.<Set<CandidateFact>, Long>newHashMap());
+          partCountsPerPartCol.put(entry.getKey(), Maps.<Candidate, Long>newHashMap());
         }
-        partCountsPerPartCol.get(entry.getKey()).put(facts, entry.getValue());
+        partCountsPerPartCol.get(entry.getKey()).put(cand, entry.getValue());
       }
     }
     // for each queried partition, prune fact sets that are covering less range than max
@@ -82,29 +78,32 @@ class MaxCoveringFactResolver implements ContextRewriter {
       if (partCountsPerPartCol.get(partColQueried) != null) {
         long maxTimeCovered = Collections.max(partCountsPerPartCol.get(partColQueried).values());
         TimeCovered timeCovered = new TimeCovered(maxTimeCovered);
-        Iterator<Set<CandidateFact>> iter = cubeql.getCandidateFactSets().iterator();
+        Iterator<Candidate> iter = cubeql.getCandidates().iterator();
         while (iter.hasNext()) {
-          Set<CandidateFact> facts = iter.next();
-          Long timeCoveredLong = partCountsPerPartCol.get(partColQueried).get(facts);
+          Candidate candidate = iter.next();
+          Long timeCoveredLong = partCountsPerPartCol.get(partColQueried).get(candidate);
           if (timeCoveredLong == null) {
             timeCoveredLong = 0L;
           }
           if (timeCoveredLong < maxTimeCovered) {
-            log.info("Not considering facts:{} from candidate fact tables as it covers less time than the max"
-                    + " for partition column: {} which is: {}", facts, partColQueried, timeCovered);
+            log.info("Not considering Candidate:{} from Candidate set as it covers less time than the max"
+              + " for partition column: {} which is: {}", candidate, partColQueried, timeCovered);
             iter.remove();
+            cubeql.addCandidatePruningMsg(candidate,
+              new CandidateTablePruneCause(CandidateTablePruneCause.CandidateTablePruneCode.LESS_DATA));
           }
         }
       }
     }
-    cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.lessData(null));
+    //  cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.lessData(null));
+
   }
 
   private void resolveByDataCompleteness(CubeQueryContext cubeql) {
     // From the list of  candidate fact sets, we calculate the maxDataCompletenessFactor.
     float maxDataCompletenessFactor = 0f;
-    for (Set<CandidateFact> facts : cubeql.getCandidateFactSets()) {
-      float dataCompletenessFactor = computeDataCompletenessFactor(facts);
+    for (Candidate cand : cubeql.getCandidates()) {
+      float dataCompletenessFactor = computeDataCompletenessFactor(cand);
       if (dataCompletenessFactor > maxDataCompletenessFactor) {
         maxDataCompletenessFactor = dataCompletenessFactor;
       }
@@ -116,27 +115,26 @@ class MaxCoveringFactResolver implements ContextRewriter {
     }
 
     // We prune those candidate fact set, whose dataCompletenessFactor is less than maxDataCompletenessFactor
-    //TODO union : This needs to work on List<Candidate>
-    Iterator<Set<CandidateFact>> iter = cubeql.getCandidateFactSets().iterator();
+    Iterator<Candidate> iter = cubeql.getCandidates().iterator();
     while (iter.hasNext()) {
-      Set<CandidateFact> facts = iter.next();
-      float dataCompletenessFactor = computeDataCompletenessFactor(facts);
+      Candidate cand = iter.next();
+      float dataCompletenessFactor = computeDataCompletenessFactor(cand);
       if (dataCompletenessFactor < maxDataCompletenessFactor) {
-        log.info("Not considering facts:{} from candidate fact tables as the dataCompletenessFactor for this:{} is "
-                + "less than the max:{}", facts, dataCompletenessFactor, maxDataCompletenessFactor);
+        log.info("Not considering Candidate :{} from the list as the dataCompletenessFactor for this:{} is "
+          + "less than the max:{}", cand, dataCompletenessFactor, maxDataCompletenessFactor);
         iter.remove();
+        cubeql.addCandidatePruningMsg(cand,
+          new CandidateTablePruneCause(CandidateTablePruneCause.CandidateTablePruneCode.INCOMPLETE_PARTITION));
       }
     }
-    cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.incompletePartitions(null));
   }
 
-  //TODO union : This needs to work on Candidate
-  private float computeDataCompletenessFactor(Set<CandidateFact> facts) {
+  private float computeDataCompletenessFactor(Candidate cand) {
     float completenessFactor = 0f;
     int numPartition = 0;
-    for (CandidateFact fact : facts) {
-      if (fact.getDataCompletenessMap() != null) {
-        Map<String, Map<String, Float>> completenessMap = fact.getDataCompletenessMap();
+    for (StorageCandidate sc : CandidateUtil.getStorageCandidates(cand)) {
+      if (sc.getDataCompletenessMap() != null) {
+        Map<String, Map<String, Float>> completenessMap = sc.getDataCompletenessMap();
         for (Map<String, Float> partitionCompleteness : completenessMap.values()) {
           for (Float value : partitionCompleteness.values()) {
             numPartition++;
@@ -145,33 +143,30 @@ class MaxCoveringFactResolver implements ContextRewriter {
         }
       }
     }
-    return numPartition == 0 ? completenessFactor : completenessFactor/numPartition;
+    return numPartition == 0 ? completenessFactor : completenessFactor / numPartition;
   }
 
   /**
    * Returns time covered by fact set for each part column.
-   * @param facts
+   *
+   * @param cand
    * @return
    */
-  private Map<String, Long> getTimeCoveredForEachPartCol(Set<CandidateFact> facts) {
+  private Map<String, Long> getTimeCoveredForEachPartCol(Candidate cand) {
     Map<String, Long> ret = Maps.newHashMap();
     UpdatePeriod smallest = UpdatePeriod.values()[UpdatePeriod.values().length - 1];
-    for (CandidateFact fact : facts) {
-      for (FactPartition part : fact.getPartsQueried()) {
-        if (part.getPeriod().compareTo(smallest) < 0) {
-          smallest = part.getPeriod();
-        }
+    for (FactPartition part : cand.getParticipatingPartitions()) {
+      if (part.getPeriod().compareTo(smallest) < 0) {
+        smallest = part.getPeriod();
       }
     }
     PartitionRangesForPartitionColumns partitionRangesForPartitionColumns = new PartitionRangesForPartitionColumns();
-    for (CandidateFact fact : facts) {
-      for (FactPartition part : fact.getPartsQueried()) {
-        if (part.isFound()) {
-          try {
-            partitionRangesForPartitionColumns.add(part);
-          } catch (LensException e) {
-            log.error("invalid partition: ", e);
-          }
+    for (FactPartition part : cand.getParticipatingPartitions()) {
+      if (part.isFound()) {
+        try {
+          partitionRangesForPartitionColumns.add(part);
+        } catch (LensException e) {
+          log.error("invalid partition: ", e);
         }
       }
     }
@@ -200,17 +195,9 @@ class MaxCoveringFactResolver implements ContextRewriter {
     }
 
     public String toString() {
-      return new StringBuilder()
-        .append(days)
-        .append(" days, ")
-        .append(hours)
-        .append(" hours, ")
-        .append(minutes)
-        .append(" minutes, ")
-        .append(seconds)
-        .append(" seconds, ")
-        .append(milliseconds)
-        .append(" milliseconds.").toString();
+      return new StringBuilder().append(days).append(" days, ").append(hours).append(" hours, ").append(minutes)
+        .append(" minutes, ").append(seconds).append(" seconds, ").append(milliseconds).append(" milliseconds.")
+        .toString();
     }
   }
 }

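MaxCoveringFactResolver now iterates Candidates directly and prunes any that cover less time (or have lower data completeness) than the best one, attaching LESS_DATA or INCOMPLETE_PARTITION causes instead of the old set-level prune. A minimal sketch of the time-covered prune, assuming coverage per candidate has already been computed from getParticipatingPartitions():

import java.util.*;

class MaxCoveringSketch {
  // timeCovered maps each candidate to the total time its participating
  // partitions cover for one queried part column
  static void pruneByTimeCovered(Map<String, Long> timeCovered, Collection<String> candidates) {
    long max = Collections.max(timeCovered.values());
    for (Iterator<String> it = candidates.iterator(); it.hasNext();) {
      long covered = timeCovered.getOrDefault(it.next(), 0L);
      if (covered < max) {
        it.remove(); // real code also records a LESS_DATA pruning cause
      }
    }
  }
}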
http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
deleted file mode 100644
index 979c24b..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
+++ /dev/null
@@ -1,238 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import static org.apache.lens.cube.parse.HQLParser.*;
-
-import java.util.*;
-
-import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-
-import org.antlr.runtime.CommonToken;
-
-import com.google.common.collect.Lists;
-import lombok.extern.slf4j.Slf4j;
-
-/**
- * Writes a join query with all the facts involved, with where, groupby and having expressions pushed down to the fact
- * queries.
- */
-@Slf4j
-class MultiFactHQLContext extends SimpleHQLContext {
-
-  private Set<CandidateFact> facts;
-  private CubeQueryContext query;
-  private Map<CandidateFact, SimpleHQLContext> factHQLContextMap = new HashMap<>();
-
-  MultiFactHQLContext(Set<CandidateFact> facts, Map<Dimension, CandidateDim> dimsToQuery,
-    Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws LensException {
-    super();
-    this.query = query;
-    this.facts = facts;
-    for (CandidateFact fact : facts) {
-      if (fact.getStorageTables().size() > 1) {
-        factHQLContextMap.put(fact, new SingleFactMultiStorageHQLContext(fact, dimsToQuery, query, fact));
-      } else {
-        factHQLContextMap.put(fact,
-          new SingleFactSingleStorageHQLContext(fact, dimsToQuery, factDimMap.get(fact), query,
-            DefaultQueryAST.fromCandidateFact(fact, fact.getStorageTables().iterator().next(), fact)));
-      }
-    }
-  }
-
-  protected void setMissingExpressions() throws LensException {
-    setSelect(getSelectString());
-    setFrom(getFromString());
-    setWhere(getWhereString());
-    setGroupby(getGroupbyString());
-    setHaving(getHavingString());
-    setOrderby(getOrderbyString());
-  }
-
-  private String getOrderbyString() {
-    return query.getOrderByString();
-  }
-
-  private String getHavingString() {
-    return null;
-  }
-
-  private String getGroupbyString() {
-    return null;
-  }
-
-  private String getWhereString() {
-    return query.getWhereString();
-  }
-
-  public String toHQL() throws LensException {
-    return query.getInsertClause() + super.toHQL();
-  }
-
-  private String getSelectString() throws LensException {
-    Map<Integer, List<Integer>> selectToFactIndex = new HashMap<>(query.getSelectAST().getChildCount());
-    int fi = 1;
-    for (CandidateFact fact : facts) {
-      for (int ind : fact.getSelectIndices()) {
-        if (!selectToFactIndex.containsKey(ind)) {
-          selectToFactIndex.put(ind, Lists.<Integer>newArrayList());
-        }
-        selectToFactIndex.get(ind).add(fi);
-      }
-      fi++;
-    }
-    StringBuilder select = new StringBuilder();
-    for (int i = 0; i < query.getSelectAST().getChildCount(); i++) {
-      if (selectToFactIndex.get(i) == null) {
-        throw new LensException(LensCubeErrorCode.EXPRESSION_NOT_IN_ANY_FACT.getLensErrorInfo(),
-          HQLParser.getString((ASTNode) query.getSelectAST().getChild(i)));
-      }
-      if (selectToFactIndex.get(i).size() == 1) {
-        select.append("mq").append(selectToFactIndex.get(i).get(0)).append(".")
-          .append(query.getSelectPhrases().get(i).getSelectAlias()).append(" ");
-      } else {
-        select.append("COALESCE(");
-        String sep = "";
-        for (Integer factIndex : selectToFactIndex.get(i)) {
-          select.append(sep).append("mq").append(factIndex).append(".").append(
-            query.getSelectPhrases().get(i).getSelectAlias());
-          sep = ", ";
-        }
-        select.append(") ");
-      }
-      select.append(query.getSelectPhrases().get(i).getFinalAlias());
-      if (i != query.getSelectAST().getChildCount() - 1) {
-        select.append(", ");
-      }
-    }
-    return select.toString();
-  }
-
-  private String getMultiFactJoinCondition(int i, String dim) {
-    StringBuilder joinCondition = new StringBuilder();
-    if (i <= 1) {
-      return "".toString();
-    } else {
-      joinCondition.append("mq").append(i - 2).append(".").append(dim).append(" <=> ").
-          append("mq").append(i - 1).append(".").append(dim);
-    }
-    return joinCondition.toString();
-  }
-
-  private String getFromString() throws LensException {
-    StringBuilder fromBuilder = new StringBuilder();
-    int aliasCount = 1;
-    String sep = "";
-    for (CandidateFact fact : facts) {
-      SimpleHQLContext facthql = factHQLContextMap.get(fact);
-      fromBuilder.append(sep).append("(").append(facthql.toHQL()).append(")").append(" mq").append(aliasCount++);
-      sep = " full outer join ";
-      if (!fact.getDimFieldIndices().isEmpty() && aliasCount > 2) {
-        fromBuilder.append(" on ");
-        Iterator<Integer> dimIter = fact.getDimFieldIndices().iterator();
-        while (dimIter.hasNext()) {
-          String dim = query.getSelectPhrases().get(dimIter.next()).getSelectAlias();
-          fromBuilder.append(getMultiFactJoinCondition(aliasCount, dim));
-          if (dimIter.hasNext()) {
-            fromBuilder.append(" AND ");
-          }
-        }
-      }
-    }
-    return fromBuilder.toString();
-  }
-
-
-  public static ASTNode convertHavingToWhere(ASTNode havingAST, CubeQueryContext context, Set<CandidateFact> cfacts,
-    AliasDecider aliasDecider) throws LensException {
-    if (havingAST == null) {
-      return null;
-    }
-    if (isAggregateAST(havingAST) || isTableColumnAST(havingAST) || isNonAggregateFunctionAST(havingAST)) {
-      // if already present in select, pick alias
-      String alias = null;
-      for (CandidateFact fact : cfacts) {
-        if (fact.isExpressionAnswerable(havingAST, context)) {
-          alias = fact.addAndGetAliasFromSelect(havingAST, aliasDecider);
-          return new ASTNode(new CommonToken(HiveParser.Identifier, alias));
-        }
-      }
-    }
-    if (havingAST.getChildren() != null) {
-      for (int i = 0; i < havingAST.getChildCount(); i++) {
-        ASTNode replaced = convertHavingToWhere((ASTNode) havingAST.getChild(i), context, cfacts, aliasDecider);
-        havingAST.setChild(i, replaced);
-      }
-    }
-    return havingAST;
-  }
-
-  public static ASTNode pushDownHaving(ASTNode ast, CubeQueryContext cubeQueryContext, Set<CandidateFact> cfacts)
-    throws LensException {
-    if (ast == null) {
-      return null;
-    }
-    if (ast.getType() == HiveParser.KW_AND || ast.getType() == HiveParser.TOK_HAVING) {
-      List<ASTNode> children = Lists.newArrayList();
-      for (Node child : ast.getChildren()) {
-        ASTNode newChild = pushDownHaving((ASTNode) child, cubeQueryContext, cfacts);
-        if (newChild != null) {
-          children.add(newChild);
-        }
-      }
-      if (children.size() == 0) {
-        return null;
-      } else if (children.size() == 1) {
-        return children.get(0);
-      } else {
-        ASTNode newASTNode = new ASTNode(ast.getToken());
-        for (ASTNode child : children) {
-          newASTNode.addChild(child);
-        }
-        return newASTNode;
-      }
-    }
-    if (isPrimitiveBooleanExpression(ast)) {
-      CandidateFact fact = pickFactToPushDown(ast, cubeQueryContext, cfacts);
-      if (fact == null) {
-        return ast;
-      }
-      fact.addToHaving(ast);
-      return null;
-    }
-    return ast;
-  }
-
-  private static CandidateFact pickFactToPushDown(ASTNode ast, CubeQueryContext cubeQueryContext, Set<CandidateFact>
-    cfacts) throws LensException {
-    for (CandidateFact fact : cfacts) {
-      if (fact.isExpressionAnswerable(ast, cubeQueryContext)) {
-        return fact;
-      }
-    }
-    return null;
-  }
-
-}

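For context on what was deleted: MultiFactHQLContext stitched per-fact subqueries into a single full-outer-join query, COALESCE-ing measure aliases that multiple facts could answer and joining on shared dimension aliases with <=>. A hedged sketch of the output shape it produced (table and alias names are illustrative; "..." elides the per-fact clauses):

class MultiFactShapeSketch {
  static String exampleShape() {
    return "SELECT COALESCE(mq1.msr_alias, mq2.msr_alias) `measure`, mq1.dim_alias `dim` "
        + "FROM (SELECT ... FROM fact1_storage ...) mq1 "
        + "full outer join (SELECT ... FROM fact2_storage ...) mq2 "
        + "on mq1.dim_alias <=> mq2.dim_alias";
  }
}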
http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
index 64a9626..b011e47 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
@@ -36,6 +36,8 @@ import lombok.extern.slf4j.Slf4j;
 @EqualsAndHashCode(callSuper = true)
 @Slf4j
 class QueriedPhraseContext extends TracksQueriedColumns implements TrackQueriedCubeFields {
+  // position in org.apache.lens.cube.parse.CubeQueryContext.queriedPhrases
+  private int position;
   private final ASTNode exprAST;
   private Boolean aggregate;
   private String expr;

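The new position field records where a phrase sits in CubeQueryContext.queriedPhrases, which plausibly lets candidates refer to answerable measure phrases by index (compare JoinCandidate.getAnswerableMeasurePhraseIndices above, which returns Set<Integer>). A small sketch of that indexing, with QueriedPhrase as a simplified stand-in:

import java.util.*;

class PhrasePositionSketch {
  static final class QueriedPhrase {
    final int position;   // index in CubeQueryContext.queriedPhrases
    final String expr;
    QueriedPhrase(int position, String expr) { this.position = position; this.expr = expr; }
  }

  public static void main(String[] args) {
    List<QueriedPhrase> queriedPhrases = Arrays.asList(
        new QueriedPhrase(0, "sum(msr1)"), new QueriedPhrase(1, "cityname"));
    Set<Integer> answerable = Collections.singleton(0); // as reported by a candidate
    System.out.println(answerable.contains(queriedPhrases.get(0).position)); // true
  }
}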
http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
index 62ceb12..77ebe82 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
@@ -18,14 +18,8 @@
  */
 package org.apache.lens.cube.parse;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
 import org.apache.lens.server.api.error.LensException;
 
-import org.apache.commons.lang.StringUtils;
-
 import lombok.Data;
 import lombok.extern.slf4j.Slf4j;
 
@@ -50,7 +44,7 @@ public abstract class SimpleHQLContext implements HQLContextInterface {
   }
 
   SimpleHQLContext(String select, String from, String where, String groupby, String orderby, String having,
-    Integer limit) {
+                   Integer limit) {
     this.select = select;
     this.from = from;
     this.where = where;
@@ -73,6 +67,7 @@ public abstract class SimpleHQLContext implements HQLContextInterface {
    * <p></p>
    * Leaving this empty implementation for the case of all expressions being passed in constructor. If other
    * constructors are used the missing expressions should be set here
+   *
    * @throws LensException
    */
   protected void setMissingExpressions() throws LensException {
@@ -80,57 +75,6 @@ public abstract class SimpleHQLContext implements HQLContextInterface {
 
   public String toHQL() throws LensException {
     setMissingExpressions();
-    String qfmt = getQueryFormat();
-    Object[] queryTreeStrings = getQueryTreeStrings();
-    if (log.isDebugEnabled()) {
-      log.debug("qfmt: {} Query strings: {}", qfmt, Arrays.toString(queryTreeStrings));
-    }
-    String baseQuery = String.format(qfmt, queryTreeStrings);
-    return baseQuery;
-  }
-
-  private String[] getQueryTreeStrings() throws LensException {
-    List<String> qstrs = new ArrayList<String>();
-    qstrs.add(select);
-    qstrs.add(from);
-    if (!StringUtils.isBlank(where)) {
-      qstrs.add(where);
-    }
-    if (!StringUtils.isBlank(groupby)) {
-      qstrs.add(groupby);
-    }
-    if (!StringUtils.isBlank(having)) {
-      qstrs.add(having);
-    }
-    if (!StringUtils.isBlank(orderby)) {
-      qstrs.add(orderby);
-    }
-    if (limit != null) {
-      qstrs.add(String.valueOf(limit));
-    }
-    return qstrs.toArray(new String[0]);
-  }
-
-  private final String baseQueryFormat = "SELECT %s FROM %s";
-
-  private String getQueryFormat() {
-    StringBuilder queryFormat = new StringBuilder();
-    queryFormat.append(baseQueryFormat);
-    if (!StringUtils.isBlank(where)) {
-      queryFormat.append(" WHERE %s");
-    }
-    if (!StringUtils.isBlank(groupby)) {
-      queryFormat.append(" GROUP BY %s");
-    }
-    if (!StringUtils.isBlank(having)) {
-      queryFormat.append(" HAVING %s");
-    }
-    if (!StringUtils.isBlank(orderby)) {
-      queryFormat.append(" ORDER BY %s");
-    }
-    if (limit != null) {
-      queryFormat.append(" LIMIT %s");
-    }
-    return queryFormat.toString();
+    return CandidateUtil.buildHQLString(select, from, where, groupby, orderby, having, limit);
   }
 }

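toHQL() now delegates clause assembly to CandidateUtil.buildHQLString. Reconstructed from the format-string logic deleted above, a self-contained sketch of what that helper is expected to do (blank clauses are skipped, LIMIT is appended only when non-null; the parameter order follows the new call site):

class BuildHqlSketch {
  private static boolean blank(String s) {
    return s == null || s.trim().isEmpty();
  }

  // Clause order follows the deleted getQueryFormat():
  // WHERE, GROUP BY, HAVING, ORDER BY, LIMIT.
  static String buildHQLString(String select, String from, String where,
      String groupby, String orderby, String having, Integer limit) {
    StringBuilder q = new StringBuilder("SELECT ").append(select).append(" FROM ").append(from);
    if (!blank(where)) {
      q.append(" WHERE ").append(where);
    }
    if (!blank(groupby)) {
      q.append(" GROUP BY ").append(groupby);
    }
    if (!blank(having)) {
      q.append(" HAVING ").append(having);
    }
    if (!blank(orderby)) {
      q.append(" ORDER BY ").append(orderby);
    }
    if (limit != null) {
      q.append(" LIMIT ").append(limit);
    }
    return q.toString();
  }
}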
http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
deleted file mode 100644
index 9b48213..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.lens.cube.parse;
-
-import static org.apache.lens.cube.parse.CubeQueryConfUtil.DEFAULT_ENABLE_STORAGES_UNION;
-import static org.apache.lens.cube.parse.CubeQueryConfUtil.ENABLE_STORAGES_UNION;
-import static org.apache.lens.cube.parse.HQLParser.*;
-
-import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.cube.metadata.MetastoreUtil;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-
-import org.antlr.runtime.CommonToken;
-
-public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
-
-  private final QueryAST ast;
-
-  private Map<HashableASTNode, ASTNode> innerToOuterASTs = new HashMap<>();
-  private AliasDecider aliasDecider = new DefaultAliasDecider();
-
-  SingleFactMultiStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery,
-    CubeQueryContext query, QueryAST ast)
-    throws LensException {
-    super(query, fact);
-    if (!query.getConf().getBoolean(ENABLE_STORAGES_UNION, DEFAULT_ENABLE_STORAGES_UNION)) {
-      throw new LensException(LensCubeErrorCode.STORAGE_UNION_DISABLED.getLensErrorInfo());
-    }
-    this.ast = ast;
-    processSelectAST();
-    processGroupByAST();
-    processHavingAST();
-    processOrderByAST();
-    processLimit();
-    setHqlContexts(getUnionContexts(fact, dimsToQuery, query, ast));
-  }
-
-  private void processSelectAST() {
-    ASTNode originalSelectAST = MetastoreUtil.copyAST(ast.getSelectAST());
-    ast.setSelectAST(new ASTNode(originalSelectAST.getToken()));
-    ASTNode outerSelectAST = processSelectExpression(originalSelectAST);
-    setSelect(getString(outerSelectAST));
-  }
-
-  private void processGroupByAST() {
-    if (ast.getGroupByAST() != null) {
-      setGroupby(getString(processExpression(ast.getGroupByAST())));
-    }
-  }
-
-  private void processHavingAST() throws LensException {
-    if (ast.getHavingAST() != null) {
-      setHaving(getString(processExpression(ast.getHavingAST())));
-      ast.setHavingAST(null);
-    }
-  }
-
-
-  private void processOrderByAST() {
-    if (ast.getOrderByAST() != null) {
-      setOrderby(getString(processOrderbyExpression(ast.getOrderByAST())));
-      ast.setOrderByAST(null);
-    }
-  }
-
-  private void processLimit() {
-    setLimit(ast.getLimitValue());
-    ast.setLimitValue(null);
-  }
-
-  private ASTNode processExpression(ASTNode astNode) {
-    if (astNode == null) {
-      return null;
-    }
-    ASTNode outerExpression = new ASTNode(astNode);
-    // iterate over all children of the ast and get outer ast corresponding to it.
-    for (Node child : astNode.getChildren()) {
-      outerExpression.addChild(getOuterAST((ASTNode)child));
-    }
-    return outerExpression;
-  }
-
-  private ASTNode processSelectExpression(ASTNode astNode) {
-    if (astNode == null) {
-      return null;
-    }
-    ASTNode outerExpression = new ASTNode(astNode);
-    // iterate over all children of the ast and get outer ast corresponding to it.
-    for (Node node : astNode.getChildren()) {
-      ASTNode child = (ASTNode)node;
-      ASTNode outerSelect = new ASTNode(child);
-      ASTNode selectExprAST = (ASTNode)child.getChild(0);
-      ASTNode outerAST = getOuterAST(selectExprAST);
-      outerSelect.addChild(outerAST);
-
-      // has an alias? add it
-      if (child.getChildCount() > 1) {
-        outerSelect.addChild(child.getChild(1));
-      }
-      outerExpression.addChild(outerSelect);
-    }
-    return outerExpression;
-  }
-
-  private ASTNode processOrderbyExpression(ASTNode astNode) {
-    if (astNode == null) {
-      return null;
-    }
-    ASTNode outerExpression = new ASTNode(astNode);
-    // sample orderby AST looks the following :
-    /*
-    TOK_ORDERBY
-   TOK_TABSORTCOLNAMEDESC
-      TOK_NULLS_LAST
-         .
-            TOK_TABLE_OR_COL
-               testcube
-            cityid
-   TOK_TABSORTCOLNAMEASC
-      TOK_NULLS_FIRST
-         .
-            TOK_TABLE_OR_COL
-               testcube
-            stateid
-   TOK_TABSORTCOLNAMEASC
-      TOK_NULLS_FIRST
-         .
-            TOK_TABLE_OR_COL
-               testcube
-            zipcode
-     */
-    for (Node node : astNode.getChildren()) {
-      ASTNode child = (ASTNode)node;
-      ASTNode outerOrderby = new ASTNode(child);
-      ASTNode tokNullsChild = (ASTNode) child.getChild(0);
-      ASTNode outerTokNullsChild = new ASTNode(tokNullsChild);
-      outerTokNullsChild.addChild(getOuterAST((ASTNode)tokNullsChild.getChild(0)));
-      outerOrderby.addChild(outerTokNullsChild);
-      outerExpression.addChild(outerOrderby);
-    }
-    return outerExpression;
-  }
-  /*
-
-  Perform a DFS on the provided AST, and Create an AST of similar structure with changes specific to the
-  inner query - outer query dynamics. The resultant AST is supposed to be used in outer query.
-
-  Base cases:
-   1. ast is null => null
-   2. ast is aggregate_function(table.column) => add aggregate_function(table.column) to inner select expressions,
-            generate alias, return aggregate_function(cube.alias). Memoize the mapping
-            aggregate_function(table.column) => aggregate_function(cube.alias)
-            Assumption is aggregate_function is transitive i.e. f(a,b,c,d) = f(f(a,b), f(c,d)). SUM, MAX, MIN etc
-            are transitive, while AVG, COUNT etc are not. For non-transitive aggregate functions, the re-written
-            query will be incorrect.
-   3. ast has aggregates - iterate over children and add the non aggregate nodes as is and recursively get outer ast
-   for aggregate.
-   4. If no aggregates, simply select its alias in outer ast.
-   5. If given ast is memorized as mentioned in the above cases, return the mapping.
-   */
-  private ASTNode getOuterAST(ASTNode astNode) {
-    if (astNode == null) {
-      return null;
-    }
-    if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
-      return innerToOuterASTs.get(new HashableASTNode(astNode));
-    }
-    if (isAggregateAST(astNode)) {
-      return processAggregate(astNode);
-    } else if (hasAggregate(astNode)) {
-      ASTNode outerAST = new ASTNode(astNode);
-      for (Node child : astNode.getChildren()) {
-        ASTNode childAST = (ASTNode) child;
-        if (hasAggregate(childAST)) {
-          outerAST.addChild(getOuterAST(childAST));
-        } else {
-          outerAST.addChild(childAST);
-        }
-      }
-      return outerAST;
-    } else {
-      ASTNode innerSelectASTWithoutAlias = MetastoreUtil.copyAST(astNode);
-      ASTNode innerSelectExprAST = new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR));
-      innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
-      String alias = aliasDecider.decideAlias(astNode);
-      ASTNode aliasNode = new ASTNode(new CommonToken(Identifier, alias));
-      innerSelectExprAST.addChild(aliasNode);
-      addToInnerSelectAST(innerSelectExprAST);
-      ASTNode outerAST = getDotAST(query.getCube().getName(), alias);
-      innerToOuterASTs.put(new HashableASTNode(innerSelectASTWithoutAlias), outerAST);
-      return outerAST;
-    }
-  }
-
-  private ASTNode processAggregate(ASTNode astNode) {
-    ASTNode innerSelectASTWithoutAlias = MetastoreUtil.copyAST(astNode);
-    ASTNode innerSelectExprAST = new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR));
-    innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
-    String alias = aliasDecider.decideAlias(astNode);
-    ASTNode aliasNode = new ASTNode(new CommonToken(Identifier, alias));
-    innerSelectExprAST.addChild(aliasNode);
-    addToInnerSelectAST(innerSelectExprAST);
-    ASTNode dotAST = getDotAST(query.getCube().getName(), alias);
-    ASTNode outerAST = new ASTNode(new CommonToken(TOK_FUNCTION));
-    // TODO: take care of non-decomposable aggregate functions
-    outerAST.addChild(new ASTNode(new CommonToken(Identifier, astNode.getChild(0).getText())));
-    outerAST.addChild(dotAST);
-    innerToOuterASTs.put(new HashableASTNode(innerSelectASTWithoutAlias), outerAST);
-    return outerAST;
-  }
-
-  private void addToInnerSelectAST(ASTNode selectExprAST) {
-    if (ast.getSelectAST() == null) {
-      ast.setSelectAST(new ASTNode(new CommonToken(TOK_SELECT)));
-    }
-    ast.getSelectAST().addChild(selectExprAST);
-  }
-
-  private static ArrayList<HQLContextInterface> getUnionContexts(CandidateFact fact, Map<Dimension, CandidateDim>
-    dimsToQuery, CubeQueryContext query, QueryAST ast)
-    throws LensException {
-    ArrayList<HQLContextInterface> contexts = new ArrayList<>();
-    String alias = query.getAliasForTableName(query.getCube().getName());
-    for (String storageTable : fact.getStorageTables()) {
-      SingleFactSingleStorageHQLContext ctx = new SingleFactSingleStorageHQLContext(fact, storageTable + " " + alias,
-        dimsToQuery, query, DefaultQueryAST.fromCandidateFact(fact, storageTable, ast));
-      contexts.add(ctx);
-    }
-    return contexts;
-  }
-}

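The decomposability assumption called out in the comment above is easy to check concretely. The following self-contained sketch (plain Java, not Lens code; the values are made up) shows why SUM can be recomputed from per-storage partial results while AVG cannot, which is exactly why the rewritten inner/outer query is only correct for decomposable aggregate functions:

  import java.util.Arrays;
  import java.util.List;

  public class DecomposableAggregates {
    public static void main(String[] args) {
      // Two "inner query" result sets, e.g. one per storage table.
      List<int[]> partitions = Arrays.asList(new int[] {1, 2}, new int[] {3, 4, 5});

      // SUM is decomposable: the sum of per-partition sums equals the global sum.
      int sumOfSums = partitions.stream().mapToInt(p -> Arrays.stream(p).sum()).sum();
      System.out.println(sumOfSums); // 15, same as summing 1..5 directly

      // AVG is not: averaging per-partition averages is generally wrong.
      double avgOfAvgs = partitions.stream()
        .mapToDouble(p -> Arrays.stream(p).average().orElse(0))
        .average().orElse(0);
      System.out.println(avgOfAvgs); // 2.75, but the true average of 1..5 is 3.0
    }
  }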
http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
deleted file mode 100644
index dbc84ed..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.server.api.error.LensException;
-
-/**
- * HQL context class which delegates all query string generation to DimOnlyHQLContext and works with the fact
- * being queried.
- * <p/>
- * Updates the from string with the join clause expanded.
- */
-class SingleFactSingleStorageHQLContext extends DimOnlyHQLContext {
-
-  private final CandidateFact fact;
-  private String storageAlias;
-
-  SingleFactSingleStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery,
-    CubeQueryContext query, QueryAST ast)
-    throws LensException {
-    this(fact, dimsToQuery, dimsToQuery.keySet(), query, ast);
-  }
-
-  SingleFactSingleStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery,
-    Set<Dimension> dimsQueried, CubeQueryContext query, QueryAST ast)
-    throws LensException {
-    super(dimsToQuery, dimsQueried, query, ast);
-    this.fact = fact;
-  }
-
-  SingleFactSingleStorageHQLContext(CandidateFact fact, String storageAlias, Map<Dimension, CandidateDim> dimsToQuery,
-    CubeQueryContext query, QueryAST ast) throws LensException {
-    this(fact, dimsToQuery, query, ast);
-    this.storageAlias = storageAlias;
-  }
-
-  @Override
-  protected String getFromTable() throws LensException {
-    if (getQuery().isAutoJoinResolved()) {
-      if (storageAlias != null) {
-        return storageAlias;
-      } else {
-        return fact.getStorageString(query.getAliasForTableName(query.getCube().getName()));
-      }
-    } else {
-      if (fact.getStorageTables().size() == 1) {
-        return getQuery().getQBFromString(fact, getDimsToQuery());
-      } else {
-        return storageAlias;
-      }
-    }
-  }
-}

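For context on what is being removed: getUnionContexts above created one SingleFactSingleStorageHQLContext per storage table, and the per-storage queries were then combined into a union. A rough sketch of that shape (hypothetical table, column, and class names, not the actual Lens query writer):

  import java.util.Arrays;
  import java.util.List;
  import java.util.stream.Collectors;

  public class UnionShapeSketch {
    // Stand-in for the HQL produced by one per-storage context.
    static String queryForStorage(String storageTable, String alias) {
      return "SELECT " + alias + ".cityid, sum(" + alias + ".msr2) FROM "
        + storageTable + " " + alias + " GROUP BY " + alias + ".cityid";
    }

    public static void main(String[] args) {
      List<String> storageTables = Arrays.asList("c1_testfact_daily", "c2_testfact_hourly");
      String union = storageTables.stream()
        .map(t -> queryForStorage(t, "testcube"))
        .collect(Collectors.joining(" UNION ALL "));
      System.out.println(union);
    }
  }

With this refactor, the equivalent per-storage state (from string, where string, participating partitions) moves onto StorageCandidate itself, as the next diff shows.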
http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
index 22038f3..636b1d0 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
@@ -18,6 +18,7 @@
  */
 package org.apache.lens.cube.parse;
 
+import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.*;
 import static org.apache.lens.cube.parse.StorageUtil.*;
 
@@ -31,8 +32,13 @@ import org.apache.lens.server.api.metastore.DataCompletenessChecker;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.util.ReflectionUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import org.antlr.runtime.CommonToken;
 
 import com.google.common.collect.Sets;
 import lombok.Getter;
@@ -45,6 +51,7 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class StorageCandidate implements Candidate, CandidateTable {
 
+  // TODO union : Put comments on member variables.
   @Getter
   private final CubeQueryContext cubeql;
   private final TimeRangeWriter rangeWriter;
@@ -57,9 +64,11 @@ public class StorageCandidate implements Candidate, CandidateTable {
   /**
    * Valid udpate periods populated by Phase 1.
    */
+  @Getter
   private TreeSet<UpdatePeriod> validUpdatePeriods = new TreeSet<>();
   private Configuration conf = null;
-  private Map<String, Map<String, Float>> incompleteMeasureData = new HashMap<>();
+  @Getter
+  private Map<String, Map<String, Float>> dataCompletenessMap = new HashMap<>();
   private SimpleDateFormat partWhereClauseFormat = null;
   /**
    * Participating fact, storage and dimensions for this StorageCandidate
@@ -68,10 +77,24 @@ public class StorageCandidate implements Candidate, CandidateTable {
   private CubeFactTable fact;
   @Getter
   private String storageName;
+  @Getter
+  @Setter
+  private QueryAST queryAst;
   private Map<Dimension, CandidateDim> dimensions;
+  @Getter
   private Map<TimeRange, String> rangeToWhere = new LinkedHashMap<>();
   @Getter
+  @Setter
+  private String whereString;
+  @Getter
+  private final Set<Integer> answerableMeasurePhraseIndices = Sets.newHashSet();
+  @Getter
+  @Setter
+  private String fromString;
+  @Getter
   private CubeInterface cube;
+  @Getter
+  Map<Dimension, CandidateDim> dimsToQuery;
   /**
    * Cached fact columns
    */
@@ -86,17 +109,17 @@ public class StorageCandidate implements Candidate, CandidateTable {
   /**
    * Partition calculated by getPartition() method.
    */
-  private Set<FactPartition> storagePartitions = new HashSet<>();
+  @Getter
+  private Set<FactPartition> participatingPartitions = new HashSet<>();
   /**
    * Non existing partitions
    */
   private Set<String> nonExistingPartitions = new HashSet<>();
   @Getter
-  private String alias = null;
+  private int numQueriedParts = 0;
 
-  public StorageCandidate(CubeInterface cube, CubeFactTable fact, String storageName, String alias,
-    CubeQueryContext cubeql) {
-    if ((cube == null) || (fact == null) || (storageName == null) || (alias == null)) {
+  public StorageCandidate(CubeInterface cube, CubeFactTable fact, String storageName, CubeQueryContext cubeql) {
+    if ((cube == null) || (fact == null) || (storageName == null)) {
       throw new IllegalArgumentException("Cube, fact and storageName should be non-null");
     }
     this.cube = cube;
@@ -104,7 +127,6 @@ public class StorageCandidate implements Candidate, CandidateTable {
     this.cubeql = cubeql;
     this.storageName = storageName;
     this.conf = cubeql.getConf();
-    this.alias = alias;
     this.name = MetastoreUtil.getFactOrDimtableStorageTableName(fact.getName(), storageName);
     rangeWriter = ReflectionUtils.newInstance(conf
       .getClass(CubeQueryConfUtil.TIME_RANGE_WRITER_CLASS, CubeQueryConfUtil.DEFAULT_TIME_RANGE_WRITER,
@@ -120,19 +142,53 @@ public class StorageCandidate implements Candidate, CandidateTable {
       .getFloat(CubeQueryConfUtil.COMPLETENESS_THRESHOLD, CubeQueryConfUtil.DEFAULT_COMPLETENESS_THRESHOLD);
   }
 
-  @Override
-  public String toHQL() {
-    return null;
+  public StorageCandidate(StorageCandidate sc) {
+    this(sc.getCube(), sc.getFact(), sc.getStorageName(), sc.getCubeql());
+    // Copy update periods.
+    for (UpdatePeriod updatePeriod : sc.getValidUpdatePeriods()) {
+      this.validUpdatePeriods.add(updatePeriod);
+    }
   }
 
-  @Override
-  public QueryAST getQueryAst() {
-    return null;
+  static boolean containsAny(Collection<String> srcSet, Collection<String> colSet) {
+    if (colSet == null || colSet.isEmpty()) {
+      return true;
+    }
+    for (String column : colSet) {
+      if (srcSet.contains(column)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  private void setMissingExpressions() throws LensException {
+    setFromString(String.format("%s", getFromTable()));
+    setWhereString(joinWithAnd(whereString, null));
+    if (cubeql.getHavingAST() != null) {
+      queryAst.setHavingAST(MetastoreUtil.copyAST(cubeql.getHavingAST()));
+    }
+  }
+
+  public void setAnswerableMeasurePhraseIndices(int index) {
+    answerableMeasurePhraseIndices.add(index);
+  }
+
+  public String toHQL() throws LensException {
+    setMissingExpressions();
+    // Check if the picked candidate is a StorageCandidate and, in that case,
+    // update the selectAST with the final alias.
+    if (this == cubeql.getPickedCandidate()) {
+      CandidateUtil.updateFinalAlias(queryAst.getSelectAST(), cubeql);
+    }
+    return CandidateUtil
+      .buildHQLString(queryAst.getSelectString(), fromString, whereString, queryAst.getGroupByString(),
+        queryAst.getOrderByString(), queryAst.getHavingString(), queryAst.getLimitValue());
   }
 
   @Override
   public String getStorageString(String alias) {
-    return null;
+    return storageName + " " + alias;
   }
 
   @Override
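toHQL above stitches the final query out of the candidate's clause strings via CandidateUtil.buildHQLString. The real helper lives in CandidateUtil; as a mental model only, the assembly presumably amounts to appending each non-empty clause in SQL order, roughly like this sketch (all names and the sample arguments are illustrative):

  public class HqlAssemblySketch {
    // Sketch only; the actual logic lives in CandidateUtil.buildHQLString.
    static String buildHql(String select, String from, String where, String groupBy,
      String orderBy, String having, Integer limit) {
      StringBuilder sb = new StringBuilder("SELECT ").append(select).append(" FROM ").append(from);
      if (where != null && !where.isEmpty()) {
        sb.append(" WHERE ").append(where);
      }
      if (groupBy != null && !groupBy.isEmpty()) {
        sb.append(" GROUP BY ").append(groupBy);
      }
      if (having != null && !having.isEmpty()) {
        sb.append(" HAVING ").append(having);
      }
      if (orderBy != null && !orderBy.isEmpty()) {
        sb.append(" ORDER BY ").append(orderBy);
      }
      if (limit != null) {
        sb.append(" LIMIT ").append(limit);
      }
      return sb.toString();
    }

    public static void main(String[] args) {
      System.out.println(buildHql("testcube.cityid, sum(testcube.msr2)",
        "c1_testfact testcube", "testcube.dt = '2017-02-01'",
        "testcube.cityid", null, null, 100));
    }
  }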
@@ -158,6 +214,7 @@ public class StorageCandidate implements Candidate, CandidateTable {
 
   @Override
   public Date getStartTime() {
+    // TODO union : get storage stat time and take max out of it
     return fact.getStartTime();
   }
 
@@ -211,6 +268,8 @@ public class StorageCandidate implements Candidate, CandidateTable {
    *
   * 4. If the monthly partitions are found, check for lookahead partitions and call getPartitions recursively for
   * the remaining time intervals, i.e. [15 Sep - 1 Oct) and [1 Dec - 15 Dec) (see the sketch below)
+   *
+   * TODO union : Move this into util.
    */
   private boolean getPartitions(Date fromDate, Date toDate, String partCol, Set<FactPartition> partitions,
     TreeSet<UpdatePeriod> updatePeriods, boolean addNonExistingParts, boolean failOnPartialData,
@@ -227,25 +286,23 @@ public class StorageCandidate implements Candidate, CandidateTable {
     if (interval == UpdatePeriod.CONTINUOUS && rangeWriter.getClass().equals(BetweenTimeRangeWriter.class)) {
       FactPartition part = new FactPartition(partCol, fromDate, interval, null, partWhereClauseFormat);
       partitions.add(part);
-      part.getStorageTables().add(name);
+      part.getStorageTables().add(storageName);
       part = new FactPartition(partCol, toDate, interval, null, partWhereClauseFormat);
       partitions.add(part);
-      part.getStorageTables().add(name);
-      log.info("Added continuous fact partition for storage table {}", name);
+      part.getStorageTables().add(storageName);
+      log.info("Added continuous fact partition for storage table {}", storageName);
       return true;
     }
 
     if (!client.isStorageTableCandidateForRange(name, fromDate, toDate)) {
       cubeql.addStoragePruningMsg(this,
         new CandidateTablePruneCause(CandidateTablePruneCause.CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE));
-      //      skipStorageCauses.put(name, new CandidateTablePruneCause.SkipStorageCause(RANGE_NOT_ANSWERABLE));
       return false;
     } else if (!client.partColExists(name, partCol)) {
       log.info("{} does not exist in {}", partCol, name);
-      //      skipStorageCauses.put(name, CandidateTablePruneCause.SkipStorageCause.partColDoesNotExist(partCol));
       List<String> missingCols = new ArrayList<>();
       missingCols.add(partCol);
-      cubeql.addStoragePruningMsg(this, partitionColumnsMissing(missingCols));
+      //      cubeql.addStoragePruningMsg(this, partitionColumnsMissing(missingCols));
       return false;
     }
 
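The coarsest-first recursion described in the getPartitions comment above can be illustrated with day-indexed intervals in place of Dates. This simplified, self-contained sketch (it ignores lookahead/process-time partitions and the missing-partition bookkeeping) covers the middle of the range with the coarsest update period that fits, then recurses on the leading and trailing remainders with finer periods:

  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.List;

  public class PartitionRecursionSketch {
    // Intervals are day indices; granularities are period lengths in days, coarsest first.
    static void cover(int from, int to, List<Integer> granularities, List<String> out) {
      if (from >= to || granularities.isEmpty()) {
        return;
      }
      int g = granularities.get(0);            // coarsest remaining update period
      int ceilFrom = ((from + g - 1) / g) * g; // first aligned boundary >= from
      int floorTo = (to / g) * g;              // last aligned boundary <= to
      if (ceilFrom >= floorTo) {               // coarse period doesn't fit; try finer ones
        cover(from, to, granularities.subList(1, granularities.size()), out);
        return;
      }
      for (int d = ceilFrom; d < floorTo; d += g) {
        out.add("period=" + g + " start=" + d);
      }
      // Recurse on the leading and trailing remainders with finer periods.
      cover(from, ceilFrom, granularities.subList(1, granularities.size()), out);
      cover(floorTo, to, granularities.subList(1, granularities.size()), out);
    }

    public static void main(String[] args) {
      List<String> parts = new ArrayList<>();
      cover(14, 105, Arrays.asList(30, 7, 1), parts); // roughly [15 Sep, 15 Dec) in days
      parts.forEach(System.out::println);
    }
  }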
@@ -303,7 +360,7 @@ public class StorageCandidate implements Candidate, CandidateTable {
                 log.debug("Looking for process time partitions between {} and {}", pdt, nextPdt);
                 Set<FactPartition> processTimeParts = getPartitions(
                   TimeRange.getBuilder().fromDate(pdt).toDate(nextPdt).partitionColumn(processTimePartCol).build(),
-                  newset, true, false, missingPartitions);
+                  newset, true, failOnPartialData, missingPartitions);
                 log.debug("Look ahead partitions: {}", processTimeParts);
                 TimeRange timeRange = TimeRange.getBuilder().fromDate(dt).toDate(nextDt).build();
                 for (FactPartition pPart : processTimeParts) {
@@ -334,12 +391,12 @@ public class StorageCandidate implements Candidate, CandidateTable {
            // Add non-existing partitions in all cases, whether we populate all non-existing ones or not.
             missingPartitions.add(part);
             if (!failOnPartialData) {
-              if (client.isStorageTablePartitionACandidate(name, part.getPartSpec())) {
+              if (!client.isStorageTablePartitionACandidate(name, part.getPartSpec())) {
                 log.info("Storage tables not eligible");
                 return false;
               }
               partitions.add(part);
-              part.getStorageTables().add(name);
+              part.getStorageTables().add(storageName);
             }
           } else {
             log.info("No finer granual partitions exist for {}", part);
@@ -367,13 +424,14 @@ public class StorageCandidate implements Candidate, CandidateTable {
    * 2. getPartitions for timeRange and validUpdatePeriods
    */
   @Override
-  public boolean evaluateCompleteness(TimeRange timeRange, boolean failOnPartialData) throws LensException {
+  public boolean evaluateCompleteness(TimeRange timeRange, TimeRange parentTimeRange, boolean failOnPartialData)
+    throws LensException {
     // Check the measure tags.
     if (!evaluateMeasuresCompleteness(timeRange)) {
       log
-        .info("Fact table:{} has partitions with incomplete data: {} for given ranges: {}", fact, incompleteMeasureData,
+        .info("Fact table:{} has partitions with incomplete data: {} for given ranges: {}", fact, dataCompletenessMap,
           cubeql.getTimeRanges());
-      cubeql.addStoragePruningMsg(this, incompletePartitions(incompleteMeasureData));
+      cubeql.addStoragePruningMsg(this, incompletePartitions(dataCompletenessMap));
       if (failOnPartialData) {
         return false;
       }
@@ -387,15 +445,18 @@ public class StorageCandidate implements Candidate, CandidateTable {
     Set<FactPartition> rangeParts = getPartitions(timeRange, validUpdatePeriods, true, failOnPartialData, missingParts);
     String partCol = timeRange.getPartitionColumn();
     boolean partColNotSupported = rangeParts.isEmpty();
-    String storageTableName = getStorageName();
+    String storageTableName = getName();
+
     if (storagePruningMsgs.containsKey(storageTableName)) {
       List<CandidateTablePruneCause> causes = storagePruningMsgs.get(storageTableName);
       // Find the PART_COL_DOES_NOT_EXISTS
       for (CandidateTablePruneCause cause : causes) {
         if (cause.getCause().equals(CandidateTablePruneCode.PART_COL_DOES_NOT_EXIST)) {
-          partColNotSupported = cause.getNonExistantPartCols().contains(partCol);
+          partColNotSupported &= cause.getNonExistantPartCols().contains(partCol);
         }
       }
+    } else {
+      partColNotSupported = false;
     }
     TimeRange prevRange = timeRange;
     String sep = "";
@@ -421,6 +482,7 @@ public class StorageCandidate implements Candidate, CandidateTable {
         break;
       }
     }
+    numQueriedParts += rangeParts.size();
     if (!unsupportedTimeDims.isEmpty()) {
       log.info("Not considering fact table:{} as it doesn't support time dimensions: {}", this.getFact(),
         unsupportedTimeDims);
@@ -436,15 +498,15 @@ public class StorageCandidate implements Candidate, CandidateTable {
     }
     String extraWhere = extraWhereClauseFallback.toString();
     if (!StringUtils.isEmpty(extraWhere)) {
-      rangeToWhere.put(timeRange, "((" + rangeWriter
+      rangeToWhere.put(parentTimeRange, "((" + rangeWriter
         .getTimeRangeWhereClause(cubeql, cubeql.getAliasForTableName(cubeql.getCube().getName()), rangeParts)
         + ") and  (" + extraWhere + "))");
     } else {
-      rangeToWhere.put(timeRange, rangeWriter
+      rangeToWhere.put(parentTimeRange, rangeWriter
         .getTimeRangeWhereClause(cubeql, cubeql.getAliasForTableName(cubeql.getCube().getName()), rangeParts));
     }
-    // Add all the partitions. storagePartitions contains all the partitions for previous time ranges also.
-    this.storagePartitions.addAll(rangeParts);
+    // Add all the partitions. participatingPartitions contains all the partitions for previous time ranges also.
+    this.participatingPartitions.addAll(rangeParts);
     return true;
   }
 
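The where clause that evaluateCompleteness stores per parent time range is either the plain partition clause, or its conjunction with the fallback clause when an unsupported time dimension had to be rewritten. The string shapes below are illustrative only; the actual clauses come from the configured TimeRangeWriter, and the time-dimension names are hypothetical:

  public class RangeWhereShape {
    public static void main(String[] args) {
      String rangeWhere = "testcube.dt in ('2017-02-01', '2017-02-02')";
      String extraWhere = "testcube.it >= '2017-02-01' and testcube.it < '2017-02-03'";
      String stored = extraWhere.isEmpty()
        ? rangeWhere
        : "((" + rangeWhere + ") and  (" + extraWhere + "))";
      System.out.println(stored);
    }
  }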
@@ -457,7 +519,7 @@ public class StorageCandidate implements Candidate, CandidateTable {
     Set<String> measureTag = new HashSet<>();
     Map<String, String> tagToMeasureOrExprMap = new HashMap<>();
 
-    processMeasuresFromExprMeasures(cubeql, measureTag, tagToMeasureOrExprMap);
+    processExpressionsForCompleteness(cubeql, measureTag, tagToMeasureOrExprMap);
 
     Set<String> measures = cubeql.getQueriedMsrs();
     if (measures == null) {
@@ -491,10 +553,10 @@ public class StorageCandidate implements Candidate, CandidateTable {
             log.info("Completeness for the measure_tag {} is {}, threshold: {}, for the hour {}", tag,
               completenessResult.getValue(), completenessThreshold, formatter.format(completenessResult.getKey()));
             String measureorExprFromTag = tagToMeasureOrExprMap.get(tag);
-            Map<String, Float> incompletePartition = incompleteMeasureData.get(measureorExprFromTag);
+            Map<String, Float> incompletePartition = dataCompletenessMap.get(measureorExprFromTag);
             if (incompletePartition == null) {
               incompletePartition = new HashMap<>();
-              incompleteMeasureData.put(measureorExprFromTag, incompletePartition);
+              dataCompletenessMap.put(measureorExprFromTag, incompletePartition);
             }
             incompletePartition.put(formatter.format(completenessResult.getKey()), completenessResult.getValue());
             isDataComplete = true;
@@ -518,15 +580,49 @@ public class StorageCandidate implements Candidate, CandidateTable {
   }
 
   @Override
-  public Set<FactPartition> getParticipatingPartitions() {
-    return null;
-  }
-
-  @Override
   public boolean isExpressionEvaluable(ExpressionResolver.ExpressionContext expr) {
     return expr.isEvaluable(this);
   }
 
+  /**
+   * Update the select AST for this StorageCandidate:
+   * 1. Delete a projected select expression if it is not answerable by this StorageCandidate.
+   * 2. Replace the queried alias with the select alias if the two differ in a select expression.
+   *
+   * @param cubeql the cube query context
+   * @throws LensException
+   */
+  public void updateAnswerableSelectColumns(CubeQueryContext cubeql) throws LensException {
+    // update select AST with selected fields
+    int currentChild = 0;
+    for (int i = 0; i < cubeql.getSelectAST().getChildCount(); i++) {
+      ASTNode selectExpr = (ASTNode) queryAst.getSelectAST().getChild(currentChild);
+      Set<String> exprCols = HQLParser.getColsInExpr(cubeql.getAliasForTableName(cubeql.getCube()), selectExpr);
+      if (getColumns().containsAll(exprCols)) {
+        ASTNode aliasNode = HQLParser.findNodeByPath(selectExpr, Identifier);
+        String alias = cubeql.getSelectPhrases().get(i).getSelectAlias();
+        if (aliasNode != null) {
+          String queryAlias = aliasNode.getText();
+          if (!queryAlias.equals(alias)) {
+            // replace the alias node
+            ASTNode newAliasNode = new ASTNode(new CommonToken(HiveParser.Identifier, alias));
+            queryAst.getSelectAST().getChild(currentChild)
+              .replaceChildren(selectExpr.getChildCount() - 1, selectExpr.getChildCount() - 1, newAliasNode);
+          }
+        } else {
+          // add column alias
+          ASTNode newAliasNode = new ASTNode(new CommonToken(HiveParser.Identifier, alias));
+          queryAst.getSelectAST().getChild(currentChild).addChild(newAliasNode);
+        }
+      } else {
+        queryAst.getSelectAST().deleteChild(currentChild);
+        currentChild--;
+      }
+      currentChild++;
+    }
+  }
+
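updateAnswerableSelectColumns deletes unanswerable select expressions while walking the child list, so it has to compensate the cursor after each deletion (the currentChild--/currentChild++ pair); note that the loop bound comes from the full cube query's select list while deletions happen on the candidate's own copy. The same pattern in miniature, on a plain list rather than an ASTNode:

  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.List;

  public class DeleteWhileIterating {
    public static void main(String[] args) {
      List<String> exprs = new ArrayList<>(Arrays.asList("a", "bad1", "b", "bad2"));
      int originalCount = exprs.size();
      int current = 0;
      for (int i = 0; i < originalCount; i++) {
        if (exprs.get(current).startsWith("bad")) {
          exprs.remove(current);
          current--; // compensate: the next child shifted into this slot
        }
        current++;
      }
      System.out.println(exprs); // [a, b]
    }
  }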
   @Override
   public boolean equals(Object obj) {
     if (super.equals(obj)) {
@@ -557,4 +653,37 @@ public class StorageCandidate implements Candidate, CandidateTable {
   public void addValidUpdatePeriod(UpdatePeriod updatePeriod) {
     this.validUpdatePeriods.add(updatePeriod);
   }
+
+  public void updateFromString(CubeQueryContext query, Set<Dimension> queryDims,
+    Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
+    this.dimsToQuery = dimsToQuery;
+    String alias = cubeql.getAliasForTableName(cubeql.getCube().getName());
+    fromString = getAliasForTable(alias);
+    if (query.isAutoJoinResolved()) {
+      fromString = query.getAutoJoinCtx().getFromString(fromString, this, queryDims, dimsToQuery, query, cubeql);
+    }
+  }
+
+  private String getFromTable() throws LensException {
+    if (cubeql.isAutoJoinResolved()) {
+      return fromString;
+    } else {
+      return cubeql.getQBFromString(this, getDimsToQuery());
+    }
+  }
+
+  public String getAliasForTable(String alias) {
+    String database = SessionState.get().getCurrentDatabase();
+    String ret;
+    if (alias == null || alias.isEmpty()) {
+      ret = name;
+    } else {
+      ret = name + " " + alias;
+    }
+    if (StringUtils.isNotBlank(database) && !"default".equalsIgnoreCase(database)) {
+      ret = database + "." + ret;
+    }
+    return ret;
+  }
+
 }
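getAliasForTable qualifies the storage table with the session's current database unless that database is "default". A condensed restatement of the same rule, with hypothetical table and database names, to make the expected from strings explicit:

  public class AliasForTableSketch {
    // Mirrors the rule in getAliasForTable above; names are hypothetical.
    static String aliasForTable(String name, String alias, String currentDb) {
      String ret = (alias == null || alias.isEmpty()) ? name : name + " " + alias;
      if (currentDb != null && !currentDb.trim().isEmpty()
        && !"default".equalsIgnoreCase(currentDb)) {
        ret = currentDb + "." + ret;
      }
      return ret;
    }

    public static void main(String[] args) {
      System.out.println(aliasForTable("c1_testfact", "testcube", "default")); // c1_testfact testcube
      System.out.println(aliasForTable("c1_testfact", "testcube", "prod"));    // prod.c1_testfact testcube
    }
  }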