Posted to commits@lens.apache.org by pu...@apache.org on 2017/02/08 02:23:39 UTC

[7/7] lens git commit: feature update 2 with query writing flow completed (a few test cases still need to be fixed)

feature update 2 with query writing flow completed (a few test cases still need to be fixed)


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/4af769ee
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/4af769ee
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/4af769ee

Branch: refs/heads/lens-1381
Commit: 4af769ee338c3f10d6c825eb45407d15278d5690
Parents: b6f0cc3
Author: Puneet Gupta, Sushil Mohanty and Lavkesh Lahngir <pu...@apache.org>
Authored: Wed Feb 8 07:51:54 2017 +0530
Committer: Puneet <pu...@inmobi.com>
Committed: Wed Feb 8 07:51:54 2017 +0530

----------------------------------------------------------------------
 .../NoCandidateFactAvailableException.java      |   5 +-
 .../lens/cube/metadata/FactPartition.java       |   1 +
 .../org/apache/lens/cube/parse/Candidate.java   |  58 +-
 .../parse/CandidateCoveringSetsResolver.java    | 152 ++--
 .../apache/lens/cube/parse/CandidateFact.java   |   3 +
 .../cube/parse/CandidateTablePruneCause.java    | 142 ++--
 .../lens/cube/parse/CandidateTableResolver.java | 117 ++-
 .../apache/lens/cube/parse/CandidateUtil.java   | 125 ++-
 .../apache/lens/cube/parse/ColumnResolver.java  |   2 +-
 .../lens/cube/parse/CubeQueryContext.java       | 249 +++---
 .../lens/cube/parse/CubeQueryRewriter.java      |  10 +-
 .../lens/cube/parse/DefaultAliasDecider.java    |   4 +
 .../apache/lens/cube/parse/DefaultQueryAST.java |  13 +-
 .../cube/parse/DenormalizationResolver.java     |  42 +-
 .../lens/cube/parse/ExpressionResolver.java     |  16 +-
 .../apache/lens/cube/parse/GroupbyResolver.java |  63 +-
 .../apache/lens/cube/parse/JoinCandidate.java   |  66 +-
 .../lens/cube/parse/LeastPartitionResolver.java |  35 +-
 .../cube/parse/MaxCoveringFactResolver.java     | 105 ++-
 .../lens/cube/parse/MultiFactHQLContext.java    | 238 ------
 .../lens/cube/parse/QueriedPhraseContext.java   |   2 +
 .../lens/cube/parse/SimpleHQLContext.java       |  62 +-
 .../parse/SingleFactMultiStorageHQLContext.java | 259 ------
 .../SingleFactSingleStorageHQLContext.java      |  73 --
 .../lens/cube/parse/StorageCandidate.java       | 211 ++++-
 .../lens/cube/parse/StorageTableResolver.java   | 143 ++--
 .../org/apache/lens/cube/parse/StorageUtil.java |   9 +-
 .../apache/lens/cube/parse/UnionCandidate.java  | 111 +--
 .../apache/lens/cube/parse/UnionHQLContext.java |  55 --
 .../lens/cube/parse/UnionQueryWriter.java       | 515 +++++++++++-
 .../lens/cube/parse/join/AutoJoinContext.java   |  56 +-
 .../cube/parse/join/BridgeTableJoinContext.java |  22 +-
 .../apache/lens/driver/cube/RewriterPlan.java   |   2 +-
 .../apache/lens/cube/parse/CubeTestSetup.java   |  78 +-
 .../lens/cube/parse/TestAggregateResolver.java  |  70 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    | 802 ++++++++++---------
 .../lens/cube/parse/TestBridgeTableQueries.java | 400 ++++++---
 .../lens/cube/parse/TestCubeRewriter.java       | 375 +++++----
 .../cube/parse/TestDenormalizationResolver.java |  69 +-
 .../lens/cube/parse/TestExpressionResolver.java |  89 +-
 .../lens/cube/parse/TestJoinResolver.java       | 107 ++-
 .../lens/cube/parse/TestRewriterPlan.java       |  14 +-
 .../parse/TestTimeRangeWriterWithQuery.java     |  27 +-
 .../cube/parse/TestUnionAndJoinCandidates.java  | 138 +++-
 .../lens/cube/parse/TestUnionQueries.java       |   3 +-
 45 files changed, 2800 insertions(+), 2338 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
index b2568ff..7d12762 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
@@ -20,14 +20,15 @@ package org.apache.lens.cube.error;
 
 import org.apache.lens.cube.metadata.CubeFactTable;
 import org.apache.lens.cube.parse.PruneCauses;
+import org.apache.lens.cube.parse.StorageCandidate;
 import org.apache.lens.server.api.error.LensException;
 
 
 public class NoCandidateFactAvailableException extends LensException {
 
-  private final PruneCauses<CubeFactTable> briefAndDetailedError;
+  private final PruneCauses<StorageCandidate> briefAndDetailedError;
 
-  public NoCandidateFactAvailableException(PruneCauses<CubeFactTable> briefAndDetailedError) {
+  public NoCandidateFactAvailableException(PruneCauses<StorageCandidate> briefAndDetailedError) {
     super(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo(), briefAndDetailedError.getBriefCause());
     this.briefAndDetailedError = briefAndDetailedError;
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
index 6a8e0c1..86d6056 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
@@ -30,6 +30,7 @@ import lombok.Getter;
 import lombok.Setter;
 
 @EqualsAndHashCode
+// TODO union : Change the class name to StoragePartition
 public class FactPartition implements Comparable<FactPartition> {
   @Getter
   private final String partCol;

http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java
index 0d0ddb7..1987939 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java
@@ -1,17 +1,14 @@
 package org.apache.lens.cube.parse;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Date;
-import java.util.Map;
 import java.util.Set;
 
-import org.apache.lens.cube.metadata.Dimension;
 import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-
 /**
  * This interface represents candidates that are involved in different phases of query rewriting.
  * At the lowest level, Candidate is represented by a StorageCandidate that has a fact on a storage
@@ -25,21 +22,6 @@ import org.apache.hadoop.hive.ql.parse.ASTNode;
 public interface Candidate {
 
   /**
-   * Returns String representation of this Candidate
-   * TODO decide if this method should be moved to QueryAST instead
-   *
-   * @return
-   */
-  String toHQL();
-
-  /**
-   * Returns Query AST
-   *
-   * @return
-   */
-  QueryAST getQueryAst();
-
-  /**
    * Returns all the fact columns
    *
    * @return
@@ -68,13 +50,6 @@ public interface Candidate {
   double getCost();
 
   /**
-   * Alias used for this candidate.
-   *
-   * @return
-   */
-  String getAlias();
-
-  /**
    * Returns true if this candidate contains the given candidate
    *
    * @param candidate
@@ -85,11 +60,11 @@ public interface Candidate {
   /**
    * Returns child candidates of this candidate if any.
    * Note: StorageCandidate will return null
+   *
    * @return
    */
   Collection<Candidate> getChildren();
 
-
   /**
    * Calculates if this candidate can answer the query for given time range based on actual data registered with
    * the underlying candidate storages. This method will also update any internal candidate data structures that are
@@ -97,43 +72,36 @@ public interface Candidate {
    *
    * @param timeRange         : TimeRange to check completeness for. TimeRange consists of start time, end time and the
    *                          partition column
+   * @param queriedTimeRange  : User queried time range
    * @param failOnPartialData : fail fast if the candidate can answer the query only partially
    * @return true if this Candidate can answer query for the given time range.
    */
-  boolean evaluateCompleteness(TimeRange timeRange, boolean failOnPartialData)
+  boolean evaluateCompleteness(TimeRange timeRange, TimeRange queriedTimeRange, boolean failOnPartialData)
     throws LensException;
 
   /**
    * Returns the set of fact partitions that will participate in this candidate.
    * Note: This method can be called only after call to
-   * {@link #evaluateCompleteness(TimeRange, boolean)}
+   * {@link #evaluateCompleteness(TimeRange, TimeRange, boolean)}
    *
    * @return
    */
   Set<FactPartition> getParticipatingPartitions();
 
   /**
-   * TODO union: in case of join , one of the candidates should be able to answer the mesaure expression
-   * TODO union: In case of union, all the candidates should answer the expression
-   * TODO union : add isExpresionEvaluable() to Candidate
+   * Checks whether an expression is evaluable by a candidate
+   * 1. For a JoinCandidate, at least one of the child candidates should be able to answer the expression
+   * 2. For a UnionCandidate, all child candidates should answer the expression
    *
    * @param expr
    * @return
    */
   boolean isExpressionEvaluable(ExpressionResolver.ExpressionContext expr);
 
-  // Moved to CandidateUtil boolean isValidForTimeRange(TimeRange timeRange);
-  // Moved to CandidateUtil boolean isExpressionAnswerable(ASTNode node, CubeQueryContext context) throws LensException;
-  // NO caller Set<String> getTimePartCols(CubeQueryContext query) throws LensException;
-
-  //TODO add methods to update AST in this candidate in this class of in CandidateUtil.
-  //void updateFromString(CubeQueryContext query) throws LensException;
-
-  //void updateASTs(CubeQueryContext cubeql) throws LensException;
-
-  //void addToHaving(ASTNode ast)  throws LensException;
-
-  //Used Having push down flow
-  //String addAndGetAliasFromSelect(ASTNode ast, AliasDecider aliasDecider);
+  /**
+   * Gets the index positions of answerable measure phrases in CubeQueryContext#selectPhrases
+   * @return
+   */
+  Set<Integer> getAnswerableMeasurePhraseIndices();
 
 }
\ No newline at end of file
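
A rough sketch of the isExpressionEvaluable contract documented above (not taken from this patch): a JoinCandidate needs only one child able to answer the expression, while a UnionCandidate needs every child to answer it, because each child of a union covers a disjoint part of the queried time range. Only Candidate, getChildren() and isExpressionEvaluable() come from the interface; the sketch class names and bodies are illustrative assumptions, not the actual JoinCandidate/UnionCandidate code.

  // Sketch only: possible realizations of the contract described above.
  abstract class JoinCandidateSketch implements Candidate {
    @Override
    public boolean isExpressionEvaluable(ExpressionResolver.ExpressionContext expr) {
      // A join can answer the expression if any one of its children can.
      for (Candidate child : getChildren()) {
        if (child.isExpressionEvaluable(expr)) {
          return true;
        }
      }
      return false;
    }
  }

  abstract class UnionCandidateSketch implements Candidate {
    @Override
    public boolean isExpressionEvaluable(ExpressionResolver.ExpressionContext expr) {
      // A union can answer the expression only if every one of its children can.
      for (Candidate child : getChildren()) {
        if (!child.isExpressionEvaluable(expr)) {
          return false;
        }
      }
      return true;
    }
  }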

http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
index e961427..6d85edf 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
@@ -1,81 +1,61 @@
 package org.apache.lens.cube.parse;
 
-import com.google.common.collect.Lists;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.hadoop.conf.Configuration;
+import java.util.*;
+
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.TimeRange;
-
 import org.apache.lens.server.api.error.LensException;
 
-import java.util.*;
+import org.apache.hadoop.conf.Configuration;
+
+import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 public class CandidateCoveringSetsResolver implements ContextRewriter {
 
   private List<Candidate> finalCandidates = new ArrayList<>();
-  private int unionCandidatealiasCounter = 0;
-  private int joinCandidatealiasCounter = 0;
-
   public CandidateCoveringSetsResolver(Configuration conf) {
   }
 
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws LensException {
-
+    List<QueriedPhraseContext> qpcList = cubeql.getQueriedPhrases();
     Set<QueriedPhraseContext> queriedMsrs = new HashSet<>();
-    for (QueriedPhraseContext qur : cubeql.getQueriedPhrases()) {
-      if (qur.hasMeasures(cubeql)) {
-        queriedMsrs.add(qur);
+    for (QueriedPhraseContext qpc : qpcList) {
+      if (qpc.hasMeasures(cubeql)) {
+        queriedMsrs.add(qpc);
       }
     }
     // if no measures are queried, add all StorageCandidates individually as single covering sets
     if (queriedMsrs.isEmpty()) {
       finalCandidates.addAll(cubeql.getCandidates());
     }
-
-    List<Candidate> unionSet = resolveRangeCoveringFactSet(cubeql, cubeql.getTimeRanges(), queriedMsrs);
-    List<List<Candidate>> measureCoveringSets = resolveJoinCandidates(unionSet, queriedMsrs, cubeql);
-    updateFinalCandidates(measureCoveringSets);
+    List<Candidate> timeRangeCoveringSet = resolveTimeRangeCoveringFactSet(cubeql, queriedMsrs, qpcList);
+    List<List<Candidate>> measureCoveringSets = resolveJoinCandidates(timeRangeCoveringSet, queriedMsrs, cubeql);
+    updateFinalCandidates(measureCoveringSets, cubeql);
     log.info("Covering candidate sets :{}", finalCandidates);
-
-    String msrString = CandidateUtil.getColumns(queriedMsrs).toString();
-    if (finalCandidates.isEmpty()) {
-      throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getLensErrorInfo(), msrString);
-    }
-    // update final candidate sets
     cubeql.getCandidates().clear();
     cubeql.getCandidates().addAll(finalCandidates);
-    // TODO : we might need to prune if we maintian two data structures in CubeQueryContext.
-    //cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.columnNotFound(getColumns(queriedMsrs)));
-    //if (cubeql.getCandidates().size() == 0) {
-    //  throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getLensErrorInfo(), msrString);
-   // }
   }
 
-  private Candidate createJoinCandidateFromUnionCandidates(List<Candidate> ucs) {
+  private Candidate createJoinCandidate(List<Candidate> childCandidates, CubeQueryContext cubeql) {
     Candidate cand;
-    if (ucs.size() >= 2) {
-      Candidate first = ucs.get(0);
-      Candidate second = ucs.get(1);
-      cand = new JoinCandidate(first, second, "jc" + joinCandidatealiasCounter++);
-      for (int i = 2; i < ucs.size(); i++) {
-        cand = new JoinCandidate(cand, ucs.get(i), "jc" + joinCandidatealiasCounter++);
-      }
-    } else {
-      cand = ucs.get(0);
+    Candidate first = childCandidates.get(0);
+    Candidate second = childCandidates.get(1);
+    cand = new JoinCandidate(first, second, cubeql);
+    for (int i = 2; i < childCandidates.size(); i++) {
+      cand = new JoinCandidate(cand, childCandidates.get(i), cubeql);
     }
     return cand;
   }
 
-  private void updateFinalCandidates(List<List<Candidate>> jcs) {
-    int aliasCounter = 0;
-    for (Iterator<List<Candidate>> itr = jcs.iterator(); itr.hasNext(); ) {
-      List<Candidate> jc = itr.next();
-      if (jc.size() == 1 && jc.iterator().next().getChildren().size() == 1) {
-        finalCandidates.add(jc.iterator().next().getChildren().iterator().next());
+  private void updateFinalCandidates(List<List<Candidate>> joinCandidates, CubeQueryContext cubeql) {
+    for (Iterator<List<Candidate>> itr = joinCandidates.iterator(); itr.hasNext(); ) {
+      List<Candidate> joinCandidate = itr.next();
+      if (joinCandidate.size() == 1) {
+        finalCandidates.add(joinCandidate.iterator().next());
       } else {
-        finalCandidates.add(createJoinCandidateFromUnionCandidates(jc));
+        finalCandidates.add(createJoinCandidate(joinCandidate, cubeql));
       }
     }
   }
@@ -99,8 +79,8 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
     }
   }
 
-  private List<Candidate> resolveRangeCoveringFactSet(CubeQueryContext cubeql, List<TimeRange> ranges,
-                                           Set<QueriedPhraseContext> queriedMsrs) throws LensException {
+  private List<Candidate> resolveTimeRangeCoveringFactSet(CubeQueryContext cubeql,
+      Set<QueriedPhraseContext> queriedMsrs, List<QueriedPhraseContext> qpcList) throws LensException {
     // All Candidates
     List<Candidate> allCandidates = new ArrayList<Candidate>(cubeql.getCandidates());
     // Partially valid candidates
@@ -110,11 +90,13 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
       // Assuming initial list of candidates populated are StorageCandidate
       if (cand instanceof StorageCandidate) {
         StorageCandidate sc = (StorageCandidate) cand;
-        if (CandidateUtil.isValidForTimeRanges(sc, ranges)) {
-          candidateSet.add(sc);
+        if (CandidateUtil.isValidForTimeRanges(sc, cubeql.getTimeRanges())) {
+          candidateSet.add(CandidateUtil.cloneStorageCandidate(sc));
           continue;
-        } else if (CandidateUtil.isPartiallyValidForTimeRanges(sc, ranges)) {
+        } else if (CandidateUtil.isPartiallyValidForTimeRanges(sc, cubeql.getTimeRanges())) {
           allCandidatesPartiallyValid.add(CandidateUtil.cloneStorageCandidate(sc));
+        } else {
+          //TODO union : Add cause
         }
       } else {
         throw new LensException("Not a StorageCandidate!!");
@@ -122,29 +104,27 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
     }
     // Get all covering fact sets
     List<UnionCandidate> unionCoveringSet =
-        getCombinations(new ArrayList<Candidate>(allCandidatesPartiallyValid));
+        getCombinations(new ArrayList<Candidate>(allCandidatesPartiallyValid), cubeql);
     // Sort the Collection based on no of elements
-    Collections.sort(unionCoveringSet, new CandidateUtil.UnionCandidateComparator<UnionCandidate>());
+    Collections.sort(unionCoveringSet, new CandidateUtil.ChildrenSizeBasedCandidateComparator<UnionCandidate>());
     // prune non covering sets
-    pruneUnionCandidatesNotCoveringAllRanges(unionCoveringSet, ranges);
+    pruneUnionCandidatesNotCoveringAllRanges(unionCoveringSet, cubeql.getTimeRanges());
     // prune candidate set which doesn't contain any common measure i
     pruneUnionCoveringSetWithoutAnyCommonMeasure(unionCoveringSet, queriedMsrs, cubeql);
     // prune redundant covering sets
     pruneRedundantUnionCoveringSets(unionCoveringSet);
     // pruing done in the previous steps, now create union candidates
     candidateSet.addAll(unionCoveringSet);
+    updateQueriableMeasures(candidateSet, qpcList, cubeql);
     return candidateSet ;
-
   }
 
   private boolean isMeasureAnswerablebyUnionCandidate(QueriedPhraseContext msr, Candidate uc,
-                                                   CubeQueryContext cubeql) throws LensException {
+      CubeQueryContext cubeql) throws LensException {
     // Candidate is a single StorageCandidate
-    if (uc.getChildren() == null ) {
-      if (!msr.isEvaluable(cubeql, (StorageCandidate) uc)) {
-        return false;
-      }
-    } else {
+    if ((uc instanceof StorageCandidate) && !msr.isEvaluable(cubeql, (StorageCandidate) uc)) {
+      return false;
+    } else if (uc instanceof UnionCandidate) {
       for (Candidate cand : uc.getChildren()) {
         if (!msr.isEvaluable(cubeql, (StorageCandidate) cand)) {
           return false;
@@ -155,8 +135,8 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
   }
 
   private void pruneUnionCoveringSetWithoutAnyCommonMeasure(List<UnionCandidate> ucs,
-                                                       Set<QueriedPhraseContext> queriedMsrs,
-                                                       CubeQueryContext cubeql) throws LensException {
+      Set<QueriedPhraseContext> queriedMsrs,
+      CubeQueryContext cubeql) throws LensException {
     for (ListIterator<UnionCandidate> itr = ucs.listIterator(); itr.hasNext(); ) {
       boolean toRemove = true;
       UnionCandidate uc = itr.next();
@@ -185,7 +165,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
     }
   }
 
-  public List<UnionCandidate> getCombinations(final List<Candidate> candidates) {
+  public List<UnionCandidate> getCombinations(final List<Candidate> candidates, CubeQueryContext cubeql) {
     int aliasCounter = 0;
     List<UnionCandidate> combinations = new LinkedList<UnionCandidate>();
     int size = candidates.size();
@@ -202,19 +182,18 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
         clonedI = clonedI >>> 1;
         --count;
       }
-      combinations.add(new UnionCandidate(individualCombinationList, "uc" + unionCandidatealiasCounter++ ));
+      combinations.add(new UnionCandidate(individualCombinationList, cubeql ));
     }
     return combinations;
   }
 
   private List<List<Candidate>> resolveJoinCandidates(List<Candidate> unionCandidates,
-                                                           Set<QueriedPhraseContext> msrs,
-                                                           CubeQueryContext cubeql) throws LensException {
+      Set<QueriedPhraseContext> msrs, CubeQueryContext cubeql) throws LensException {
     List<List<Candidate>> msrCoveringSets = new ArrayList<>();
     List<Candidate> ucSet = new ArrayList<>(unionCandidates);
-    boolean evaluable = false;
     // Check if a single set can answer all the measures and exprsWithMeasures
     for (Iterator<Candidate> i = ucSet.iterator(); i.hasNext(); ) {
+      boolean evaluable = false;
       Candidate uc = i.next();
       for (QueriedPhraseContext msr : msrs) {
         evaluable = isMeasureAnswerablebyUnionCandidate(msr, uc, cubeql) ? true : false;
@@ -256,4 +235,45 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
     log.info("Covering set {} for measures {} with factsPassed {}", msrCoveringSets, msrs, ucSet);
     return msrCoveringSets;
   }
+
+  private void updateQueriableMeasures(List<Candidate> cands,
+      List<QueriedPhraseContext> qpcList, CubeQueryContext cubeql) throws LensException {
+    for (Candidate cand : cands ) {
+      updateStorageCandidateQueriableMeasures(cand, qpcList, cubeql);
+    }
+  }
+
+
+  private void updateStorageCandidateQueriableMeasures(Candidate unionCandidate,
+      List<QueriedPhraseContext> qpcList, CubeQueryContext cubeql) throws LensException {
+    QueriedPhraseContext msrPhrase;
+    boolean isEvaluable;
+    for (int index = 0; index < qpcList.size(); index++) {
+
+      if (!qpcList.get(index).hasMeasures(cubeql)) {
+        //Not a measure phrase. Skip it
+        continue;
+      }
+
+      msrPhrase = qpcList.get(index);
+      if (unionCandidate instanceof StorageCandidate && msrPhrase.isEvaluable(cubeql,
+          (StorageCandidate) unionCandidate)) {
+        ((StorageCandidate) unionCandidate).setAnswerableMeasurePhraseIndices(index);
+      } else if (unionCandidate instanceof UnionCandidate) {
+        isEvaluable = true;
+        for (Candidate childCandidate : unionCandidate.getChildren()) {
+          if (!msrPhrase.isEvaluable(cubeql, (StorageCandidate) childCandidate)) {
+            isEvaluable = false;
+            break;
+          }
+        }
+        if (isEvaluable) {
+          //Set the index for all the children in this case
+          for (Candidate childCandidate : unionCandidate.getChildren()) {
+            ((StorageCandidate) childCandidate).setAnswerableMeasurePhraseIndices(index);
+          }
+        }
+      }
+    }
+  }
 }
\ No newline at end of file
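
The covering-set resolution above first collects every StorageCandidate that is at least partially valid for the queried time ranges, then enumerates all non-empty combinations of them in getCombinations before pruning the ones that do not cover all ranges or share any common measure. A standalone sketch of that bitmask enumeration (not from this patch; strings stand in for candidates and printing stands in for building a UnionCandidate):

  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.List;

  public class CombinationSketch {
    public static void main(String[] args) {
      List<String> candidates = Arrays.asList("sc1", "sc2", "sc3");
      // i = 1 .. 2^n - 1 : each bit of i selects one candidate, so every
      // non-empty subset is produced exactly once.
      for (int i = 1; i < (1 << candidates.size()); i++) {
        List<String> subset = new ArrayList<>();
        for (int bits = i, pos = 0; bits != 0; bits >>>= 1, pos++) {
          if ((bits & 1) != 0) {
            subset.add(candidates.get(pos));
          }
        }
        System.out.println(subset); // [sc1], [sc2], [sc1, sc2], [sc3], ...
      }
    }
  }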

http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 18478f8..ef7b9bc 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -43,6 +43,7 @@ import lombok.Setter;
 /**
  * Holds context of a candidate fact table.
  */
+@Deprecated
 public class CandidateFact implements CandidateTable, QueryAST {
   final CubeFactTable fact;
   @Getter
@@ -366,6 +367,7 @@ public class CandidateFact implements CandidateTable, QueryAST {
     return timePartDimensions;
   }
 
+  /*
   public void updateFromString(CubeQueryContext query, Set<Dimension> queryDims,
     Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     fromString = "%s"; // to update the storage alias later
@@ -375,4 +377,5 @@ public class CandidateFact implements CandidateTable, QueryAST {
           query, this);
     }
   }
+  */
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
index 41814f0..cef8f37 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
@@ -56,6 +56,27 @@ public class CandidateTablePruneCause {
         };
       }
     },
+
+
+    // Moved from storage causes.
+    // The storage is removed as it's not set in property "lens.cube.query.valid.fact.<fact_name>.storagetables"
+    INVALID_STORAGE("Invalid Storage"),
+    // storage table does not exist. Commented as it's not being used anywhere in master.
+    // STOARGE_TABLE_DOES_NOT_EXIST("Storage table does not exist"),
+    // storage has no update periods queried. Commented as it's not being used anywhere in master.
+    // MISSING_UPDATE_PERIODS("Storage has no update periods"),
+    // no candidate update periods, update period cause will have why each
+    // update period is not a candidate
+    NO_CANDIDATE_UPDATE_PERIODS("Storage update periods are not candidate"),
+    // storage table has no partitions queried
+    NO_PARTITIONS("Storage table has no partitions"),
+    // partition column does not exist
+    PART_COL_DOES_NOT_EXIST("Partition column does not exist"),
+    // Range is not supported by this storage table
+    TIME_RANGE_NOT_ANSWERABLE("Range not answerable"),
+    // storage is not supported by execution engine/driver
+    UNSUPPORTED_STORAGE("Unsupported Storage"),
+    
     // least weight not satisfied
     MORE_WEIGHT("Picked table had more weight than minimum."),
     // partial data is enabled, another fact has more data.
@@ -77,8 +98,8 @@ public class CandidateTablePruneCause {
     // candidate table tries to get denormalized field from dimension and the
     // referred dimension is invalid.
     INVALID_DENORM_TABLE("Referred dimension is invalid in one of the candidate tables"),
-    // column not valid in cube table
-    COLUMN_NOT_VALID("Column not valid in cube table"),
+    // column not valid in cube table. Commented the below line as it's not being used in master.
+    //COLUMN_NOT_VALID("Column not valid in cube table"),
     // column not found in cube table
     COLUMN_NOT_FOUND("%s are not %s") {
       Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
@@ -159,25 +180,7 @@ public class CandidateTablePruneCause {
         }
         return new String[]{incompletePartitions.toString()};
       }
-    },
-
-    // Moved from Stoarge causes
-    INVALID_STORAGE("Invalid Storage"),
-    // storage table does not exist
-    STOARGE_TABLE_DOES_NOT_EXIST("Storage table does not exist"),
-    // storage has no update periods queried
-    MISSING_UPDATE_PERIODS("Storage has no update periods"),
-    // no candidate update periods, update period cause will have why each
-    // update period is not a candidate
-    NO_CANDIDATE_UPDATE_PERIODS("Storage update periods are not candidate"),
-    // storage table has no partitions queried
-    NO_PARTITIONS("Storage table has no partitions"),
-    // partition column does not exist
-    PART_COL_DOES_NOT_EXIST("Partition column does not exist"),
-    // Range is not supported by this storage table
-    TIME_RANGE_NOT_ANSWERABLE("Range not answerable"),
-    // storage is not supported by execution engine
-    UNSUPPORTED_STORAGE("Unsupported Storage");
+    };
 
     String errorFormat;
 
@@ -198,28 +201,6 @@ public class CandidateTablePruneCause {
     }
   }
 
-  //TODO union : Remove this enum. All values moved to CandidateTablePruneCode
-  @Deprecated
-  public enum SkipStorageCode {
-    // invalid storage table
-    INVALID,
-    // storage table does not exist
-    TABLE_NOT_EXIST,
-    // storage has no update periods queried
-    MISSING_UPDATE_PERIODS,
-    // no candidate update periods, update period cause will have why each
-    // update period is not a candidate
-    NO_CANDIDATE_PERIODS,
-    // storage table has no partitions queried
-    NO_PARTITIONS,
-    // partition column does not exist
-    PART_COL_DOES_NOT_EXIST,
-    // Range is not supported by this storage table
-    RANGE_NOT_ANSWERABLE,
-    // storage is not supported by execution engine
-    UNSUPPORTED
-  }
-
   public enum SkipUpdatePeriodCode {
     // invalid update period
     INVALID,
@@ -227,46 +208,12 @@ public class CandidateTablePruneCause {
     QUERY_INTERVAL_BIGGER
   }
 
-  @JsonWriteNullProperties(false)
-  @Data
-  @NoArgsConstructor
-  //TODO union:deprecate this sub class
-  @Deprecated
-  public static class SkipStorageCause {
-    private SkipStorageCode cause;
-    // update period to skip cause
-    private Map<String, SkipUpdatePeriodCode> updatePeriodRejectionCause;
-
-    private List<String> nonExistantPartCols;
-
-    @Deprecated
-    public SkipStorageCause(SkipStorageCode cause) {
-      this.cause = cause;
-    }
-
-    @Deprecated
-    public static SkipStorageCause partColDoesNotExist(String... partCols) {
-      SkipStorageCause ret = new SkipStorageCause(SkipStorageCode.PART_COL_DOES_NOT_EXIST);
-      ret.nonExistantPartCols = new ArrayList<String>();
-      for (String s : partCols) {
-        ret.nonExistantPartCols.add(s);
-      }
-      return ret;
-    }
-
-    @Deprecated
-    public static SkipStorageCause noCandidateUpdatePeriod(Map<String, SkipUpdatePeriodCode> causes) {
-      SkipStorageCause ret = new SkipStorageCause(SkipStorageCode.NO_CANDIDATE_PERIODS);
-      ret.updatePeriodRejectionCause = causes;
-      return ret;
-    }
-  }
+  // Used for test cases only.
+  // storage to skip storage cause for dim table
+  private Map<String, CandidateTablePruneCode> dimStoragePruningCauses;
 
   // cause for cube table
   private CandidateTablePruneCode cause;
-  // storage to skip storage cause
-  private Map<String, SkipStorageCause> storageCauses;
-
   // populated only incase of missing partitions cause
   private Set<String> missingPartitions;
   // populated only incase of incomplete partitions cause
@@ -285,7 +232,8 @@ public class CandidateTablePruneCause {
   // the fact is not partitioned by part col of the time dim and time dim is not a dim attribute
   private Set<String> unsupportedTimeDims;
   // time covered
-  private MaxCoveringFactResolver.TimeCovered maxTimeCovered;
+  // TODO union : Fix this after MaxCoveringFactResolver is changed w.r.t. Candidate
+  //private MaxCoveringFactResolver.TimeCovered maxTimeCovered;
   // ranges in which fact is invalid
   private List<TimeRange> invalidRanges;
 
@@ -352,12 +300,14 @@ public class CandidateTablePruneCause {
     return cause;
   }
 
+  // TODO union : uncomment the below method after MaxCoveringFactResolver is fixed w.r.t. Candidate
+  /*
   public static CandidateTablePruneCause lessData(MaxCoveringFactResolver.TimeCovered timeCovered) {
     CandidateTablePruneCause cause = new CandidateTablePruneCause(LESS_DATA);
     cause.setMaxTimeCovered(timeCovered);
     return cause;
   }
-
+*/
   public static CandidateTablePruneCause noColumnPartOfAJoinPath(final Collection<String> colSet) {
     CandidateTablePruneCause cause =
       new CandidateTablePruneCause(NO_COLUMN_PART_OF_A_JOIN_PATH);
@@ -369,25 +319,29 @@ public class CandidateTablePruneCause {
     return cause;
   }
 
- //TDOO union : Remove this method
- @Deprecated
- public static CandidateTablePruneCause noCandidateStorages(Map<String, SkipStorageCause> storageCauses) {
+  public static CandidateTablePruneCause missingDefaultAggregate(String... names) {
+    CandidateTablePruneCause cause = new CandidateTablePruneCause(MISSING_DEFAULT_AGGREGATE);
+    cause.setColumnsMissingDefaultAggregate(Lists.newArrayList(names));
+    return cause;
+  }
+
+  /**
+   * This factory method can be used when a Dim Table is pruned because all its Storages are pruned.
+   * @param dimStoragePruningCauses
+   * @return
+   */
+  public static CandidateTablePruneCause noCandidateStoragesForDimtable(
+      Map<String, CandidateTablePruneCode> dimStoragePruningCauses) {
     CandidateTablePruneCause cause = new CandidateTablePruneCause(NO_CANDIDATE_STORAGES);
-    cause.setStorageCauses(new HashMap<String, SkipStorageCause>());
-    for (Map.Entry<String, SkipStorageCause> entry : storageCauses.entrySet()) {
+    cause.setDimStoragePruningCauses(new HashMap<String, CandidateTablePruneCode>());
+    for (Map.Entry<String, CandidateTablePruneCode> entry : dimStoragePruningCauses.entrySet()) {
       String key = entry.getKey();
       key = key.substring(0, (key.indexOf("_") + key.length() + 1) % (key.length() + 1)); // extract the storage part
-      cause.getStorageCauses().put(key.toLowerCase(), entry.getValue());
+      cause.getDimStoragePruningCauses().put(key.toLowerCase(), entry.getValue());
     }
     return cause;
   }
 
-  public static CandidateTablePruneCause missingDefaultAggregate(String... names) {
-    CandidateTablePruneCause cause = new CandidateTablePruneCause(MISSING_DEFAULT_AGGREGATE);
-    cause.setColumnsMissingDefaultAggregate(Lists.newArrayList(names));
-    return cause;
-  }
-
   /**
    * Queried partition columns are not present in this Storage Candidate
    * @param missingPartitionColumns

http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
index dd098b1..7a885a2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
@@ -74,7 +74,9 @@ class CandidateTableResolver implements ContextRewriter {
       if (cubeql.getAutoJoinCtx() != null) {
         // Before checking for candidate table columns, prune join paths containing non existing columns
         // in populated candidate tables
-        cubeql.getAutoJoinCtx().pruneAllPaths(cubeql.getCube(), cubeql.getCandidateFacts(), null);
+        //TODO rewrite : commented below line to compile
+        cubeql.getAutoJoinCtx().pruneAllPaths(cubeql.getCube(),
+            CandidateUtil.getStorageCandidates(cubeql.getCandidates()), null);
         cubeql.getAutoJoinCtx().pruneAllPathsForCandidateDims(cubeql.getCandidateDimTables());
         cubeql.getAutoJoinCtx().refreshJoinPathColumns();
       }
@@ -82,7 +84,8 @@ class CandidateTableResolver implements ContextRewriter {
       // check for joined columns and denorm columns on refered tables
       resolveCandidateFactTablesForJoins(cubeql);
       resolveCandidateDimTablesForJoinsAndDenorms(cubeql);
-      cubeql.pruneCandidateFactSet(CandidateTablePruneCode.INVALID_DENORM_TABLE);
+      // TODO union : below method can be deleted from CubeQueryContext
+      //cubeql.pruneCandidateFactSet(CandidateTablePruneCode.INVALID_DENORM_TABLE);
       checkForQueriedColumns = true;
     }
   }
@@ -91,14 +94,25 @@ class CandidateTableResolver implements ContextRewriter {
     int aliasCounter = 0;
     if (cubeql.getCube() != null) {
       List<CubeFactTable> factTables = cubeql.getMetastoreClient().getAllFacts(cubeql.getCube());
+      // TODO union : Check for cube table partially valid, else remove it.
       if (factTables.isEmpty()) {
         throw new LensException(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo(),
             cubeql.getCube().getName() + " does not have any facts");
       }
       for (CubeFactTable fact : factTables) {
-        StorageCandidate sc = new StorageCandidate(cubeql.getCube(), fact,
-            fact.getStorages().iterator().next(), "sc" + aliasCounter++, cubeql);
-        cubeql.getCandidates().add(sc);
+        Iterator<String> it = fact.getStorages().iterator();
+        //TODO union : Add MISSING_STORAGES pruning message
+        /* Moved this from StorageTableResolver
+        if (fact.getUpdatePeriods().isEmpty()) {
+          cubeql.addFactPruningMsgs(fact, new CandidateTablePruneCause(CandidateTablePruneCode.MISSING_STORAGES));
+          i.remove();
+          continue;
+        }
+        */
+        while(it.hasNext()) {
+          StorageCandidate sc = new StorageCandidate(cubeql.getCube(), fact, it.next(), cubeql);
+          cubeql.getCandidates().add(sc);
+        }
       }
       log.info("Populated storage candidates: {}", cubeql.getCandidates());
     }
@@ -162,7 +176,7 @@ class CandidateTableResolver implements ContextRewriter {
     for (CandidateTable candidate : optdim.requiredForCandidates) {
       if (candidate instanceof StorageCandidate) {
         log.info("Not considering storage candidate:{} as refered table does not have any valid dimtables", candidate);
-        cubeql.getCandidateFacts().remove(candidate);
+        cubeql.getCandidates().remove(candidate);
         cubeql.addStoragePruningMsg(((StorageCandidate) candidate), new CandidateTablePruneCause(
           CandidateTablePruneCode.INVALID_DENORM_TABLE));
       } else {
@@ -202,12 +216,12 @@ class CandidateTableResolver implements ContextRewriter {
 
   private static Date getFactColumnStartTime(CandidateTable table, String factCol) {
     Date startTime = null;
-    if (table instanceof CandidateFact) {
-      for (String key : ((CandidateFact) table).fact.getProperties().keySet()) {
+    if (table instanceof StorageCandidate) {
+      for (String key : ((StorageCandidate) table).getFact().getProperties().keySet()) {
         if (key.contains(MetastoreConstants.FACT_COL_START_TIME_PFX)) {
           String propCol = StringUtils.substringAfter(key, MetastoreConstants.FACT_COL_START_TIME_PFX);
           if (factCol.equals(propCol)) {
-            startTime = ((CandidateFact) table).fact.getDateFromProperty(key, false, true);
+            startTime = ((StorageCandidate) table).getFact().getDateFromProperty(key, false, true);
           }
         }
       }
@@ -217,12 +231,12 @@ class CandidateTableResolver implements ContextRewriter {
 
   private static Date getFactColumnEndTime(CandidateTable table, String factCol) {
     Date endTime = null;
-    if (table instanceof CandidateFact) {
-      for (String key : ((CandidateFact) table).fact.getProperties().keySet()) {
+    if (table instanceof StorageCandidate) {
+      for (String key : ((StorageCandidate) table).getFact().getProperties().keySet()) {
         if (key.contains(MetastoreConstants.FACT_COL_END_TIME_PFX)) {
           String propCol = StringUtils.substringAfter(key, MetastoreConstants.FACT_COL_END_TIME_PFX);
           if (factCol.equals(propCol)) {
-            endTime = ((CandidateFact) table).fact.getDateFromProperty(key, false, true);
+            endTime = ((StorageCandidate) table).getFact().getDateFromProperty(key, false, true);
           }
         }
       }
@@ -251,7 +265,7 @@ class CandidateTableResolver implements ContextRewriter {
         if (cand instanceof StorageCandidate) {
           StorageCandidate sc = (StorageCandidate) cand;
           if (validFactTables != null) {
-            if (!validFactTables.contains(sc.getName().toLowerCase())) {
+            if (!validFactTables.contains(sc.getFact().getName().toLowerCase())) {
               log.info("Not considering storage candidate:{} as it is not a valid candidate", sc);
               cubeql.addStoragePruningMsg(sc, new CandidateTablePruneCause(CandidateTablePruneCode.INVALID));
               i.remove();
@@ -311,10 +325,10 @@ class CandidateTableResolver implements ContextRewriter {
         } else {
           throw new LensException("Not a storage candidate!!");
         }
-        if (cubeql.getCandidates().size() == 0) {
-          throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getLensErrorInfo(),
-              getColumns(cubeql.getQueriedPhrases()).toString());
-        }
+      }
+      if (cubeql.getCandidates().size() == 0) {
+        throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getLensErrorInfo(),
+            getColumns(cubeql.getQueriedPhrases()).toString());
       }
     }
   }
@@ -420,11 +434,10 @@ class CandidateTableResolver implements ContextRewriter {
       return;
     }
     Collection<String> colSet = null;
-    if (cubeql.getCube() != null && !cubeql.getCandidateFacts().isEmpty()) {
-      for (Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator(); i.hasNext();) {
-        CandidateFact cfact = i.next();
-        CubeFactTable fact = cfact.fact;
-
+    if (cubeql.getCube() != null && !cubeql.getCandidates().isEmpty()) {
+      for (Iterator<StorageCandidate> i =
+           CandidateUtil.getStorageCandidates(cubeql.getCandidates()).iterator(); i.hasNext();) {
+        StorageCandidate sc = i.next();
         // for each join path check for columns involved in path
         for (Map.Entry<Aliased<Dimension>, Map<AbstractCubeTable, List<String>>> joincolumnsEntry : cubeql
           .getAutoJoinCtx()
@@ -433,19 +446,19 @@ class CandidateTableResolver implements ContextRewriter {
           OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().get(reachableDim);
           colSet = joincolumnsEntry.getValue().get(cubeql.getCube());
 
-          if (!checkForFactColumnExistsAndValidForRange(cfact, colSet, cubeql)) {
+          if (!checkForFactColumnExistsAndValidForRange(sc, colSet, cubeql)) {
             if (optdim == null || optdim.isRequiredInJoinChain
-              || (optdim != null && optdim.requiredForCandidates.contains(cfact))) {
+              || (optdim != null && optdim.requiredForCandidates.contains(sc))) {
               i.remove();
-              log.info("Not considering fact table:{} as it does not have columns in any of the join paths."
-                + " Join columns:{}", fact, colSet);
-              cubeql.addFactPruningMsgs(fact, CandidateTablePruneCause.noColumnPartOfAJoinPath(colSet));
+              log.info("Not considering storage candidate :{} as it does not have columns in any of the join paths."
+                + " Join columns:{}", sc, colSet);
+              cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.noColumnPartOfAJoinPath(colSet));
               break;
             }
           }
         }
       }
-      if (cubeql.getCandidateFacts().size() == 0) {
+      if (cubeql.getCandidates().size() == 0) {
         throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getLensErrorInfo(),
             colSet == null ? "NULL" : colSet.toString());
       }
@@ -522,12 +535,16 @@ class CandidateTableResolver implements ContextRewriter {
         if (removedCandidates.get(dim) != null) {
           for (CandidateTable candidate : removedCandidates.get(dim)) {
             if (!candidatesReachableThroughRefs.contains(candidate)) {
-              if (candidate instanceof CandidateFact) {
-                if (cubeql.getCandidateFacts().contains(candidate)) {
-                  log.info("Not considering fact:{} as its required optional dims are not reachable", candidate);
-                  cubeql.getCandidateFacts().remove(candidate);
-                  cubeql.addFactPruningMsgs(((CandidateFact) candidate).fact,
-                    CandidateTablePruneCause.columnNotFound(col));
+              if (candidate instanceof StorageCandidate) {
+                if (cubeql.getCandidates().contains(candidate)) {
+                  log.info("Not considering Storage:{} as its required optional dims are not reachable", candidate);
+                  cubeql.getCandidates().remove(candidate);
+                  cubeql.addStoragePruningMsg((StorageCandidate) candidate,
+                      CandidateTablePruneCause.columnNotFound(col));
+                  Collection<Candidate> prunedCandidates = CandidateUtil.
+                      filterCandidates(cubeql.getCandidates(), (StorageCandidate) candidate);
+                  cubeql.addCandidatePruningMsg(prunedCandidates,
+                      new CandidateTablePruneCause(CandidateTablePruneCode.ELEMENT_IN_SET_PRUNED));
                 }
               } else if (cubeql.getCandidateDimTables().containsKey(((CandidateDim) candidate).getBaseTable())) {
                 log.info("Not considering dimtable:{} as its required optional dims are not reachable", candidate);
@@ -575,11 +592,11 @@ class CandidateTableResolver implements ContextRewriter {
               // candidate has other evaluable expressions
               continue;
             }
-            if (candidate instanceof CandidateFact) {
-              if (cubeql.getCandidateFacts().contains(candidate)) {
+            if (candidate instanceof StorageCandidate) {
+              if (cubeql.getCandidates().contains(candidate)) {
                 log.info("Not considering fact:{} as is not reachable through any optional dim", candidate);
-                cubeql.getCandidateFacts().remove(candidate);
-                cubeql.addFactPruningMsgs(((CandidateFact) candidate).fact,
+                cubeql.getCandidates().remove(candidate);
+                cubeql.addStoragePruningMsg(((StorageCandidate) candidate),
                   CandidateTablePruneCause.expressionNotEvaluable(col.getExprCol()));
               }
             } else if (cubeql.getCandidateDimTables().containsKey(((CandidateDim) candidate).getBaseTable())) {
@@ -679,28 +696,4 @@ class CandidateTableResolver implements ContextRewriter {
     }
     return false;
   }
-
-  static boolean allEvaluable(StorageCandidate sc, Collection<QueriedPhraseContext> colSet,
-                              CubeQueryContext cubeql) throws LensException {
-    if (colSet == null || colSet.isEmpty()) {
-      return true;
-    }
-    for (QueriedPhraseContext qur : colSet) {
-      if (!qur.isEvaluable(cubeql, sc)) {
-        return false;
-      }
-    }
-    return true;
-  }
-
-  static Set<QueriedPhraseContext> coveredMeasures(StorageCandidate sc, Collection<QueriedPhraseContext> msrs,
-                                                   CubeQueryContext cubeql) throws LensException {
-    Set<QueriedPhraseContext> coveringSet = new HashSet<>();
-    for (QueriedPhraseContext msr : msrs) {
-      if (msr.isEvaluable(cubeql, sc)) {
-        coveringSet.add(msr);
-      }
-    }
-    return coveringSet;
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
index dd3b1dd..6cb7e3f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
@@ -2,11 +2,15 @@ package org.apache.lens.cube.parse;
 
 import java.util.*;
 
+import org.antlr.runtime.CommonToken;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.lens.cube.metadata.CubeMetastoreClient;
+import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.metadata.MetastoreUtil;
 import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 
@@ -15,6 +19,8 @@ import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
 import com.google.common.collect.TreeRangeSet;
 
+import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
+
 /**
  * Placeholder for Util methods that will be required for {@link Candidate}
  */
@@ -92,7 +98,7 @@ public class CandidateUtil {
    * @param targetAst
    * @throws LensException
    */
-  public void copyASTs(QueryAST sourceAst, QueryAST targetAst) throws LensException {
+  public static void copyASTs(QueryAST sourceAst, QueryAST targetAst) throws LensException {
     targetAst.setSelectAST(MetastoreUtil.copyAST(sourceAst.getSelectAST()));
     targetAst.setWhereAST(MetastoreUtil.copyAST(sourceAst.getWhereAST()));
     if (sourceAst.getJoinAST() != null) {
@@ -101,6 +107,9 @@ public class CandidateUtil {
     if (sourceAst.getGroupByAST() != null) {
       targetAst.setGroupByAST(MetastoreUtil.copyAST(sourceAst.getGroupByAST()));
     }
+    if (sourceAst.getHavingAST() != null) {
+      targetAst.setHavingAST(MetastoreUtil.copyAST(sourceAst.getHavingAST()));
+    }
   }
 
   public static Set<StorageCandidate> getStorageCandidates(final Candidate candidate) {
@@ -109,7 +118,14 @@ public class CandidateUtil {
     }});
   }
 
-
+  /**
+   * Returns the measure phrases from {@code msrs} that can be answered by the given candidate set.
+   *
+   * @param candSet
+   * @param msrs
+   * @param cubeql
+   * @return
+   * @throws LensException
+   */
   public static Set<QueriedPhraseContext> coveredMeasures(Candidate candSet, Collection<QueriedPhraseContext> msrs,
     CubeQueryContext cubeql) throws LensException {
     Set<QueriedPhraseContext> coveringSet = new HashSet<>();
@@ -119,6 +135,7 @@ public class CandidateUtil {
           coveringSet.add(msr);
         }
       } else {
+        // TODO union : all candidates should answer
           for (Candidate cand : candSet.getChildren()) {
             if (msr.isEvaluable(cubeql, (StorageCandidate) cand)) {
               coveringSet.add(msr);
@@ -195,14 +212,110 @@ public class CandidateUtil {
   }
 
   public static StorageCandidate cloneStorageCandidate(StorageCandidate sc) {
-    return new StorageCandidate(sc.getCube(), sc.getFact(), sc.getStorageName(), sc.getAlias(), sc.getCubeql());
+    return new StorageCandidate(sc);
   }
 
-  public static class UnionCandidateComparator<T> implements Comparator<UnionCandidate> {
-
+  public static class ChildrenSizeBasedCandidateComparator<T> implements Comparator<Candidate> {
     @Override
-    public int compare(UnionCandidate o1, UnionCandidate o2) {
+    public int compare(Candidate o1, Candidate o2) {
       return Integer.valueOf(o1.getChildren().size() - o2.getChildren().size());
     }
   }
+
+  private static final String baseQueryFormat = "SELECT %s FROM %s";
+
+  public static String buildHQLString(String select, String from, String where, String groupby, String orderby, String having,
+                                      Integer limit) {
+
+    List<String> qstrs = new ArrayList<String>();
+    qstrs.add(select);
+    qstrs.add(from);
+    if (!StringUtils.isBlank(where)) {
+      qstrs.add(where);
+    }
+    if (!StringUtils.isBlank(groupby)) {
+      qstrs.add(groupby);
+    }
+    if (!StringUtils.isBlank(having)) {
+      qstrs.add(having);
+    }
+    if (!StringUtils.isBlank(orderby)) {
+      qstrs.add(orderby);
+    }
+    if (limit != null) {
+      qstrs.add(String.valueOf(limit));
+    }
+
+    StringBuilder queryFormat = new StringBuilder();
+    queryFormat.append(baseQueryFormat);
+    if (!StringUtils.isBlank(where)) {
+      queryFormat.append(" WHERE %s");
+    }
+    if (!StringUtils.isBlank(groupby)) {
+      queryFormat.append(" GROUP BY %s");
+    }
+    if (!StringUtils.isBlank(having)) {
+      queryFormat.append(" HAVING %s");
+    }
+    if (!StringUtils.isBlank(orderby)) {
+      queryFormat.append(" ORDER BY %s");
+    }
+    if (limit != null) {
+      queryFormat.append(" LIMIT %s");
+    }
+    return String.format(queryFormat.toString(), qstrs.toArray(new String[0]));
+  }
+
+  /**
+   * Updates the final alias in the outer select expressions:
+   * 1. Replaces queriedAlias with finalAlias if the two differ
+   * 2. If queriedAlias is missing, adds finalAlias as the alias
+   *
+   * @param selectAST outer query select AST
+   * @param cubeql    cube query context
+   */
+  public static void updateFinalAlias(ASTNode selectAST, CubeQueryContext cubeql) {
+    for (int i = 0; i < selectAST.getChildCount(); i++) {
+      ASTNode selectExpr = (ASTNode) selectAST.getChild(i);
+      ASTNode aliasNode = HQLParser.findNodeByPath(selectExpr, Identifier);
+      String finalAlias = cubeql.getSelectPhrases().get(i).getFinalAlias().replaceAll("`", "");
+      if (aliasNode != null) {
+        String queryAlias = aliasNode.getText();
+        if (!queryAlias.equals(finalAlias)) {
+          // replace the alias node
+          ASTNode newAliasNode = new ASTNode(new CommonToken(HiveParser.Identifier, finalAlias));
+          selectAST.getChild(i).replaceChildren(selectExpr.getChildCount() - 1,
+              selectExpr.getChildCount() - 1, newAliasNode);
+        }
+      } else {
+        // add column alias
+        ASTNode newAliasNode = new ASTNode(new CommonToken(HiveParser.Identifier, finalAlias));
+        selectAST.getChild(i).addChild(newAliasNode);
+      }
+    }
+  }
+
+  public static boolean containsAny(Set<String> srcSet, Set<String> colSet) {
+    if (colSet == null || colSet.isEmpty()) {
+      return true;
+    }
+    for (String column : colSet) {
+      if (srcSet.contains(column)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+
+  public static Set<String> getMissingPartitions(StorageCandidate sc) {
+    Set<String> missingParts = new HashSet<>();
+    for (FactPartition part : sc.getParticipatingPartitions()) {
+      if (!part.isFound()) {
+        missingParts.add(part.toString()); //TODO union : add appropriate partition string
+      }
+    }
+    return missingParts;
+  }
 }
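
buildHQLString above assembles the final HQL from whichever clause fragments are non-blank, appending WHERE/GROUP BY/HAVING/ORDER BY/LIMIT to the base "SELECT %s FROM %s" only when the corresponding argument is present. A usage sketch (not from this patch; the clause values are invented for illustration):

  String hql = CandidateUtil.buildHQLString(
      "dim1, sum(msr1)",          // select expressions
      "c1_testfact fact",         // from
      "fact.dt = '2017-02-08'",   // where
      "dim1",                     // group by
      null,                       // order by (blank, skipped)
      null,                       // having (blank, skipped)
      100);                       // limit
  // hql => "SELECT dim1, sum(msr1) FROM c1_testfact fact
  //         WHERE fact.dt = '2017-02-08' GROUP BY dim1 LIMIT 100"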

http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
index 87e094a..21cdd26 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
@@ -294,7 +294,7 @@ class ColumnResolver implements ContextRewriter {
     return Optional.fromNullable(funcName);
   }
 
-  private static void addColumnsForSelectExpr(final TrackQueriedColumns sel, ASTNode node, ASTNode parent,
+  static void addColumnsForSelectExpr(final TrackQueriedColumns sel, ASTNode node, ASTNode parent,
     Set<String> cols) {
     if (node.getToken().getType() == TOK_TABLE_OR_COL && (parent != null && parent.getToken().getType() != DOT)) {
       // Take child ident.totext

http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 58fc5b1..470d6e7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -102,10 +102,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
   // Mapping of a qualified column name to its table alias
   private final Map<String, String> colToTableAlias = new HashMap<>();
 
-  //TODO union: remove candidateFactSets and use
-  @Getter
-  private final Set<Set<CandidateFact>> candidateFactSets = new HashSet<>();
-
   /**
    * This is the set of working Candidates that gets updated during different phases of
    * query resolution. Each {@link ContextRewriter} may add/remove/update Candiadtes in
@@ -139,7 +135,9 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
 
   void addQueriedPhrase(QueriedPhraseContext qur) {
     queriedPhrases.add(qur);
+    qur.setPosition(queriedPhrases.size() - 1);
   }
+
   @Getter
   private final List<SelectPhraseContext> selectPhrases = new ArrayList<>();
 
@@ -149,13 +147,8 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
   // Join conditions used in all join expressions
   @Getter
   private final Map<QBJoinTree, String> joinConds = new HashMap<QBJoinTree, String>();
-
-  // storage specific
-  @Getter
-  protected final Set<CandidateFact> candidateFacts = new HashSet<CandidateFact>();
   @Getter
   protected final Map<Dimension, Set<CandidateDim>> candidateDims = new HashMap<Dimension, Set<CandidateDim>>();
-
   // query trees
   @Getter
   @Setter
@@ -495,7 +488,7 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
 
   /**
    * TODO union : deprecate this method and use
-   * {@link #addFactPruningMsg(CubeInterface, CubeFactTable, CandidateTablePruneCause)}
+   * {@link # addFactPruningMsg(CubeInterface, CubeFactTable, CandidateTablePruneCause)}
    * or
    * {@link #addStoragePruningMsg(StorageCandidate, CandidateTablePruneCause)}
    * */
@@ -513,10 +506,17 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     }
   }
 */
-  public void addCandidatePruningMsg(Candidate cand, CandidateTablePruneCause factPruningMsg) {
+  public void addCandidatePruningMsg(Collection<Candidate> candidateCollection, CandidateTablePruneCause pruneCause) {
+    for (Candidate c : candidateCollection) {
+      addCandidatePruningMsg(c, pruneCause);
+    }
+
+  }
+
+  public void addCandidatePruningMsg(Candidate cand, CandidateTablePruneCause pruneCause) {
     Set<StorageCandidate> scs = CandidateUtil.getStorageCandidates(cand);
     for (StorageCandidate sc : scs) {
-      addStoragePruningMsg(sc, factPruningMsg);
+      addStoragePruningMsg(sc, pruneCause);
     }
   }
 
@@ -678,11 +678,11 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     }
   }
 
-  void updateFromString(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
+  void updateFromString(StorageCandidate sc, Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     fromString = "%s"; // storage string is updated later
     if (isAutoJoinResolved()) {
       fromString =
-        getAutoJoinCtx().getFromString(fromString, fact, dimsToQuery.keySet(), dimsToQuery, this, this);
+        getAutoJoinCtx().getFromString(fromString, sc, dimsToQuery.keySet(), dimsToQuery, this, this);
     }
   }
 
@@ -735,9 +735,9 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     qb.getParseInfo().setDestLimit(getClause(), 0, value);
   }
 
-  private String getStorageStringWithAlias(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, String alias) {
+  private String getStorageStringWithAlias(StorageCandidate candidate, Map<Dimension, CandidateDim> dimsToQuery, String alias) {
     if (cubeTbls.get(alias) instanceof CubeInterface) {
-      return fact.getStorageString(alias);
+      return candidate.getAliasForTable(alias);
     } else {
       return dimsToQuery.get(cubeTbls.get(alias)).getStorageString(alias);
     }
@@ -747,14 +747,14 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     return StorageUtil.getWhereClause(dimsToQuery.get(cubeTbls.get(alias)), alias);
   }
 
-  String getQBFromString(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
+  String getQBFromString(StorageCandidate candidate, Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     String fromString;
     if (getJoinAST() == null) {
       if (cube != null) {
         if (dimensions.size() > 0) {
           throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE.getLensErrorInfo());
         }
-        fromString = fact.getStorageString(getAliasForTableName(cube.getName()));
+        fromString = candidate.getAliasForTable(getAliasForTableName(cube.getName()));
       } else {
         if (dimensions.size() != 1) {
           throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE.getLensErrorInfo());
@@ -764,22 +764,23 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
       }
     } else {
       StringBuilder builder = new StringBuilder();
-      getQLString(qb.getQbJoinTree(), builder, fact, dimsToQuery);
+      getQLString(qb.getQbJoinTree(), builder, candidate, dimsToQuery);
       fromString = builder.toString();
     }
     return fromString;
   }
 
-  private void getQLString(QBJoinTree joinTree, StringBuilder builder, CandidateFact fact,
+
+  private void getQLString(QBJoinTree joinTree, StringBuilder builder, StorageCandidate candidate,
     Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     List<String> joiningTables = new ArrayList<>();
     if (joinTree.getBaseSrc()[0] == null) {
       if (joinTree.getJoinSrc() != null) {
-        getQLString(joinTree.getJoinSrc(), builder, fact, dimsToQuery);
+        getQLString(joinTree.getJoinSrc(), builder, candidate, dimsToQuery);
       }
     } else { // (joinTree.getBaseSrc()[0] != null){
       String alias = joinTree.getBaseSrc()[0].toLowerCase();
-      builder.append(getStorageStringWithAlias(fact, dimsToQuery, alias));
+      builder.append(getStorageStringWithAlias(candidate, dimsToQuery, alias));
       joiningTables.add(alias);
     }
     if (joinTree.getJoinCond() != null) {
@@ -788,11 +789,11 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     }
     if (joinTree.getBaseSrc()[1] == null) {
       if (joinTree.getJoinSrc() != null) {
-        getQLString(joinTree.getJoinSrc(), builder, fact, dimsToQuery);
+        getQLString(joinTree.getJoinSrc(), builder, candidate, dimsToQuery);
       }
     } else { // (joinTree.getBaseSrc()[1] != null){
       String alias = joinTree.getBaseSrc()[1].toLowerCase();
-      builder.append(getStorageStringWithAlias(fact, dimsToQuery, alias));
+      builder.append(getStorageStringWithAlias(candidate, dimsToQuery, alias));
       joiningTables.add(alias);
     }
 
@@ -884,20 +885,20 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     return dimsToQuery;
   }
 
-  private Set<CandidateFact> pickCandidateFactToQuery() throws LensException {
-    Set<CandidateFact> facts = null;
+  private Candidate pickCandidateToQuery() throws LensException {
+    Candidate cand = null;
     if (hasCubeInQuery()) {
-      if (candidateFactSets.size() > 0) {
-        facts = candidateFactSets.iterator().next();
-        log.info("Available candidate facts:{}, picking up {} for querying", candidateFactSets, facts);
+      if (candidates.size() > 0) {
+        cand = candidates.iterator().next();
+        log.info("Available Candidates:{}, picking up Candaidate: {} for querying", candidates, cand);
       } else {
         String reason = "";
-        if (!factPruningMsgs.isEmpty()) {
+        if (!storagePruningMsgs.isEmpty()) {
           ByteArrayOutputStream out = null;
           try {
             ObjectMapper mapper = new ObjectMapper();
             out = new ByteArrayOutputStream();
-            mapper.writeValue(out, factPruningMsgs.getJsonObject());
+            mapper.writeValue(out, storagePruningMsgs.getJsonObject());
             reason = out.toString("UTF-8");
           } catch (Exception e) {
             throw new LensException("Error writing fact pruning messages", e);
@@ -911,112 +912,103 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
             }
           }
         }
-        log.error("Query rewrite failed due to NO_CANDIDATE_FACT_AVAILABLE, Cause {}", factPruningMsgs.toJsonObject());
-        throw new NoCandidateFactAvailableException(factPruningMsgs);
+        log.error("Query rewrite failed due to NO_CANDIDATE_FACT_AVAILABLE, Cause {}", storagePruningMsgs.toJsonObject());
+        throw new NoCandidateFactAvailableException(storagePruningMsgs);
       }
     }
-    return facts;
+    return cand;
   }
 
   private HQLContextInterface hqlContext;
 
-  //TODO union : Delete this and use pickedCandidate
-  @Getter
-  private Collection<CandidateFact> pickedFacts;
-
   @Getter
   //TODO union : This will be the final Candidate . private Candidate pickedCandidate
   private Candidate pickedCandidate;
   @Getter
   private Collection<CandidateDim> pickedDimTables;
 
-  private void addRangeClauses(CandidateFact fact) throws LensException {
-    if (fact != null) {
+  private void addRangeClauses(StorageCandidate sc) throws LensException {
+    if (sc != null) {
       // resolve timerange positions and replace it by corresponding where clause
       for (TimeRange range : getTimeRanges()) {
-        for (Map.Entry<String, String> entry : fact.getRangeToStorageWhereMap().get(range).entrySet()) {
-          String table = entry.getKey();
-          String rangeWhere = entry.getValue();
-          if (!StringUtils.isBlank(rangeWhere)) {
-            ASTNode rangeAST = HQLParser.parseExpr(rangeWhere, conf);
-            range.getParent().setChild(range.getChildIndex(), rangeAST);
-          }
-          fact.getStorgeWhereClauseMap().put(table, HQLParser.parseExpr(getWhereString(), conf));
+        String rangeWhere = sc.getRangeToWhere().get(range);
+        if (!StringUtils.isBlank(rangeWhere)) {
+          ASTNode rangeAST = HQLParser.parseExpr(rangeWhere, conf);
+          range.getParent().setChild(range.getChildIndex(), rangeAST);
         }
+        sc.getQueryAst().setWhereAST(HQLParser.parseExpr(getWhereString(), conf));
       }
     }
   }
 
   public String toHQL() throws LensException {
-    Set<CandidateFact> cfacts = pickCandidateFactToQuery();
+    Candidate cand = pickCandidateToQuery();
     Map<Dimension, CandidateDim> dimsToQuery = pickCandidateDimsToQuery(dimensions);
-    log.info("facts:{}, dimsToQuery: {}", cfacts, dimsToQuery);
+    Set<StorageCandidate> scSet = new HashSet<>();
+    if (cand != null) {
+      scSet.addAll(CandidateUtil.getStorageCandidates(cand));
+    }
+    log.info("Candidate: {}, DimsToQuery: {}", cand, dimsToQuery);
     if (autoJoinCtx != null) {
       // prune join paths for picked fact and dimensions
-      autoJoinCtx.pruneAllPaths(cube, cfacts, dimsToQuery);
-    }
-
-    Map<CandidateFact, Set<Dimension>> factDimMap = new HashMap<>();
-    if (cfacts != null) {
-      if (cfacts.size() > 1) {
-        // copy ASTs for each fact
-        for (CandidateFact cfact : cfacts) {
-          cfact.copyASTs(this);
-          factDimMap.put(cfact, new HashSet<>(dimsToQuery.keySet()));
-        }
+      autoJoinCtx.pruneAllPaths(cube, scSet, dimsToQuery);
+    }
+
+    Map<StorageCandidate, Set<Dimension>> factDimMap = new HashMap<>();
+    if (cand != null) {
+      // copy ASTs for each storage candidate
+      for (StorageCandidate sc : scSet) {
+        // Set the default queryAST for StorageCandidate and copy child ASTs from cubeql.
+        // Later in the rewrite flow each Storage candidate will modify them accordingly.
+        sc.setQueryAst(DefaultQueryAST.fromStorageCandidate(sc, this));
+        CandidateUtil.copyASTs(this, sc.getQueryAst());
+        factDimMap.put(sc, new HashSet<>(dimsToQuery.keySet()));
       }
-      for (CandidateFact fact : cfacts) {
-        addRangeClauses(fact);
+      for (StorageCandidate sc : scSet) {
+        addRangeClauses(sc);
       }
     }
 
     // pick dimension tables required during expression expansion for the picked fact and dimensions
     Set<Dimension> exprDimensions = new HashSet<>();
-    if (cfacts != null) {
-      for (CandidateFact cfact : cfacts) {
-        Set<Dimension> factExprDimTables = exprCtx.rewriteExprCtx(cfact, dimsToQuery, cfacts.size() > 1 ? cfact : this);
+    if (!scSet.isEmpty()) {
+      for (StorageCandidate sc : scSet) {
+        Set<Dimension> factExprDimTables = exprCtx.rewriteExprCtx(sc, dimsToQuery, sc.getQueryAst());
         exprDimensions.addAll(factExprDimTables);
-        if (cfacts.size() > 1) {
-          factDimMap.get(cfact).addAll(factExprDimTables);
-        }
-      }
-      if (cfacts.size() > 1) {
-        havingAST = MultiFactHQLContext.pushDownHaving(havingAST, this, cfacts);
+        factDimMap.get(sc).addAll(factExprDimTables);
       }
     } else {
       // dim only query
       exprDimensions.addAll(exprCtx.rewriteExprCtx(null, dimsToQuery, this));
     }
     dimsToQuery.putAll(pickCandidateDimsToQuery(exprDimensions));
-    log.info("facts:{}, dimsToQuery: {}", cfacts, dimsToQuery);
+    log.info("StorageCandidates: {}, DimsToQuery: {}", scSet, dimsToQuery);
 
     // pick denorm tables for the picked fact and dimensions
     Set<Dimension> denormTables = new HashSet<>();
-    if (cfacts != null) {
-      for (CandidateFact cfact : cfacts) {
-        Set<Dimension> factDenormTables = deNormCtx.rewriteDenormctx(cfact, dimsToQuery, cfacts.size() > 1);
+    if (!scSet.isEmpty()) {
+      for (StorageCandidate sc : scSet) {
+        Set<Dimension> factDenormTables = deNormCtx.rewriteDenormctx(sc, dimsToQuery, !scSet.isEmpty());
         denormTables.addAll(factDenormTables);
-        if (cfacts.size() > 1) {
-          factDimMap.get(cfact).addAll(factDenormTables);
-        }
+        factDimMap.get(sc).addAll(factDenormTables);
       }
     } else {
       denormTables.addAll(deNormCtx.rewriteDenormctx(null, dimsToQuery, false));
     }
     dimsToQuery.putAll(pickCandidateDimsToQuery(denormTables));
-    log.info("facts:{}, dimsToQuery: {}", cfacts, dimsToQuery);
+    log.info("StorageCandidates: {}, DimsToQuery: {}", scSet, dimsToQuery);
     // Prune join paths once denorm tables are picked
     if (autoJoinCtx != null) {
       // prune join paths for picked fact and dimensions
-      autoJoinCtx.pruneAllPaths(cube, cfacts, dimsToQuery);
+      autoJoinCtx.pruneAllPaths(cube, scSet, dimsToQuery);
     }
     if (autoJoinCtx != null) {
       // add optional dims from Join resolver
       Set<Dimension> joiningTables = new HashSet<>();
-      if (cfacts != null && cfacts.size() > 1) {
-        for (CandidateFact cfact : cfacts) {
-          Set<Dimension> factJoiningTables = autoJoinCtx.pickOptionalTables(cfact, factDimMap.get(cfact), this);
-          factDimMap.get(cfact).addAll(factJoiningTables);
+      if (scSet != null && scSet.size() > 1) {
+        for (StorageCandidate sc : scSet) {
+          Set<Dimension> factJoiningTables = autoJoinCtx.pickOptionalTables(sc, factDimMap.get(sc), this);
+          factDimMap.get(sc).addAll(factJoiningTables);
           joiningTables.addAll(factJoiningTables);
         }
       } else {
@@ -1024,56 +1016,40 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
       }
       dimsToQuery.putAll(pickCandidateDimsToQuery(joiningTables));
     }
-    log.info("Picked Fact:{} dimsToQuery: {}", cfacts, dimsToQuery);
+    log.info("Picked StorageCandidates: {} DimsToQuery: {}", scSet, dimsToQuery);
     pickedDimTables = dimsToQuery.values();
-    pickedFacts = cfacts;
-    if (cfacts != null) {
-      if (cfacts.size() > 1) {
-        // Update ASTs for each fact
-        for (CandidateFact cfact : cfacts) {
-          cfact.updateASTs(this);
-        }
-        whereAST = MultiFactHQLContext.convertHavingToWhere(havingAST, this, cfacts, new DefaultAliasDecider());
-        for (CandidateFact cFact : cfacts) {
-          cFact.updateFromString(this, factDimMap.get(cFact), dimsToQuery);
-        }
+    pickedCandidate = cand;
+    if (!scSet.isEmpty()) {
+      for (StorageCandidate sc : scSet) {
+        sc.updateFromString(this, factDimMap.get(sc), dimsToQuery);
       }
-    }
-    if (cfacts == null || cfacts.size() == 1) {
-      updateFromString(cfacts == null ? null : cfacts.iterator().next(), dimsToQuery);
+    } else {
+      updateFromString(null, dimsToQuery);
     }
     //update dim filter with fact filter
-    if (cfacts != null && cfacts.size() > 0) {
-      for (CandidateFact cfact : cfacts) {
-        if (!cfact.getStorageTables().isEmpty()) {
-          for (String qualifiedStorageTable : cfact.getStorageTables()) {
-            String storageTable = qualifiedStorageTable.substring(qualifiedStorageTable.indexOf(".") + 1);
-            String where = getWhere(cfact, autoJoinCtx,
-                cfact.getStorageWhereClause(storageTable), getAliasForTableName(cfact.getBaseTable().getName()),
-                shouldReplaceDimFilterWithFactFilter(), storageTable, dimsToQuery);
-            cfact.getStorgeWhereStringMap().put(storageTable, where);
-          }
+    if (scSet != null && scSet.size() > 0) {
+      for (StorageCandidate sc : scSet) {
+        if (!sc.getStorageName().isEmpty()) {
+          String qualifiedStorageTable = sc.getStorageName();
+          String storageTable = qualifiedStorageTable.substring(qualifiedStorageTable.indexOf(".") + 1);
+          String where = getWhere(sc, autoJoinCtx,
+              sc.getQueryAst().getWhereAST(), getAliasForTableName(sc.getBaseTable().getName()),
+              shouldReplaceDimFilterWithFactFilter(), storageTable, dimsToQuery);
+          sc.setWhereString(where);
         }
       }
     }
-    hqlContext = createHQLContext(cfacts, dimsToQuery, factDimMap);
-    return hqlContext.toHQL();
-  }
-
-  private HQLContextInterface createHQLContext(Set<CandidateFact> facts, Map<Dimension, CandidateDim> dimsToQuery,
-    Map<CandidateFact, Set<Dimension>> factDimMap) throws LensException {
-    if (facts == null || facts.size() == 0) {
-      return new DimOnlyHQLContext(dimsToQuery, this, this);
-    } else if (facts.size() == 1 && facts.iterator().next().getStorageTables().size() > 1) {
-      //create single fact with multiple storage context
-      return new SingleFactMultiStorageHQLContext(facts.iterator().next(), dimsToQuery, this, this);
-    } else if (facts.size() == 1 && facts.iterator().next().getStorageTables().size() == 1) {
-      CandidateFact fact = facts.iterator().next();
-      // create single fact context
-      return new SingleFactSingleStorageHQLContext(fact, null,
-        dimsToQuery, this, DefaultQueryAST.fromCandidateFact(fact, fact.getStorageTables().iterator().next(), this));
+
+    if (cand == null) {
+      hqlContext = new DimOnlyHQLContext(dimsToQuery, this, this);
+      return hqlContext.toHQL();
+    } else if (cand instanceof StorageCandidate) {
+      StorageCandidate sc = (StorageCandidate) cand;
+      sc.updateAnswerableSelectColumns(this);
+      return getInsertClause() + sc.toHQL();
     } else {
-      return new MultiFactHQLContext(facts, dimsToQuery, factDimMap, this);
+      UnionQueryWriter uqc = new UnionQueryWriter(cand, this);
+      return getInsertClause() + uqc.toHQL();
     }
   }
 
@@ -1190,7 +1166,7 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
   public String getInsertClause() {
     ASTNode destTree = qb.getParseInfo().getDestForClause(clauseName);
     if (destTree != null && ((ASTNode) (destTree.getChild(0))).getToken().getType() != TOK_TMP_FILE) {
-      return "INSERT OVERWRITE" + HQLParser.getString(destTree);
+      return "INSERT OVERWRITE " + HQLParser.getString(destTree) + " ";
     }
     return "";
   }
@@ -1261,10 +1237,10 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
    * <p></p>
    * Prune a candidate set, if any of the fact is missing.
    *
-   * @param pruneCause
    */
   //TODO union : deprecated
   @Deprecated
+  /*
   public void pruneCandidateFactSet(CandidateTablePruneCode pruneCause) {
     // remove candidate fact sets that have missing facts
     for (Iterator<Set<CandidateFact>> i = candidateFactSets.iterator(); i.hasNext();) {
@@ -1278,7 +1254,7 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     // prune candidate facts
     pruneCandidateFactWithCandidateSet(CandidateTablePruneCode.ELEMENT_IN_SET_PRUNED);
   }
-
+*/
   /**
    * Prune candidate fact with respect to available candidate fact sets.
    * <p></p>
@@ -1286,13 +1262,16 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
    *
    * @param pruneCause
    */
+/*
   public void pruneCandidateFactWithCandidateSet(CandidateTablePruneCode pruneCause) {
     // remove candidate facts that are not part of any covering set
     pruneCandidateFactWithCandidateSet(new CandidateTablePruneCause(pruneCause));
   }
-
+*/
   //TODO union : deprecated
+  /*
   @Deprecated
+
   public void pruneCandidateFactWithCandidateSet(CandidateTablePruneCause pruneCause) {
     // remove candidate facts that are not part of any covering set
     Set<CandidateFact> allCoveringFacts = new HashSet<CandidateFact>();
@@ -1308,7 +1287,7 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
       }
     }
   }
-
+*/
 
   public void addQueriedTimeDimensionCols(final String timeDimColName) {
 
@@ -1320,17 +1299,17 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     return ImmutableSet.copyOf(this.queriedTimeDimCols);
   }
 
-  private String getWhere(CandidateFact cfact, AutoJoinContext autoJoinCtx,
+  private String getWhere(StorageCandidate sc, AutoJoinContext autoJoinCtx,
                           ASTNode node, String cubeAlias,
                           boolean shouldReplaceDimFilter, String storageTable,
                           Map<Dimension, CandidateDim> dimToQuery) throws LensException {
     String whereString;
     if (autoJoinCtx != null && shouldReplaceDimFilter) {
       List<String> allfilters = new ArrayList<>();
-      getAllFilters(node, cubeAlias, allfilters, autoJoinCtx.getJoinClause(cfact), dimToQuery);
+      getAllFilters(node, cubeAlias, allfilters, autoJoinCtx.getJoinClause(sc), dimToQuery);
       whereString = StringUtils.join(allfilters, " and ");
     } else {
-      whereString = HQLParser.getString(cfact.getStorageWhereClause(storageTable));
+      whereString = HQLParser.getString(sc.getQueryAst().getWhereAST());
     }
     return whereString;
   }
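
One easy-to-miss fix in the CubeQueryContext hunks above is the padding added in getInsertClause(). Its result is now concatenated directly with the candidate HQL (getInsertClause() + sc.toHQL()), so without the surrounding spaces the INSERT destination and the following SELECT could run together. A rough illustration (the destination text is hypothetical; the exact output of HQLParser.getString is not shown in this diff):

    // "INSERT OVERWRITE " + "<destination>" + " " + "SELECT ..."
    //   -> "INSERT OVERWRITE <destination> SELECT ..."   (tokens stay separated)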

http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index 3ff6070..abd909f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@ -150,13 +150,12 @@ public class CubeQueryRewriter {
     // Resolve candidate fact tables and dimension tables for columns queried
     rewriters.add(candidateTblResolver);
     // Resolve aggregations and generate base select tree
-    rewriters.add(new CandidateCoveringSetsResolver(conf));
-
-    //TODO union: Add CoveringSetResolver which creates UnionCandidates and JoinCandidates. Some code form candidateTblResolver(phase 2) to be moved to CoveringSetResolver
-    //TODO union: AggregateResolver,GroupbyResolver,FieldValidator before CoveringSetResolver
     rewriters.add(new AggregateResolver());
     rewriters.add(new GroupbyResolver(conf));
     rewriters.add(new FieldValidator());
+    rewriters.add(storageTableResolver);
+    //TODO union: Add CoveringSetResolver which creates UnionCandidates and JoinCandidates. Some code from candidateTblResolver (phase 2) to be moved to CoveringSetResolver
+    //TODO union: AggregateResolver, GroupbyResolver, FieldValidator before CoveringSetResolver
     // Resolve joins and generate base join tree
     rewriters.add(new JoinResolver(conf));
     // Do col life validation
@@ -165,10 +164,9 @@ public class CubeQueryRewriter {
     // in join and denorm resolvers
     //TODO union : this should be CoveringSetResolver now
     rewriters.add(candidateTblResolver);
-
+    rewriters.add(new CandidateCoveringSetsResolver(conf));
     // Phase 1: resolve fact tables.
     //TODO union: This phase 1 of storageTableResolver should happen before CoveringSetResolver
-    rewriters.add(storageTableResolver);
     if (lightFactFirst) {
       // Prune candidate tables for which denorm column references do not exist
       //TODO union: phase 2 of denormResolver needs to be moved before CoveringSetResolver
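
To make the reordering above easier to follow, the relevant part of the rewriter chain after this change runs as sketched below (reconstructed from the two hunks; rewriters between the hunks are unchanged and omitted):

    // rewriters.add(candidateTblResolver);                    // resolve candidate fact/dim tables for queried columns
    // rewriters.add(new AggregateResolver());
    // rewriters.add(new GroupbyResolver(conf));
    // rewriters.add(new FieldValidator());
    // rewriters.add(storageTableResolver);                    // moved up: runs before covering-set resolution
    // rewriters.add(new JoinResolver(conf));
    // ... (col life validation and other rewriters, unchanged) ...
    // rewriters.add(candidateTblResolver);                    // phase 2
    // rewriters.add(new CandidateCoveringSetsResolver(conf)); // moved down: builds union/join candidates last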

http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
index 80ceae4..cd44235 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
@@ -18,10 +18,14 @@
  */
 package org.apache.lens.cube.parse;
 
+import lombok.Getter;
+import lombok.Setter;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 
 
 public class DefaultAliasDecider implements AliasDecider {
+  @Getter
+  @Setter
   int counter = 0;
   private static final String ALIAS_PREFIX = "alias";
 

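A minimal sketch of what the new Lombok accessors on counter enable, for example carrying alias numbering over from one decider to another (the scenario is hypothetical; only the generated getCounter()/setCounter(int) from the annotations above are used):

    DefaultAliasDecider first = new DefaultAliasDecider();
    // ... the first decider assigns aliases (alias0, alias1, ...), advancing its counter ...
    DefaultAliasDecider second = new DefaultAliasDecider();
    second.setCounter(first.getCounter());  // continue numbering where the first decider stopped
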
http://git-wip-us.apache.org/repos/asf/lens/blob/4af769ee/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
index c9993f3..17e202d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
@@ -33,7 +33,6 @@ public class DefaultQueryAST implements QueryAST {
   private String fromString;
   private String whereString;
 
-
   public String getSelectString() {
     return HQLParser.getString(selectAST);
   }
@@ -60,12 +59,12 @@ public class DefaultQueryAST implements QueryAST {
     return null;
   }
 
-  public static DefaultQueryAST fromCandidateFact(CandidateFact fact, String storageTable, QueryAST ast) throws
-    LensException {
+  public static DefaultQueryAST fromStorageCandidate(StorageCandidate sc, QueryAST ast) throws
+      LensException {
     return new DefaultQueryAST(ast.getSelectAST(),
-      null,
-      ast.getGroupByAST(), ast.getHavingAST(), ast.getJoinAST(), ast.getOrderByAST(), ast.getLimitValue(),
-      ast.getFromString(),
-      fact.getStorageWhereString(storageTable.substring(storageTable.indexOf(".") + 1)));
+        null,
+        ast.getGroupByAST(), ast.getHavingAST(), ast.getJoinAST(), ast.getOrderByAST(), ast.getLimitValue(),
+        ast.getFromString(),
+        sc.getWhereString());
   }
 }