Posted to commits@lens.apache.org by pr...@apache.org on 2017/02/28 12:48:24 UTC

[6/8] lens git commit: LENS-1389: Back Merge with master and fix lens-cube tests

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 82113af..1daeea5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -25,11 +25,8 @@ import java.util.*;
 
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
-import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
-import org.apache.lens.cube.parse.HQLParser.TreeNode;
 import org.apache.lens.server.api.error.LensException;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 
@@ -44,9 +41,6 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 class ExpressionResolver implements ContextRewriter {
 
-  public ExpressionResolver(Configuration conf) {
-  }
-
   static class ExpressionContext {
     @Getter
     private final ExprColumn exprCol;
@@ -61,7 +55,7 @@ class ExpressionResolver implements ContextRewriter {
     private Map<CandidateTable, Set<ExprSpecContext>> evaluableExpressions = new HashMap<>();
     private boolean hasMeasures = false;
 
-    public boolean hasMeasures() {
+    boolean hasMeasures() {
       return hasMeasures;
     }
 
@@ -151,11 +145,6 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     void addEvaluable(CubeQueryContext cubeql, CandidateTable cTable, ExprSpecContext esc) throws LensException {
-      Set<ExprSpecContext> evalSet = evaluableExpressions.get(cTable);
-      if (evalSet == null) {
-        evalSet = new LinkedHashSet<ExprSpecContext>();
-        evaluableExpressions.put(cTable, evalSet);
-      }
       // add optional dimensions involved in expressions
       for (String table : esc.getTblAliasToColumns().keySet()) {
         if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
@@ -164,7 +153,7 @@ class ExpressionResolver implements ContextRewriter {
           esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
         }
       }
-      evalSet.add(esc);
+      evaluableExpressions.computeIfAbsent(cTable, k -> new LinkedHashSet<>()).add(esc);
     }
 
     Set<ASTNode> getAllASTNodes() {
@@ -185,41 +174,40 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     boolean isEvaluable(CandidateTable cTable) {
-      if (directlyAvailableIn.contains(cTable)) {
-        return true;
-      }
-      if (evaluableExpressions.get(cTable) == null) {
-        return false;
-      }
-      return !evaluableExpressions.get(cTable).isEmpty();
+      return directlyAvailableIn.contains(cTable)
+        || (evaluableExpressions.get(cTable) != null && !evaluableExpressions.get(cTable).isEmpty());
     }
   }
 
-  static class ExprSpecContext extends TracksQueriedColumns {
+  static class ExprSpecContext extends TracksQueriedColumns implements TrackDenormContext {
     private Set<ExprSpec> exprSpecs = new LinkedHashSet<>();
     @Getter
     @Setter
     private ASTNode finalAST;
     @Getter
     private Set<Dimension> exprDims = new HashSet<>();
+    @Getter
+    @Setter
+    private DenormalizationResolver.DenormalizationContext deNormCtx;
 
     ExprSpecContext(ExprSpec exprSpec, CubeQueryContext cubeql) throws LensException {
       // replaces table names in expression with aliases in the query
       finalAST = replaceAlias(exprSpec.copyASTNode(), cubeql);
       exprSpecs.add(exprSpec);
     }
-    public ExprSpecContext(ExprSpecContext nested, ExprSpec current, ASTNode node,
+    ExprSpecContext(ExprSpecContext nested, ExprSpec current, ASTNode node,
       CubeQueryContext cubeql) throws LensException {
       exprSpecs.addAll(nested.exprSpecs);
       exprSpecs.add(current);
       finalAST = replaceAlias(node, cubeql);
     }
-    public void replaceAliasInAST(CubeQueryContext cubeql)
+    void replaceAliasInAST(CubeQueryContext cubeql)
       throws LensException {
       AliasReplacer.extractTabAliasForCol(cubeql, this);
       finalAST = AliasReplacer.replaceAliases(finalAST, 0, cubeql.getColToTableAlias());
     }
 
+
     void resolveColumns(CubeQueryContext cubeql) throws LensException {
       // finds all columns and table aliases in the expression
       ColumnResolver.getColsForTree(cubeql, finalAST, this, false);
@@ -251,28 +239,46 @@ class ExpressionResolver implements ContextRewriter {
       return null;
     }
 
-    public boolean isValidInTimeRange(final TimeRange range) {
+    boolean isValidInTimeRange(final TimeRange range) {
       return isValidFrom(range.getFromDate()) && isValidTill(range.getToDate());
     }
 
-    public boolean isValidFrom(@NonNull final Date date) {
-      return (getStartTime() == null) ? true : date.equals(getStartTime()) || date.after(getStartTime());
+    boolean isValidFrom(@NonNull final Date date) {
+      return (getStartTime() == null) || (date.equals(getStartTime()) || date.after(getStartTime()));
     }
 
-    public boolean isValidTill(@NonNull final Date date) {
-      return (getEndTime() == null) ? true : date.equals(getEndTime()) || date.before(getEndTime());
+    boolean isValidTill(@NonNull final Date date) {
+      return (getEndTime() == null) || (date.equals(getEndTime()) || date.before(getEndTime()));
     }
 
     public String toString() {
       return HQLParser.getString(finalAST);
     }
+
   }
 
-  @AllArgsConstructor
+  @RequiredArgsConstructor
   @ToString
   private static class PickedExpression {
-    private String srcAlias;
-    private ExprSpecContext pickedCtx;
+    private final String srcAlias;
+    private final ExprSpecContext pickedCtx;
+    private transient ASTNode reWrittenAST = null;
+
+    /*
+    Initializes reWrittenAST as a copy of the final AST when the flag is true. The copy is required when
+    finalAST will get modified because of the denormalization context.
+    Otherwise, reWrittenAST is a reference to the final AST, without any copy.
+     */
+    void initRewrittenAST(boolean copyFinal) {
+      if (copyFinal) {
+        reWrittenAST = MetastoreUtil.copyAST(pickedCtx.getFinalAST());
+      } else {
+        reWrittenAST = pickedCtx.getFinalAST();
+      }
+    }
+    ASTNode getRewrittenAST() {
+      return reWrittenAST;
+    }
   }
 
   static class ExpressionResolverContext {
@@ -285,13 +291,7 @@ class ExpressionResolver implements ContextRewriter {
       this.cubeql = cubeql;
     }
     void addExpressionQueried(ExpressionContext expr) {
-      String exprCol = expr.getExprCol().getName().toLowerCase();
-      Set<ExpressionContext> ecSet = allExprsQueried.get(exprCol);
-      if (ecSet == null) {
-        ecSet = new LinkedHashSet<ExpressionContext>();
-        allExprsQueried.put(exprCol, ecSet);
-      }
-      ecSet.add(expr);
+      allExprsQueried.computeIfAbsent(expr.getExprCol().getName().toLowerCase(), k -> new LinkedHashSet<>()).add(expr);
     }
 
     boolean isQueriedExpression(String column) {
@@ -318,7 +318,7 @@ class ExpressionResolver implements ContextRewriter {
       throw new IllegalArgumentException("no expression available for " + expr + " alias:" + alias);
     }
 
-    public boolean hasMeasures(String expr, CubeInterface cube) {
+    boolean hasMeasures(String expr, CubeInterface cube) {
       String alias = cubeql.getAliasForTableName(cube.getName());
       ExpressionContext ec = getExpressionContext(expr, alias);
       boolean hasMeasures = false;
@@ -337,7 +337,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     //updates all expression specs which are evaluable
-    public void updateEvaluables(String expr, CandidateTable cTable)
+    void updateEvaluables(String expr, CandidateTable cTable)
       throws LensException {
       String alias = cubeql.getAliasForTableName(cTable.getBaseTable().getName());
       ExpressionContext ec = getExpressionContext(expr, alias);
@@ -356,7 +356,7 @@ class ExpressionResolver implements ContextRewriter {
           boolean isEvaluable = true;
           for (String col : columns) {
             if (!cTable.getColumns().contains(col.toLowerCase())) {
-              if (!cubeql.getDeNormCtx().addRefUsage(cTable, col, cTable.getBaseTable().getName())) {
+              if (!esc.getDeNormCtx().addRefUsage(cubeql, cTable, col, cTable.getBaseTable().getName())) {
                 // check if it is available as reference, if not expression is not evaluable
                 log.debug("{} = {} is not evaluable in {}", expr, esc, cTable);
                 isEvaluable = false;
@@ -373,54 +373,15 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     // checks if expr is evaluable
-    public boolean isEvaluable(String expr, CandidateTable cTable) {
+    boolean isEvaluable(String expr, CandidateTable cTable) {
       ExpressionContext ec = getExpressionContext(expr, cubeql.getAliasForTableName(cTable.getBaseTable().getName()));
       return ec.isEvaluable(cTable);
     }
 
-    /**
-     *
-     * @param exprs
-     * @return
-     */
-    public boolean allNotEvaluable(Set<String> exprs, CandidateTable cTable) {
-      for (String expr : exprs) {
-        if (isEvaluable(expr, cTable)) {
-          return false;
-        }
-      }
-      return true;
-    }
-
-    public Collection<String> coveringExpressions(Set<String> exprs, CandidateTable cTable) {
-      Set<String> coveringSet = new HashSet<String>();
-      for (String expr : exprs) {
-        if (isEvaluable(expr, cTable)) {
-          coveringSet.add(expr);
-        }
-      }
-      return coveringSet;
-    }
-
-    /**
-     * Returns true if all passed expressions are evaluable
-     *
-     * @param cTable
-     * @param exprs
-     * @return
-     */
-    public boolean allEvaluable(CandidateTable cTable, Set<String> exprs) {
-      for (String expr : exprs) {
-        if (!isEvaluable(expr, cTable)) {
-          return false;
-        }
-      }
-      return true;
-    }
-
-    public Set<Dimension> rewriteExprCtx(StorageCandidate sc, Map<Dimension, CandidateDim> dimsToQuery,
+    Set<Dimension> rewriteExprCtx(CubeQueryContext cubeql, StorageCandidate sc, Map<Dimension, CandidateDim> dimsToQuery,
       QueryAST queryAST) throws LensException {
       Set<Dimension> exprDims = new HashSet<Dimension>();
+      log.info("Picking expressions for candidate {} ", sc);
       if (!allExprsQueried.isEmpty()) {
         // pick expressions for fact
         if (sc != null) {
@@ -432,16 +393,21 @@ class ExpressionResolver implements ContextRewriter {
             pickExpressionsForTable(cdim);
           }
         }
-        // Replace picked expressions in all the base trees
-        replacePickedExpressions(sc, queryAST);
         log.debug("Picked expressions: {}", pickedExpressions);
         for (Set<PickedExpression> peSet : pickedExpressions.values()) {
           for (PickedExpression pe : peSet) {
             exprDims.addAll(pe.pickedCtx.exprDims);
+            pe.initRewrittenAST(pe.pickedCtx.deNormCtx.hasReferences());
+            exprDims.addAll(pe.pickedCtx.deNormCtx.rewriteDenormctxInExpression(cubeql, sc, dimsToQuery,
+              pe.getRewrittenAST()));
           }
         }
+        // Replace picked expressions in all the base trees
+        replacePickedExpressions(sc, queryAST);
       }
+
       pickedExpressions.clear();
+
       return exprDims;
     }
 
@@ -466,27 +432,25 @@ class ExpressionResolver implements ContextRewriter {
         return;
       }
       // Traverse the tree and resolve expression columns
-      HQLParser.bft(node, new ASTNodeVisitor() {
-        @Override
-        public void visit(TreeNode visited) throws LensException {
-          ASTNode node = visited.getNode();
-          int childcount = node.getChildCount();
-          for (int i = 0; i < childcount; i++) {
-            ASTNode current = (ASTNode) node.getChild(i);
-            if (current.getToken().getType() == DOT) {
-              // This is for the case where column name is prefixed by table name
-              // or table alias
-              // For example 'select fact.id, dim2.id ...'
-              // Right child is the column name, left child.ident is table name
-              ASTNode tabident = HQLParser.findNodeByPath(current, TOK_TABLE_OR_COL, Identifier);
-              ASTNode colIdent = (ASTNode) current.getChild(1);
-              String column = colIdent.getText().toLowerCase();
-
-              if (pickedExpressions.containsKey(column)) {
-                PickedExpression expr = getPickedExpression(column, tabident.getText().toLowerCase());
-                if (expr != null) {
-                  node.setChild(i, replaceAlias(expr.pickedCtx.finalAST, cubeql));
-                }
+      HQLParser.bft(node, visited -> {
+        ASTNode node1 = visited.getNode();
+        int childcount = node1.getChildCount();
+        for (int i = 0; i < childcount; i++) {
+          ASTNode current = (ASTNode) node1.getChild(i);
+          if (current.getToken().getType() == DOT) {
+            // This is for the case where column name is prefixed by table name
+            // or table alias
+            // For example 'select fact.id, dim2.id ...'
+            // Right child is the column name, left child.ident is table name
+            ASTNode tabident = HQLParser.findNodeByPath(current, TOK_TABLE_OR_COL, Identifier);
+            ASTNode colIdent = (ASTNode) current.getChild(1);
+            String column = colIdent.getText().toLowerCase();
+
+            if (pickedExpressions.containsKey(column)) {
+              assert tabident != null;
+              PickedExpression expr = getPickedExpression(column, tabident.getText().toLowerCase());
+              if (expr != null) {
+                node1.setChild(i, replaceAlias(expr.getRewrittenAST(), cubeql));
               }
             }
           }
@@ -515,12 +479,8 @@ class ExpressionResolver implements ContextRewriter {
               log.debug("{} is not directly evaluable in {}", ec, cTable);
               if (ec.evaluableExpressions.get(cTable) != null && !ec.evaluableExpressions.get(cTable).isEmpty()) {
                 // pick first evaluable expression
-                Set<PickedExpression> peSet = pickedExpressions.get(ecEntry.getKey());
-                if (peSet == null) {
-                  peSet = new HashSet<PickedExpression>();
-                  pickedExpressions.put(ecEntry.getKey(), peSet);
-                }
-                peSet.add(new PickedExpression(ec.srcAlias, ec.evaluableExpressions.get(cTable).iterator().next()));
+                pickedExpressions.computeIfAbsent(ecEntry.getKey(), k -> new HashSet<>())
+                  .add(new PickedExpression(ec.srcAlias, ec.evaluableExpressions.get(cTable).iterator().next()));
               }
             }
           }
@@ -549,6 +509,21 @@ class ExpressionResolver implements ContextRewriter {
             if (removed) {
               continue;
             }
+            // Remove expressions for which denormalized columns are no more reachable
+            esc.getDeNormCtx().pruneReferences(cubeql);
+            for (String table : esc.getDeNormCtx().getTableToRefCols().keySet()) {
+              Set<String> nonReachableFields = esc.getDeNormCtx().getNonReachableReferenceFields(table);
+              if (!nonReachableFields.isEmpty()) {
+                log.info("Removing expression {} as columns {} are not available", esc, nonReachableFields);
+                iterator.remove();
+                removedEsc.add(esc);
+                removed = true;
+                break;
+              }
+            }
+            if (removed) {
+              continue;
+            }
             //remove expressions which are not valid in the timerange queried
             // If an expression is defined as
             // ex = a + b // from t1 to t2;
@@ -614,7 +589,7 @@ class ExpressionResolver implements ContextRewriter {
       for (Map.Entry<String, Set<String>> entry : cubeql.getTblAliasToColumns().entrySet()) {
         String alias = entry.getKey();
         // skip default alias
-        if (alias == CubeQueryContext.DEFAULT_TABLE) {
+        if (Objects.equals(alias, CubeQueryContext.DEFAULT_TABLE)) {
           continue;
         }
         AbstractCubeTable tbl = cubeql.getCubeTableForAlias(alias);
@@ -703,24 +678,21 @@ class ExpressionResolver implements ContextRewriter {
   }
 
   private static ASTNode replaceAlias(final ASTNode expr, final CubeQueryContext cubeql) throws LensException {
-    ASTNode finalAST = MetastoreUtil.copyAST(expr);
-    HQLParser.bft(finalAST, new ASTNodeVisitor() {
-      @Override
-      public void visit(TreeNode visited) {
-        ASTNode node = visited.getNode();
-        ASTNode parent = null;
-        if (visited.getParent() != null) {
-          parent = visited.getParent().getNode();
-        }
-
-        if (node.getToken().getType() == TOK_TABLE_OR_COL && (parent != null && parent.getToken().getType() == DOT)) {
-          ASTNode current = (ASTNode) node.getChild(0);
-          if (current.getToken().getType() == Identifier) {
-            String tableName = current.getToken().getText().toLowerCase();
-            String alias = cubeql.getAliasForTableName(tableName);
-            if (!alias.equalsIgnoreCase(tableName)) {
-              node.setChild(0, new ASTNode(new CommonToken(HiveParser.Identifier, alias)));
-            }
+    final ASTNode finalAST = MetastoreUtil.copyAST(expr);
+    HQLParser.bft(finalAST, visited -> {
+      ASTNode node = visited.getNode();
+      ASTNode parent = null;
+      if (visited.getParent() != null) {
+        parent = visited.getParent().getNode();
+      }
+
+      if (node.getToken().getType() == TOK_TABLE_OR_COL && (parent != null && parent.getToken().getType() == DOT)) {
+        ASTNode current = (ASTNode) node.getChild(0);
+        if (current.getToken().getType() == Identifier) {
+          String tableName = current.getToken().getText().toLowerCase();
+          String alias = cubeql.getAliasForTableName(tableName);
+          if (!alias.equalsIgnoreCase(tableName)) {
+            node.setChild(0, new ASTNode(new CommonToken(HiveParser.Identifier, alias)));
           }
         }
       }
@@ -734,33 +706,30 @@ class ExpressionResolver implements ContextRewriter {
       return;
     }
     // Traverse the tree and resolve expression columns
-    HQLParser.bft(expr, new ASTNodeVisitor() {
-      @Override
-      public void visit(TreeNode visited) throws LensException {
-        ASTNode node = visited.getNode();
-        int childcount = node.getChildCount();
-        for (int i = 0; i < childcount; i++) {
-          ASTNode current = (ASTNode) node.getChild(i);
-          if (current.getToken().getType() == TOK_TABLE_OR_COL && (node != null && node.getToken().getType() != DOT)) {
-            // Take child ident.totext
-            ASTNode ident = (ASTNode) current.getChild(0);
-            String column = ident.getText().toLowerCase();
-            if (toReplace.equals(column)) {
-              node.setChild(i, MetastoreUtil.copyAST(columnAST));
-            }
-          } else if (current.getToken().getType() == DOT) {
-            // This is for the case where column name is prefixed by table name
-            // or table alias
-            // For example 'select fact.id, dim2.id ...'
-            // Right child is the column name, left child.ident is table name
-            ASTNode tabident = HQLParser.findNodeByPath(current, TOK_TABLE_OR_COL, Identifier);
-            ASTNode colIdent = (ASTNode) current.getChild(1);
-
-            String column = colIdent.getText().toLowerCase();
-
-            if (toReplace.equals(column)) {
-              node.setChild(i, MetastoreUtil.copyAST(columnAST));
-            }
+    HQLParser.bft(expr, visited -> {
+      ASTNode node = visited.getNode();
+      int childcount = node.getChildCount();
+      for (int i = 0; i < childcount; i++) {
+        ASTNode current = (ASTNode) node.getChild(i);
+        if (current.getToken().getType() == TOK_TABLE_OR_COL && node.getToken().getType() != DOT) {
+          // Take child ident.totext
+          ASTNode ident = (ASTNode) current.getChild(0);
+          String column = ident.getText().toLowerCase();
+          if (toReplace.equals(column)) {
+            node.setChild(i, MetastoreUtil.copyAST(columnAST));
+          }
+        } else if (current.getToken().getType() == DOT) {
+          // This is for the case where column name is prefixed by table name
+          // or table alias
+          // For example 'select fact.id, dim2.id ...'
+          // Right child is the column name, left child.ident is table name
+          ASTNode tabident = HQLParser.findNodeByPath(current, TOK_TABLE_OR_COL, Identifier);
+          ASTNode colIdent = (ASTNode) current.getChild(1);
+
+          String column = colIdent.getText().toLowerCase();
+
+          if (toReplace.equals(column)) {
+            node.setChild(i, MetastoreUtil.copyAST(columnAST));
           }
         }
       }
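
A note on the PickedExpression change in this hunk: initRewrittenAST copies the picked expression's final AST only when the denormalization context may rewrite it, and otherwise just shares the reference. The copy-on-write idea can be sketched as below (illustrative only; Tree and copy() are hypothetical stand-ins for ASTNode and MetastoreUtil.copyAST):

    public class CopyOnWriteSketch {
      // Hypothetical stand-in for ASTNode; copy() plays the role of MetastoreUtil.copyAST.
      static class Tree {
        String label;
        Tree(String label) { this.label = label; }
        Tree copy() { return new Tree(label); }
      }

      private final Tree finalAst = new Tree("expr");
      private Tree rewritten;

      // Copy only when a later rewrite (denormalization) would otherwise mutate the shared AST.
      void initRewritten(boolean willBeRewritten) {
        rewritten = willBeRewritten ? finalAst.copy() : finalAst;
      }

      public static void main(String[] args) {
        CopyOnWriteSketch s = new CopyOnWriteSketch();
        s.initRewritten(true);
        s.rewritten.label = "expr-rewritten";  // mutating the copy...
        System.out.println(s.finalAst.label);  // ...leaves the original intact: prints "expr"
      }
    }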

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
index 48af0c9..94f9c7d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
@@ -104,7 +104,6 @@ public class FieldValidator implements ContextRewriter {
           conflictingFields.addAll(queriedMsrs);
           throw new FieldsCannotBeQueriedTogetherException(new ConflictingFields(conflictingFields));
         } else {
-
           conflictingFields.addAll(queriedMsrs);
           throw new FieldsCannotBeQueriedTogetherException(new ConflictingFields(conflictingFields));
         }

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
index 4664cde..4cae6f8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
@@ -182,7 +182,7 @@ class MaxCoveringFactResolver implements ContextRewriter {
     private final long seconds;
     private final long milliseconds;
 
-    public TimeCovered(long ms) {
+    TimeCovered(long ms) {
       milliseconds = ms % (24 * 60 * 60 * 1000);
       long seconds = ms / (24 * 60 * 60 * 1000);
       this.seconds = seconds % (24 * 60 * 60);
@@ -194,9 +194,8 @@ class MaxCoveringFactResolver implements ContextRewriter {
     }
 
     public String toString() {
-      return new StringBuilder().append(days).append(" days, ").append(hours).append(" hours, ").append(minutes)
-        .append(" minutes, ").append(seconds).append(" seconds, ").append(milliseconds).append(" milliseconds.")
-        .toString();
+      return String.valueOf(days) + " days, " + hours + " hours, " + minutes +
+        " minutes, " + seconds + " seconds, " + milliseconds + " milliseconds.";
     }
   }
 }
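
For comparison with TimeCovered above, the conventional decomposition of a millisecond duration uses repeated division and remainder; this snippet is illustrative only, not Lens code:

    public class DurationSketch {
      public static void main(String[] args) {
        long ms = 93784512L; // expect: 1 day, 2 hours, 3 minutes, 4 seconds, 512 milliseconds
        long millis = ms % 1000;
        long totalSeconds = ms / 1000;
        long seconds = totalSeconds % 60;
        long totalMinutes = totalSeconds / 60;
        long minutes = totalMinutes % 60;
        long totalHours = totalMinutes / 60;
        long hours = totalHours % 24;
        long days = totalHours / 24;
        System.out.println(days + " days, " + hours + " hours, " + minutes
          + " minutes, " + seconds + " seconds, " + millis + " milliseconds.");
      }
    }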

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
index 0c6465a..50ccab5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
@@ -18,8 +18,13 @@
  */
 package org.apache.lens.cube.parse;
 
+import static com.google.common.collect.Sets.newHashSet;
+import static java.util.stream.Collectors.toMap;
+
 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 
@@ -47,10 +52,7 @@ public class PruneCauses<T> extends HashMap<T, List<CandidateTablePruneCause>> {
     HashMap<String, List<CandidateTablePruneCause>> detailedMessage = Maps.newHashMap();
     for (Map.Entry<CandidateTablePruneCause, List<T>> entry : getReversed().entrySet()) {
       String key = StringUtils.join(entry.getValue(), ",");
-      if (detailedMessage.get(key) == null) {
-        detailedMessage.put(key, new ArrayList<CandidateTablePruneCause>());
-      }
-      detailedMessage.get(key).add(entry.getKey());
+      detailedMessage.computeIfAbsent(key, k -> new ArrayList<>()).add(entry.getKey());
     }
     return detailedMessage;
   }
@@ -69,10 +71,7 @@ public class PruneCauses<T> extends HashMap<T, List<CandidateTablePruneCause>> {
     HashMap<CandidateTablePruneCause, List<T>> result = new HashMap<CandidateTablePruneCause, List<T>>();
     for (T key : keySet()) {
       for (CandidateTablePruneCause value : get(key)) {
-        if (result.get(value) == null) {
-          result.put(value, new ArrayList<T>());
-        }
-        result.get(value).add(key);
+        result.computeIfAbsent(value, k -> new ArrayList<>()).add(key);
       }
     }
     return result;
@@ -93,12 +92,8 @@ public class PruneCauses<T> extends HashMap<T, List<CandidateTablePruneCause>> {
   }
 
   public String getBriefCause() {
-    CandidateTablePruneCode maxCause = CandidateTablePruneCode.values()[0];
-    for (CandidateTablePruneCause cause : getReversed().keySet()) {
-      if (cause.getCause().compareTo(maxCause) > 0) {
-        maxCause = cause.getCause();
-      }
-    }
+    CandidateTablePruneCode maxCause = getReversed().keySet().stream()
+      .map(CandidateTablePruneCause::getCause).max(Comparator.naturalOrder()).get();
     Map<CandidateTablePruneCause, String> maxCauseMap = Maps.newHashMap();
     for (Map.Entry<CandidateTablePruneCause, List<T>> entry : getReversed().entrySet()) {
       if (entry.getKey().getCause().equals(maxCause)) {
@@ -119,5 +114,11 @@ public class PruneCauses<T> extends HashMap<T, List<CandidateTablePruneCause>> {
   public static final class BriefAndDetailedError {
     private String brief;
     private HashMap<String, List<CandidateTablePruneCause>> details;
+
+    Map<HashSet<String>, List<CandidateTablePruneCause>> enhanced() {
+      return getDetails().entrySet().stream().collect(toMap(
+        o -> newHashSet(o.getKey().split(",")),
+        Map.Entry::getValue));
+    }
   }
 }
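
The rewritten getBriefCause relies on enum natural ordering: enum constants compare by declaration order, so max(Comparator.naturalOrder()) picks the latest-declared cause, just as the removed loop did. A small sketch under that assumption (the enum here is a hypothetical stand-in for CandidateTablePruneCode):

    import java.util.Comparator;
    import java.util.stream.Stream;

    public class BriefCauseSketch {
      // Hypothetical stand-in; declaration order encodes increasing priority.
      enum Code { COLUMN_NOT_FOUND, MISSING_PARTITIONS, TIME_RANGE_NOT_ANSWERABLE }

      public static void main(String[] args) {
        Code max = Stream.of(Code.MISSING_PARTITIONS, Code.COLUMN_NOT_FOUND, Code.TIME_RANGE_NOT_ANSWERABLE)
          .max(Comparator.naturalOrder())
          .orElseThrow(IllegalStateException::new); // the patch's .get() likewise assumes a non-empty key set
        System.out.println(max); // TIME_RANGE_NOT_ANSWERABLE, the latest-declared constant
      }
    }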

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
index 832b7a4..310a655 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
@@ -124,7 +124,7 @@ class QueriedPhraseContext extends TracksQueriedColumns implements TrackQueriedC
     for (String col : queriedDimAttrs) {
       if (!sc.getColumns().contains(col.toLowerCase())) {
         // check if it available as reference
-        if (!cubeQl.getDeNormCtx().addRefUsage(sc, col, cubeQl.getCube().getName())) {
+        if (!cubeQl.getDeNormCtx().addRefUsage(cubeQl, sc, col, cubeQl.getCube().getName())) {
           log.info("column {} is not available in fact table:{} ", col, sc);
           return false;
         }

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
index fca86ec..25acb01 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
@@ -18,21 +18,44 @@
  */
 package org.apache.lens.cube.parse;
 
-import static org.apache.lens.cube.parse.CandidateTablePruneCause.*;
-import static org.apache.lens.cube.parse.StorageUtil.*;
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipUpdatePeriodCode;
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.timeDimNotSupported;
+import static org.apache.lens.cube.parse.StorageUtil.getFallbackRange;
+import static org.apache.lens.cube.parse.StorageUtil.joinWithAnd;
+import static org.apache.lens.cube.parse.StorageUtil.processCubeColForDataCompleteness;
+import static org.apache.lens.cube.parse.StorageUtil.processExpressionsForCompleteness;
 
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
-import java.util.*;
-
-import org.apache.lens.cube.metadata.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
+import java.util.TreeSet;
+
+import org.apache.lens.cube.metadata.AbstractCubeTable;
+import org.apache.lens.cube.metadata.CubeFactTable;
+import org.apache.lens.cube.metadata.CubeInterface;
+import org.apache.lens.cube.metadata.CubeMetastoreClient;
+import org.apache.lens.cube.metadata.DateUtil;
+import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.cube.metadata.MetastoreUtil;
+import org.apache.lens.cube.metadata.TimeRange;
+import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.metastore.DataCompletenessChecker;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.lib.Node;
-
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
@@ -67,6 +90,9 @@ public class StorageCandidate implements Candidate, CandidateTable {
    */
   @Getter
   private TreeSet<UpdatePeriod> validUpdatePeriods = new TreeSet<>();
+  @Getter
+  @Setter
+  Map<String, SkipUpdatePeriodCode> updatePeriodRejectionCause;
   private Configuration conf = null;
 
   /**
@@ -86,7 +112,6 @@ public class StorageCandidate implements Candidate, CandidateTable {
   @Getter
   @Setter
   private QueryAST queryAst;
-  private Map<Dimension, CandidateDim> dimensions;
   @Getter
   private Map<TimeRange, String> rangeToWhere = new LinkedHashMap<>();
   @Getter
@@ -153,17 +178,15 @@ public class StorageCandidate implements Candidate, CandidateTable {
   public StorageCandidate(StorageCandidate sc) throws LensException {
     this(sc.getCube(), sc.getFact(), sc.getStorageName(), sc.getCubeql());
     // Copy update periods.
-    for (UpdatePeriod updatePeriod : sc.getValidUpdatePeriods()) {
-      this.validUpdatePeriods.add(updatePeriod);
-    }
+    this.validUpdatePeriods.addAll(sc.getValidUpdatePeriods());
   }
 
   private void setMissingExpressions(Set<Dimension> queriedDims) throws LensException {
     setFromString(String.format("%s", getFromTable()));
     setWhereString(joinWithAnd(
-        genWhereClauseWithDimPartitions(whereString, queriedDims), cubeql.getConf().getBoolean(
-            CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, CubeQueryConfUtil.DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL)
-            ? getPostSelectionWhereClause() : null));
+      genWhereClauseWithDimPartitions(whereString, queriedDims), cubeql.getConf().getBoolean(
+        CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, CubeQueryConfUtil.DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL)
+        ? getPostSelectionWhereClause() : null));
     if (cubeql.getHavingAST() != null) {
       queryAst.setHavingAST(MetastoreUtil.copyAST(cubeql.getHavingAST()));
     }
@@ -195,7 +218,7 @@ public class StorageCandidate implements Candidate, CandidateTable {
     return whereBuf.toString();
   }
 
-  static void appendWhereClause(StringBuilder filterCondition, String whereClause, boolean hasMore) {
+  private static void appendWhereClause(StringBuilder filterCondition, String whereClause, boolean hasMore) {
     // Make sure we add AND only when there are already some conditions in where
     // clause
     if (hasMore && !filterCondition.toString().isEmpty() && !StringUtils.isBlank(whereClause)) {
@@ -209,11 +232,11 @@ public class StorageCandidate implements Candidate, CandidateTable {
     }
   }
 
-  protected String getPostSelectionWhereClause() throws LensException {
+  private String getPostSelectionWhereClause() throws LensException {
     return null;
   }
 
-  public void setAnswerableMeasurePhraseIndices(int index) {
+  void setAnswerableMeasurePhraseIndices(int index) {
     answerableMeasurePhraseIndices.add(index);
   }
 
@@ -233,20 +256,20 @@ public class StorageCandidate implements Candidate, CandidateTable {
   /**
    * Update Orderby children with final alias used in select
    *
-   * @param orderby
-   * @param select
+   * @param orderby Order by AST
+   * @param select  Select AST
    */
   private void updateOrderByWithFinalAlias(ASTNode orderby, ASTNode select) {
     if (orderby == null) {
       return;
     }
-    for(Node orderbyNode : orderby.getChildren()) {
+    for (Node orderbyNode : orderby.getChildren()) {
       ASTNode orderBychild = (ASTNode) orderbyNode;
-      for(Node selectNode : select.getChildren()) {
+      for (Node selectNode : select.getChildren()) {
         ASTNode selectChild = (ASTNode) selectNode;
         if (selectChild.getChildCount() == 2) {
           if (HQLParser.getString((ASTNode) selectChild.getChild(0))
-              .equals(HQLParser.getString((ASTNode) orderBychild.getChild(0)))) {
+            .equals(HQLParser.getString((ASTNode) orderBychild.getChild(0)))) {
             ASTNode alias = new ASTNode((ASTNode) selectChild.getChild(1));
             orderBychild.replaceChildren(0, 0, alias);
             break;
@@ -356,7 +379,7 @@ public class StorageCandidate implements Candidate, CandidateTable {
 
     if (!client.isStorageTableCandidateForRange(name, fromDate, toDate)) {
       cubeql.addStoragePruningMsg(this,
-          new CandidateTablePruneCause(CandidateTablePruneCause.CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE));
+        new CandidateTablePruneCause(CandidateTablePruneCause.CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE));
       return false;
     } else if (!client.partColExists(name, partCol)) {
       log.info("{} does not exist in {}", partCol, name);
@@ -370,7 +393,7 @@ public class StorageCandidate implements Candidate, CandidateTable {
     Date floorToDate = DateUtil.getFloorDate(toDate, interval);
 
     int lookAheadNumParts = conf
-        .getInt(CubeQueryConfUtil.getLookAheadPTPartsKey(interval), CubeQueryConfUtil.DEFAULT_LOOK_AHEAD_PT_PARTS);
+      .getInt(CubeQueryConfUtil.getLookAheadPTPartsKey(interval), CubeQueryConfUtil.DEFAULT_LOOK_AHEAD_PT_PARTS);
 
     TimeRange.Iterable.Iterator iter = TimeRange.iterable(ceilFromDate, floorToDate, interval, 1).iterator();
     // add partitions from ceilFrom to floorTo
@@ -400,12 +423,12 @@ public class StorageCandidate implements Candidate, CandidateTable {
           // look-ahead
           // process time are present
           TimeRange.Iterable.Iterator processTimeIter = TimeRange.iterable(nextDt, lookAheadNumParts, interval, 1)
-              .iterator();
+            .iterator();
           while (processTimeIter.hasNext()) {
             Date pdt = processTimeIter.next();
             Date nextPdt = processTimeIter.peekNext();
             FactPartition processTimePartition = new FactPartition(processTimePartCol, pdt, interval, null,
-                partWhereClauseFormat);
+              partWhereClauseFormat);
             updatePartitionStorage(processTimePartition);
             if (processTimePartition.isFound()) {
               log.debug("Finer parts not required for look-ahead partition :{}", part);
@@ -419,15 +442,15 @@ public class StorageCandidate implements Candidate, CandidateTable {
                 // Get partitions for look ahead process time
                 log.debug("Looking for process time partitions between {} and {}", pdt, nextPdt);
                 Set<FactPartition> processTimeParts = getPartitions(
-                    TimeRange.getBuilder().fromDate(pdt).toDate(nextPdt).partitionColumn(processTimePartCol).build(),
-                    newset, true, failOnPartialData, missingPartitions);
+                  TimeRange.getBuilder().fromDate(pdt).toDate(nextPdt).partitionColumn(processTimePartCol).build(),
+                  newset, true, failOnPartialData, missingPartitions);
                 log.debug("Look ahead partitions: {}", processTimeParts);
                 TimeRange timeRange = TimeRange.getBuilder().fromDate(dt).toDate(nextDt).build();
                 for (FactPartition pPart : processTimeParts) {
                   log.debug("Looking for finer partitions in pPart: {}", pPart);
                   for (Date date : timeRange.iterable(pPart.getPeriod(), 1)) {
                     FactPartition innerPart = new FactPartition(partCol, date, pPart.getPeriod(), pPart,
-                        partWhereClauseFormat);
+                      partWhereClauseFormat);
                     updatePartitionStorage(innerPart);
                     innerPart.setFound(pPart.isFound());
                     if (innerPart.isFound()) {
@@ -468,10 +491,10 @@ public class StorageCandidate implements Candidate, CandidateTable {
       }
     }
     return
-        getPartitions(fromDate, ceilFromDate, partCol, partitions, updatePeriods,
-            addNonExistingParts, failOnPartialData, missingPartitions)
-            && getPartitions(floorToDate, toDate, partCol, partitions, updatePeriods,
-              addNonExistingParts, failOnPartialData, missingPartitions);
+      getPartitions(fromDate, ceilFromDate, partCol, partitions, updatePeriods,
+        addNonExistingParts, failOnPartialData, missingPartitions)
+        && getPartitions(floorToDate, toDate, partCol, partitions, updatePeriods,
+        addNonExistingParts, failOnPartialData, missingPartitions);
   }
 
   /**
@@ -506,8 +529,8 @@ public class StorageCandidate implements Candidate, CandidateTable {
     boolean partColNotSupported = rangeParts.isEmpty();
     String storageTableName = getName();
 
-    if (storagePruningMsgs.containsKey(storageTableName)) {
-      List<CandidateTablePruneCause> causes = storagePruningMsgs.get(storageTableName);
+    if (storagePruningMsgs.containsKey(this)) {
+      List<CandidateTablePruneCause> causes = storagePruningMsgs.get(this);
       // Find the PART_COL_DOES_NOT_EXISTS
       for (CandidateTablePruneCause cause : causes) {
         if (cause.getCause().equals(CandidateTablePruneCode.PART_COL_DOES_NOT_EXIST)) {
@@ -709,11 +732,11 @@ public class StorageCandidate implements Candidate, CandidateTable {
     return getName();
   }
 
-  public void addValidUpdatePeriod(UpdatePeriod updatePeriod) {
+  void addValidUpdatePeriod(UpdatePeriod updatePeriod) {
     this.validUpdatePeriods.add(updatePeriod);
   }
 
-  public void updateFromString(CubeQueryContext query, Set<Dimension> queryDims,
+  void updateFromString(CubeQueryContext query, Set<Dimension> queryDims,
     Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     this.dimsToQuery = dimsToQuery;
     String alias = cubeql.getAliasForTableName(cubeql.getCube().getName());
@@ -744,4 +767,57 @@ public class StorageCandidate implements Candidate, CandidateTable {
     }
     return ret;
   }
+
+  Set<UpdatePeriod> getAllUpdatePeriods() {
+    return getFact().getUpdatePeriods().get(getStorageName());
+  }
+  // TODO: move these to upper interfaces for complex candidates. Right now they're unused, so keeping them here
+  public boolean isTimeRangeCoverable(TimeRange timeRange) {
+    return isTimeRangeCoverable(timeRange.getFromDate(), timeRange.getToDate(), getValidUpdatePeriods());
+  }
+
+  /**
+   * Is the time range coverable by the given update periods.
+   * Extracts the max update period, then carves out the maximum chunk from the middle of the range that this
+   * update period can cover. Then recurses on the remaining ranges to the left and right of the extracted chunk,
+   * using one less update period.
+   * //TODO: add tests if the function is useful. Till then it's untested and unverified.
+   * @param fromDate  From date
+   * @param toDate    To date
+   * @param periods   Update periods to check
+   * @return          Whether time range is coverable by provided update periods or not.
+   */
+  private boolean isTimeRangeCoverable(Date fromDate, Date toDate, Set<UpdatePeriod> periods) {
+    UpdatePeriod interval = CubeFactTable.maxIntervalInRange(fromDate, toDate, periods);
+    if (fromDate.equals(toDate)) {
+      return true;
+    } else if (periods.isEmpty()) {
+      return false;
+    } else {
+      Set<UpdatePeriod> remaining = Sets.difference(periods, Sets.newHashSet(interval));
+      return interval != null
+        && isTimeRangeCoverable(fromDate, DateUtil.getCeilDate(fromDate, interval), remaining)
+        && isTimeRangeCoverable(DateUtil.getFloorDate(toDate, interval), toDate, remaining);
+    }
+  }
+
+  boolean isUpdatePeriodUseful(UpdatePeriod updatePeriod) {
+    return cubeql.getTimeRanges().stream().anyMatch(timeRange -> isUpdatePeriodUseful(timeRange, updatePeriod));
+  }
+
+  /**
+   * Is the update period useful for this time range. e.g. for a time range of hours and days, MONTHLY
+   * and YEARLY update periods are useless; DAILY and HOURLY are useful.
+   * @param timeRange       The time range
+   * @param updatePeriod    Update period
+   * @return                Whether the update period is useful for the time range
+   */
+  private boolean isUpdatePeriodUseful(TimeRange timeRange, UpdatePeriod updatePeriod) {
+    try {
+      timeRange.truncate(updatePeriod);
+      return true;
+    } catch (LensException e) {
+      return false;
+    }
+  }
 }
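
The divide-and-recurse idea documented on isTimeRangeCoverable can be illustrated with plain integers standing in for dates and update periods; this is a simplified sketch under those assumptions, not the Lens implementation (CubeFactTable.maxIntervalInRange and the DateUtil ceil/floor helpers are replaced by integer arithmetic):

    import java.util.Arrays;
    import java.util.NavigableSet;
    import java.util.TreeSet;

    public class CoverageSketch {
      // Largest step with at least one full aligned chunk inside [from, to), or null.
      static Integer maxStepInRange(int from, int to, NavigableSet<Integer> steps) {
        for (int s : steps.descendingSet()) {
          int ceilFrom = ((from + s - 1) / s) * s; // first step boundary >= from
          int floorTo = (to / s) * s;              // last step boundary <= to
          if (ceilFrom < floorTo) {
            return s;
          }
        }
        return null;
      }

      static boolean coverable(int from, int to, NavigableSet<Integer> steps) {
        if (from == to) {
          return true;                             // nothing left to cover
        }
        Integer s = maxStepInRange(from, to, steps);
        if (s == null) {
          return false;                            // no remaining step fits in this range
        }
        int ceilFrom = ((from + s - 1) / s) * s;
        int floorTo = (to / s) * s;
        NavigableSet<Integer> remaining = new TreeSet<>(steps);
        remaining.remove(s);                       // recurse with "one less update period"
        return coverable(from, ceilFrom, remaining) && coverable(floorTo, to, remaining);
      }

      public static void main(String[] args) {
        NavigableSet<Integer> steps = new TreeSet<>(Arrays.asList(1, 24)); // "hourly" and "daily"
        System.out.println(coverable(3, 51, steps)); // true: 3..24 hourly, 24..48 daily, 48..51 hourly
        System.out.println(coverable(3, 51, new TreeSet<>(Arrays.asList(24)))); // false: edges stay uncovered
      }
    }

The companion isUpdatePeriodUseful check is simpler: an update period is useful for a time range iff timeRange.truncate(updatePeriod) succeeds, i.e. the range still contains at least one full period after snapping to period boundaries.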

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index 3029589..bc008ae 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -19,6 +19,7 @@
 package org.apache.lens.cube.parse;
 
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.incompletePartitions;
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.partitionColumnsMissing;
 
 import java.util.*;
 
@@ -46,12 +47,10 @@ class StorageTableResolver implements ContextRewriter {
   private final UpdatePeriod maxInterval;
   // TODO union : Remove this. All partitions are stored in the StorageCandidate.
   private final Map<String, Set<String>> nonExistingPartitions = new HashMap<>();
-  CubeMetastoreClient client;
+  private CubeMetastoreClient client;
   private PHASE phase;
-  private float completenessThreshold;
-  private String completenessPartCol;
 
-  public StorageTableResolver(Configuration conf) {
+  StorageTableResolver(Configuration conf) {
     this.conf = conf;
     this.supportedStorages = getSupportedStorages(conf);
     this.allStoragesSupported = (supportedStorages == null);
@@ -64,7 +63,6 @@ class StorageTableResolver implements ContextRewriter {
     } else {
       this.maxInterval = null;
     }
-    String formatStr = conf.get(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT);
     this.phase = PHASE.first();
   }
 
@@ -76,7 +74,7 @@ class StorageTableResolver implements ContextRewriter {
     return null;
   }
 
-  public boolean isStorageSupportedOnDriver(String storage) {
+  private boolean isStorageSupportedOnDriver(String storage) {
     return allStoragesSupported || supportedStorages.contains(storage);
   }
 
@@ -115,7 +113,7 @@ class StorageTableResolver implements ContextRewriter {
    * Each candidate in the set is a complex candidate. We will evaluate each one to get
    * all the partitions needed to answer the query.
    *
-   * @param cubeql
+   * @param cubeql cube query context
    */
   private void resolveStoragePartitions(CubeQueryContext cubeql) throws LensException {
     Iterator<Candidate> candidateIterator = cubeql.getCandidates().iterator();
@@ -125,7 +123,7 @@ class StorageTableResolver implements ContextRewriter {
       for (TimeRange range : cubeql.getTimeRanges()) {
         isComplete &= candidate.evaluateCompleteness(range, range, failOnPartialData);
       }
-      if (failOnPartialData &&  !isComplete) {
+      if (failOnPartialData && !isComplete) {
         candidateIterator.remove();
         log.info("Not considering candidate:{} as its data is not is not complete", candidate);
         Set<StorageCandidate> scSet = CandidateUtil.getStorageCandidates(candidate);
@@ -136,13 +134,19 @@ class StorageTableResolver implements ContextRewriter {
             cubeql.addStoragePruningMsg(sc, incompletePartitions(sc.getDataCompletenessMap()));
           }
         }
+      } else if (candidate.getParticipatingPartitions().isEmpty()
+        && candidate instanceof StorageCandidate
+        && ((StorageCandidate) candidate).getNonExistingPartitions().isEmpty()) {
+        candidateIterator.remove();
+        cubeql.addCandidatePruningMsg(candidate,
+          new CandidateTablePruneCause(CandidateTablePruneCode.NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE));
       }
     }
   }
 
 
   private void resolveDimStorageTablesAndPartitions(CubeQueryContext cubeql) throws LensException {
-    Set<Dimension> allDims = new HashSet<Dimension>(cubeql.getDimensions());
+    Set<Dimension> allDims = new HashSet<>(cubeql.getDimensions());
     for (Aliased<Dimension> dim : cubeql.getOptionalDimensions()) {
       allDims.add(dim.getObject());
     }
@@ -161,7 +165,7 @@ class StorageTableResolver implements ContextRewriter {
           i.remove();
           continue;
         }
-        Set<String> storageTables = new HashSet<String>();
+        Set<String> storageTables = new HashSet<>();
         Map<String, String> whereClauses = new HashMap<String, String>();
         boolean foundPart = false;
         // TODO union : We have to remove all usages of a deprecated class.
@@ -237,6 +241,7 @@ class StorageTableResolver implements ContextRewriter {
       assert (c instanceof StorageCandidate);
       StorageCandidate sc = (StorageCandidate) c;
       String storageTable = sc.getStorageName();
+      // first check: if the storage is supported on driver
       if (!isStorageSupportedOnDriver(storageTable)) {
         log.info("Skipping storage: {} as it is not supported", storageTable);
         cubeql.addStoragePruningMsg(sc, new CandidateTablePruneCause(CandidateTablePruneCode.UNSUPPORTED_STORAGE));
@@ -244,9 +249,8 @@ class StorageTableResolver implements ContextRewriter {
         continue;
       }
       String str = conf.get(CubeQueryConfUtil.getValidStorageTablesKey(sc.getFact().getName()));
-      List<String> validFactStorageTables = StringUtils.isBlank(str)
-                                            ? null
-                                            : Arrays.asList(StringUtils.split(str.toLowerCase(), ","));
+      List<String> validFactStorageTables =
+        StringUtils.isBlank(str) ? null : Arrays.asList(StringUtils.split(str.toLowerCase(), ","));
       storageTable = sc.getName();
       // Check if storagetable is in the list of valid storages.
       if (validFactStorageTables != null && !validFactStorageTables.contains(storageTable)) {
@@ -255,70 +259,91 @@ class StorageTableResolver implements ContextRewriter {
         it.remove();
         continue;
       }
-      boolean valid = false;
-      // There could be multiple causes for the same time range.
-      Set<CandidateTablePruneCause.CandidateTablePruneCode> pruningCauses = new HashSet<>();
-      for (TimeRange range : cubeql.getTimeRanges()) {
-        boolean columnInRange = client
-          .isStorageTableCandidateForRange(storageTable, range.getFromDate(), range.getToDate());
-        if (!columnInRange) {
-          pruningCauses.add(CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE);
-          continue;
-        }
-        boolean partitionColumnExists = client.partColExists(storageTable, range.getPartitionColumn());
-        valid = partitionColumnExists;
-        if (!partitionColumnExists) {
-          String timeDim = cubeql.getBaseCube().getTimeDimOfPartitionColumn(range.getPartitionColumn());
-          TimeRange fallBackRange = StorageUtil.getFallbackRange(range, sc.getFact().getName(), cubeql);
-          if (fallBackRange == null) {
-            log.info("No partitions for range:{}. fallback range: {}", range, fallBackRange);
-            pruningCauses.add(CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE);
-            continue;
-          }
-          valid = client.partColExists(storageTable, fallBackRange.getPartitionColumn())
-              && client.isStorageTableCandidateForRange(storageTable, fallBackRange.getFromDate(),
-                  fallBackRange.getToDate());
-          if (!valid) {
-            pruningCauses.add(CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE);
-          }
-        }
-      }
-      if (!valid) {
-        it.remove();
-        for (CandidateTablePruneCode code : pruningCauses) {
-          cubeql.addStoragePruningMsg(sc, new CandidateTablePruneCause(code));
-        }
-        continue;
-      }
-
       List<String> validUpdatePeriods = CubeQueryConfUtil
         .getStringList(conf, CubeQueryConfUtil.getValidUpdatePeriodsKey(sc.getFact().getName(), sc.getStorageName()));
       boolean isStorageAdded = false;
       Map<String, SkipUpdatePeriodCode> skipUpdatePeriodCauses = new HashMap<>();
 
-      // Check for update period.
+      // Populate valid update periods.
       for (UpdatePeriod updatePeriod : sc.getFact().getUpdatePeriods().get(sc.getStorageName())) {
         if (maxInterval != null && updatePeriod.compareTo(maxInterval) > 0) {
-          log.info("Skipping update period {} for fact {}", updatePeriod, sc.getFact());
-          skipUpdatePeriodCauses.put(updatePeriod.toString(), SkipUpdatePeriodCode.QUERY_INTERVAL_BIGGER);
-          continue;
-        }
-        if (validUpdatePeriods != null && !validUpdatePeriods.contains(updatePeriod.name().toLowerCase())) {
-          log.info("Skipping update period {} for fact {} for storage {}", updatePeriod, sc.getFact(), storageTable);
+          // if user supplied max interval, all intervals larger than that are useless.
+          log.info("Skipping update period {} for candidate {} since it's more than max interval supplied({})",
+            updatePeriod, sc.getName(), maxInterval);
+          skipUpdatePeriodCauses.put(updatePeriod.toString(), SkipUpdatePeriodCode.UPDATE_PERIOD_BIGGER_THAN_MAX);
+        } else if (validUpdatePeriods != null && !validUpdatePeriods.contains(updatePeriod.name().toLowerCase())) {
+          // if user supplied valid update periods, other update periods are useless
+          log.info("Skipping update period {} for candidate {} for storage {} since it's invalid",
+            updatePeriod, sc.getName(), storageTable);
           skipUpdatePeriodCauses.put(updatePeriod.toString(), SkipUpdatePeriodCode.INVALID);
-          continue;
+        } else if (!sc.isUpdatePeriodUseful(updatePeriod)) {
+          // if the storage candidate doesn't find this update period useful for any queried time range, skip it
+          skipUpdatePeriodCauses.put(updatePeriod.toString(),
+            SkipUpdatePeriodCode.QUERY_INTERVAL_SMALLER_THAN_UPDATE_PERIOD);
+        } else {
+          isStorageAdded = true;
+          sc.addValidUpdatePeriod(updatePeriod);
         }
-        isStorageAdded = true;
-        sc.addValidUpdatePeriod(updatePeriod);
       }
+      // this is just for documentation/debugging, so we can see why some update periods are skipped.
+      if (!skipUpdatePeriodCauses.isEmpty()) {
+        sc.setUpdatePeriodRejectionCause(skipUpdatePeriodCauses);
+      }
+      // if no update periods were added in previous section, we skip this storage candidate
       if (!isStorageAdded) {
-        cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.updatePeriodsRejected(skipUpdatePeriodCauses));
+        if (skipUpdatePeriodCauses.values().stream().allMatch(
+          SkipUpdatePeriodCode.QUERY_INTERVAL_SMALLER_THAN_UPDATE_PERIOD::equals)) {
+          // all update periods are bigger than the query range, which means the time range is not answerable.
+          cubeql.addStoragePruningMsg(sc,
+            new CandidateTablePruneCause(CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE));
+        } else { // Update periods are rejected for multiple reasons.
+          cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.updatePeriodsRejected(skipUpdatePeriodCauses));
+        }
         it.remove();
+      } else {
+        Set<CandidateTablePruneCause> allPruningCauses = new HashSet<>(2);
+        for (TimeRange range : cubeql.getTimeRanges()) {
+          CandidateTablePruneCause pruningCauseForThisTimeRange = null;
+          if (!client.isStorageTableCandidateForRange(storageTable, range.getFromDate(), range.getToDate())) {
+            //This is the prune cause
+            pruningCauseForThisTimeRange =
+              new CandidateTablePruneCause(CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE);
+          }
+          //Check partition (or fallback) column existence
+          else if (cubeql.shouldReplaceTimeDimWithPart()) {
+            if (!client.partColExists(storageTable, range.getPartitionColumn())) {
+              pruningCauseForThisTimeRange = partitionColumnsMissing(range.getPartitionColumn());
+              TimeRange fallBackRange = StorageUtil.getFallbackRange(range, sc.getFact().getName(), cubeql);
+              while (fallBackRange != null) {
+                pruningCauseForThisTimeRange = null;
+                if (!client.partColExists(storageTable, fallBackRange.getPartitionColumn())) {
+                  pruningCauseForThisTimeRange = partitionColumnsMissing(fallBackRange.getPartitionColumn());
+                  fallBackRange = StorageUtil.getFallbackRange(fallBackRange, sc.getFact().getName(), cubeql);
+                } else {
+                  if (!client.isStorageTableCandidateForRange(storageTable, fallBackRange.getFromDate(),
+                    fallBackRange.getToDate())) {
+                    pruningCauseForThisTimeRange =
+                      new CandidateTablePruneCause(CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE);
+                  }
+                  break;
+                }
+              }
+            }
+          }
+
+          if (pruningCauseForThisTimeRange != null) {
+            allPruningCauses.add(pruningCauseForThisTimeRange);
+          }
+        }
+        if (!allPruningCauses.isEmpty()) {
+          it.remove();
+          cubeql.addStoragePruningMsg(sc, allPruningCauses.toArray(new CandidateTablePruneCause[0]));
+        }
       }
     }
   }
 
-  void addNonExistingParts(String name, Set<String> nonExistingParts) {
+  private void addNonExistingParts(String name, Set<String> nonExistingParts) {
     nonExistingPartitions.put(name, nonExistingParts);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
index 87f3ac2..f5cd540 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
@@ -166,7 +166,6 @@ public final class StorageUtil {
 
   /**
    * Get fallback range
-   * TODO union : Add method level comments
    *
    * @param range
    * @param factName

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
deleted file mode 100644
index e37db8b..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
+++ /dev/null
@@ -1,238 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
-
-import java.util.*;
-
-import org.apache.lens.cube.error.ColUnAvailableInTimeRange;
-import org.apache.lens.cube.error.ColUnAvailableInTimeRangeException;
-import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.metadata.*;
-import org.apache.lens.cube.metadata.join.JoinPath;
-import org.apache.lens.cube.parse.join.AutoJoinContext;
-import org.apache.lens.server.api.LensConfConstants;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.plan.PlanUtils;
-
-import lombok.extern.slf4j.Slf4j;
-
-@Slf4j
-public class TimeRangeChecker implements ContextRewriter {
-  public TimeRangeChecker(Configuration conf) {
-
-  }
-  @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
-    if (cubeql.getCube() == null) {
-      return;
-    }
-    doColLifeValidation(cubeql);
-  }
-  private void extractTimeRange(CubeQueryContext cubeql) throws LensException {
-    // get time range -
-    // Time range should be direct child of where condition
-    // TOK_WHERE.TOK_FUNCTION.Identifier Or, it should be right hand child of
-    // AND condition TOK_WHERE.KW_AND.TOK_FUNCTION.Identifier
-    if (cubeql.getWhereAST() == null || cubeql.getWhereAST().getChildCount() < 1) {
-      throw new LensException(LensCubeErrorCode.NO_TIMERANGE_FILTER.getLensErrorInfo());
-    }
-    searchTimeRanges(cubeql.getWhereAST(), cubeql, null, 0);
-  }
-
-  private void searchTimeRanges(ASTNode root, CubeQueryContext cubeql, ASTNode parent, int childIndex)
-    throws LensException {
-    if (root == null) {
-      return;
-    } else if (root.getToken().getType() == TOK_FUNCTION) {
-      ASTNode fname = HQLParser.findNodeByPath(root, Identifier);
-      if (fname != null && CubeQueryContext.TIME_RANGE_FUNC.equalsIgnoreCase(fname.getText())) {
-        processTimeRangeFunction(cubeql, root, parent, childIndex);
-      }
-    } else {
-      for (int i = 0; i < root.getChildCount(); i++) {
-        ASTNode child = (ASTNode) root.getChild(i);
-        searchTimeRanges(child, cubeql, root, i);
-      }
-    }
-  }
-
-  private String getColumnName(ASTNode node) {
-    String column = null;
-    if (node.getToken().getType() == DOT) {
-      ASTNode colIdent = (ASTNode) node.getChild(1);
-      column = colIdent.getText().toLowerCase();
-    } else if (node.getToken().getType() == TOK_TABLE_OR_COL) {
-      // Take child ident.totext
-      ASTNode ident = (ASTNode) node.getChild(0);
-      column = ident.getText().toLowerCase();
-    }
-    return column;
-  }
-
-  private void processTimeRangeFunction(CubeQueryContext cubeql, ASTNode timenode, ASTNode parent, int childIndex)
-    throws LensException {
-    TimeRange.TimeRangeBuilder builder = TimeRange.getBuilder();
-    builder.astNode(timenode);
-    builder.parent(parent);
-    builder.childIndex(childIndex);
-
-    String timeDimName = getColumnName((ASTNode) timenode.getChild(1));
-
-    if (!cubeql.getCube().getTimedDimensions().contains(timeDimName)) {
-      throw new LensException(LensCubeErrorCode.NOT_A_TIMED_DIMENSION.getLensErrorInfo(), timeDimName);
-    }
-    // Replace timeDimName with column which is used for partitioning. Assume
-    // the same column
-    // is used as a partition column in all storages of the fact
-    timeDimName = cubeql.getPartitionColumnOfTimeDim(timeDimName);
-    builder.partitionColumn(timeDimName);
-
-    String fromDateRaw = PlanUtils.stripQuotes(timenode.getChild(2).getText());
-    String toDateRaw = null;
-    if (timenode.getChildCount() > 3) {
-      ASTNode toDateNode = (ASTNode) timenode.getChild(3);
-      if (toDateNode != null) {
-        toDateRaw = PlanUtils.stripQuotes(timenode.getChild(3).getText());
-      }
-    }
-    long currentTime = cubeql.getConf().getLong(LensConfConstants.QUERY_CURRENT_TIME_IN_MILLIS, 0);
-    Date now;
-    if (currentTime != 0) {
-      now = new Date(currentTime);
-    } else {
-      now = new Date();
-    }
-    builder.fromDate(DateUtil.resolveDate(fromDateRaw, now));
-    if (StringUtils.isNotBlank(toDateRaw)) {
-      builder.toDate(DateUtil.resolveDate(toDateRaw, now));
-    } else {
-      builder.toDate(now);
-    }
-
-    TimeRange range = builder.build();
-    range.validate();
-    cubeql.getTimeRanges().add(range);
-  }
-
-  //TODO union: This can be executed before finding CoveringSets but after denormresolver and joinresolver
-  private void doColLifeValidation(CubeQueryContext cubeql) throws LensException,
-      ColUnAvailableInTimeRangeException {
-    Set<String> cubeColumns = cubeql.getColumnsQueriedForTable(cubeql.getCube().getName());
-    if (cubeColumns == null || cubeColumns.isEmpty()) {
-      // Query doesn't have any columns from cube
-      return;
-    }
-
-    for (String col : cubeql.getColumnsQueriedForTable(cubeql.getCube().getName())) {
-      CubeColumn column = cubeql.getCube().getColumnByName(col);
-      for (TimeRange range : cubeql.getTimeRanges()) {
-        if (column == null) {
-          if (!cubeql.getCube().getTimedDimensions().contains(col)) {
-            throw new LensException(LensCubeErrorCode.NOT_A_CUBE_COLUMN.getLensErrorInfo(), col);
-          }
-          continue;
-        }
-        if (!column.isColumnAvailableInTimeRange(range)) {
-          throwException(column);
-        }
-      }
-    }
-
-    // Look at referenced columns through denormalization resolver
-    // and do column life validation
-    Map<String, Set<DenormalizationResolver.ReferencedQueriedColumn>> refCols =
-        cubeql.getDeNormCtx().getReferencedCols();
-    for (String col : refCols.keySet()) {
-      Iterator<DenormalizationResolver.ReferencedQueriedColumn> refColIter = refCols.get(col).iterator();
-      while (refColIter.hasNext()) {
-        DenormalizationResolver.ReferencedQueriedColumn refCol = refColIter.next();
-        for (TimeRange range : cubeql.getTimeRanges()) {
-          if (!refCol.col.isColumnAvailableInTimeRange(range)) {
-            log.debug("The refernced column: {} is not in the range queried", refCol.col.getName());
-            refColIter.remove();
-            break;
-          }
-        }
-      }
-    }
-
-    // Remove join paths that have columns with invalid life span
-    AutoJoinContext joinContext = cubeql.getAutoJoinCtx();
-    if (joinContext == null) {
-      return;
-    }
-    // Get cube columns which are part of join chain
-    Set<String> joinColumns = joinContext.getAllJoinPathColumnsOfTable((AbstractCubeTable) cubeql.getCube());
-    if (joinColumns == null || joinColumns.isEmpty()) {
-      return;
-    }
-
-    // Loop over all cube columns part of join paths
-    for (String col : joinColumns) {
-      CubeColumn column = cubeql.getCube().getColumnByName(col);
-      for (TimeRange range : cubeql.getTimeRanges()) {
-        if (!column.isColumnAvailableInTimeRange(range)) {
-          log.info("Timerange queried is not in column life for {}, Removing join paths containing the column", column);
-          // Remove join paths containing this column
-          Map<Aliased<Dimension>, List<JoinPath>> allPaths = joinContext.getAllPaths();
-
-          for (Aliased<Dimension> dimension : allPaths.keySet()) {
-            List<JoinPath> joinPaths = allPaths.get(dimension);
-            Iterator<JoinPath> joinPathIterator = joinPaths.iterator();
-
-            while (joinPathIterator.hasNext()) {
-              JoinPath path = joinPathIterator.next();
-              if (path.containsColumnOfTable(col, (AbstractCubeTable) cubeql.getCube())) {
-                log.info("Removing join path: {} as columns :{} is not available in the range", path, col);
-                joinPathIterator.remove();
-                if (joinPaths.isEmpty()) {
-                  // This dimension doesn't have any paths left
-                  throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getLensErrorInfo(),
-                      "No valid join path available for dimension " + dimension + " which would satisfy time range "
-                          + range.getFromDate() + "-" + range.getToDate());
-                }
-              }
-            } // End loop to remove path
-
-          } // End loop for all paths
-        }
-      } // End time range loop
-    } // End column loop
-  }
-
-  private void throwException(CubeColumn column) throws ColUnAvailableInTimeRangeException {
-
-    final Long availabilityStartTime = (column.getStartTimeMillisSinceEpoch().isPresent())
-        ? column.getStartTimeMillisSinceEpoch().get() : null;
-
-    final Long availabilityEndTime = column.getEndTimeMillisSinceEpoch().isPresent()
-        ? column.getEndTimeMillisSinceEpoch().get() : null;
-
-    ColUnAvailableInTimeRange col = new ColUnAvailableInTimeRange(column.getName(), availabilityStartTime,
-        availabilityEndTime);
-
-    throw new ColUnAvailableInTimeRangeException(col);
-  }
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/main/java/org/apache/lens/cube/parse/TrackDenormContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TrackDenormContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TrackDenormContext.java
new file mode 100644
index 0000000..5592f70
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TrackDenormContext.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+interface TrackDenormContext {
+
+  /**
+   * Get denormalization context
+   *
+   * @return DenormalizationContext
+   */
+  DenormalizationResolver.DenormalizationContext getDeNormCtx();
+
+  /**
+   * Set denormalization context
+   *
+   * @param deNormCtx DenormalizationContext
+   */
+  void setDeNormCtx(DenormalizationResolver.DenormalizationContext deNormCtx);
+
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
index 8c81166..4f1f808 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
@@ -50,13 +50,11 @@ public class UnionQueryWriter {
   private Map<HQLParser.HashableASTNode, ASTNode> innerToOuterHavingASTs = new HashMap<>();
   private Map<String, ASTNode> storageCandidateToSelectAstMap = new HashMap<>();
   private AliasDecider aliasDecider = new DefaultAliasDecider();
-  private Candidate cand;
   private CubeQueryContext cubeql;
   Set<StorageCandidate> storageCandidates;
   public static final String DEFAULT_MEASURE = "0.0";
 
   public UnionQueryWriter(Candidate cand, CubeQueryContext cubeql) {
-    this.cand = cand;
     this.cubeql = cubeql;
     storageCandidates = CandidateUtil.getStorageCandidates(cand);
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java b/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java
index 9fecdbc..3c157ee 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java
@@ -67,6 +67,9 @@ public class FactPartitionBasedQueryCostCalculator implements QueryCostCalculato
           }
           cost += allTableWeights * getNormalizedUpdatePeriodCost(partition.getPeriod(), driver);
         }
+      } else {
+        // increase cost for every dimtable partition
+        cost += 1.0;
       }
     }
     return cost;

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index e21dc2a..950534c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -28,6 +28,7 @@ import static org.apache.lens.server.api.util.LensUtil.getHashMap;
 import static org.testng.Assert.*;
 
 import java.text.SimpleDateFormat;
+
 import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
@@ -45,7 +46,10 @@ import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.*;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
@@ -965,6 +969,132 @@ public class TestCubeMetastoreClient {
     assertTrue(client.getAllFacts(altered).isEmpty());
   }
 
+  @Test(priority = 1)
+  public void testUpdatePeriodTableDescriptions() throws LensException, HiveException {
+    List<FieldSchema> factColumns = new ArrayList<>(cubeMeasures.size());
+    String factName = "testFactWithUpdatePeriodTableDescriptions";
+
+    for (CubeMeasure measure : cubeMeasures) {
+      factColumns.add(measure.getColumn());
+    }
+    // add one dimension of the cube
+    factColumns.add(new FieldSchema("zipcode", "int", "zip"));
+    FieldSchema itPart = new FieldSchema("it", "string", "date part");
+    FieldSchema etPart = new FieldSchema("et", "string", "date part");
+    String[] partColNames = new String[] { getDatePartitionKey(), itPart.getName(), etPart.getName() };
+
+    StorageTableDesc s1 = new StorageTableDesc(TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class,
+      Lists.newArrayList(getDatePartition(), itPart, etPart),
+      Lists.newArrayList(getDatePartitionKey(), itPart.getName(), etPart.getName()));
+    StorageTableDesc s2 = new StorageTableDesc(TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class,
+      Lists.newArrayList(getDatePartition(), itPart, etPart),
+      Lists.newArrayList(getDatePartitionKey(), itPart.getName(), etPart.getName()));
+
+    Map<String, Set<UpdatePeriod>> updatePeriods = getHashMap(c1, hourlyAndDaily, c2, hourlyAndDaily);
+    Map<String, StorageTableDesc> storageTables = getHashMap(HOURLY + "_" + c1, s1, DAILY + "_" + c1, s2, c2, s2);
+    Map<String, Map<UpdatePeriod, String>> storageUpdatePeriodMap = getHashMap(c1,
+      getHashMap(HOURLY, HOURLY + "_" + c1, DAILY, DAILY + "_" + c1), c2, getHashMap(HOURLY, c2, DAILY, c2));
+
+    CubeFactTable cubeFact = new CubeFactTable(CUBE_NAME, factName, factColumns, updatePeriods, 0L, null,
+      storageUpdatePeriodMap);
+    client.createCubeFactTable(CUBE_NAME, factName, factColumns, updatePeriods, 0L, null, storageTables,
+      storageUpdatePeriodMap);
+
+    assertTrue(client.tableExists(factName));
+    Table cubeTbl = client.getHiveTable(factName);
+    assertTrue(client.isFactTable(cubeTbl));
+    assertTrue(client.isFactTableForCube(cubeTbl, CUBE_NAME));
+
+    // Assert for storage tables
+    for (String entry : storageTables.keySet()) {
+      String storageTableName = getFactOrDimtableStorageTableName(factName, entry);
+      assertTrue(client.tableExists(storageTableName));
+    }
+
+    String c1TableNameHourly = getFactOrDimtableStorageTableName(cubeFact.getName(), HOURLY + "_" + c1);
+    String c2TableNameHourly = getFactOrDimtableStorageTableName(cubeFact.getName(), c2);
+
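+    // Configure a different timeline implementation per storage: store-all for c1,
+    // ends-and-holes for c2.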
+    Table c1TableHourly = client.getHiveTable(c1TableNameHourly);
+    c1TableHourly.getParameters().put(getPartitionTimelineStorageClassKey(HOURLY, getDatePartitionKey()),
+      StoreAllPartitionTimeline.class.getCanonicalName());
+    c1TableHourly.getParameters().put(getPartitionTimelineStorageClassKey(HOURLY, itPart.getName()),
+      StoreAllPartitionTimeline.class.getCanonicalName());
+    c1TableHourly.getParameters().put(getPartitionTimelineStorageClassKey(HOURLY, etPart.getName()),
+      StoreAllPartitionTimeline.class.getCanonicalName());
+    client.pushHiveTable(c1TableHourly);
+
+    Table c2TableHourly = client.getHiveTable(c2TableNameHourly);
+    c2TableHourly.getParameters().put(getPartitionTimelineStorageClassKey(HOURLY, getDatePartitionKey()),
+      EndsAndHolesPartitionTimeline.class.getCanonicalName());
+    c2TableHourly.getParameters().put(getPartitionTimelineStorageClassKey(HOURLY, itPart.getName()),
+      EndsAndHolesPartitionTimeline.class.getCanonicalName());
+    c2TableHourly.getParameters().put(getPartitionTimelineStorageClassKey(HOURLY, etPart.getName()),
+      EndsAndHolesPartitionTimeline.class.getCanonicalName());
+    client.pushHiveTable(c2TableHourly);
+
+    assertSameTimelines(factName, new String[] { c1, c2 }, HOURLY, partColNames);
+
+    StoreAllPartitionTimeline timelineDtC1 = ((StoreAllPartitionTimeline) client.partitionTimelineCache
+      .get(factName, c1, HOURLY, getDatePartitionKey()));
+    StoreAllPartitionTimeline timelineItC1 = ((StoreAllPartitionTimeline) client.partitionTimelineCache
+      .get(factName, c1, HOURLY, itPart.getName()));
+    StoreAllPartitionTimeline timelineEtC1 = ((StoreAllPartitionTimeline) client.partitionTimelineCache
+      .get(factName, c1, HOURLY, etPart.getName()));
+    EndsAndHolesPartitionTimeline timelineDt = ((EndsAndHolesPartitionTimeline) client.partitionTimelineCache
+      .get(factName, c2, HOURLY, getDatePartitionKey()));
+    EndsAndHolesPartitionTimeline timelineIt = ((EndsAndHolesPartitionTimeline) client.partitionTimelineCache
+      .get(factName, c2, HOURLY, itPart.getName()));
+    EndsAndHolesPartitionTimeline timelineEt = ((EndsAndHolesPartitionTimeline) client.partitionTimelineCache
+      .get(factName, c2, HOURLY, etPart.getName()));
+
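+    // Register three partitions with varying time parts so both timeline
+    // implementations are exercised.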
+    Map<String, Date> timeParts1 = getTimePartitionByOffsets(getDatePartitionKey(), 0, itPart.getName(), 0,
+      etPart.getName(), 0);
+    StoragePartitionDesc partSpec1 = new StoragePartitionDesc(cubeFact.getName(), timeParts1, null, HOURLY);
+
+    Map<String, Date> timeParts2 = getTimePartitionByOffsets(getDatePartitionKey(), 0, etPart.getName(), 1);
+    Map<String, String> nonTimeSpec = getHashMap(itPart.getName(), "default");
+    final StoragePartitionDesc partSpec2 = new StoragePartitionDesc(cubeFact.getName(), timeParts2, nonTimeSpec,
+      HOURLY);
+
+    Map<String, Date> timeParts3 = getTimePartitionByOffsets(getDatePartitionKey(), 0, etPart.getName(), 0);
+    final StoragePartitionDesc partSpec3 = new StoragePartitionDesc(cubeFact.getName(), timeParts3, nonTimeSpec,
+      HOURLY);
+
+    client.addPartitions(Arrays.asList(partSpec1, partSpec2, partSpec3), c1, CubeTableType.FACT);
+    client.addPartitions(Arrays.asList(partSpec1, partSpec2, partSpec3), c2, CubeTableType.FACT);
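+    // The same partitions went to both storages; each timeline should reflect
+    // them through its configured implementation.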
+    PartitionTimeline timeline1Temp = client.partitionTimelineCache.get(factName, c1, HOURLY, getDatePartitionKey());
+    PartitionTimeline timeline2Temp = client.partitionTimelineCache.get(factName, c2, HOURLY, getDatePartitionKey());
+
+    assertEquals(timeline1Temp.getClass(), StoreAllPartitionTimeline.class);
+    assertEquals(timeline2Temp.getClass(), EndsAndHolesPartitionTimeline.class);
+
+    assertEquals(client.getAllParts(c1TableNameHourly).size(), 3);
+    assertEquals(client.getAllParts(c2TableNameHourly).size(), 3);
+
+    assertSameTimelines(factName, new String[] { c1, c2 }, HOURLY, partColNames);
+
+    assertTimeline(timelineDt, timelineDtC1, HOURLY, 0, 0);
+    assertTimeline(timelineEt, timelineEtC1, HOURLY, 0, 1);
+    assertTimeline(timelineIt, timelineItC1, HOURLY, 0, 0);
+
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, itPart.getName()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c2, etPart.getName()));
+
+    assertNoPartitionNamedLatest(c1TableNameHourly, partColNames);
+    assertNoPartitionNamedLatest(c2TableNameHourly, partColNames);
+
+    client.dropFact(factName, true);
+    assertFalse(client.tableExists(factName));
+    for (String entry : storageTables.keySet()) {
+      String storageTableName = getFactOrDimtableStorageTableName(factName, entry);
+      assertFalse(client.tableExists(storageTableName));
+    }
+  }
+
   @Test(priority = 2)
   public void testAlterDerivedCube() throws Exception {
     String name = "alter_derived_cube";
@@ -1238,7 +1368,10 @@ public class TestCubeMetastoreClient {
     s1.setFieldDelim(":");
     storageTables.put(c1, s1);
     storageTables.put(c4, s1);
-    factTable.addStorage(c4, hourlyAndDaily);
+    Map<UpdatePeriod, String> updatePeriodStoragePrefix = new HashMap<>();
+    updatePeriodStoragePrefix.put(HOURLY, c4);
+    updatePeriodStoragePrefix.put(DAILY, c4);
+    factTable.addStorage(c4, hourlyAndDaily, updatePeriodStoragePrefix);
     client.alterCubeFactTable(factName, factTable, storageTables, new HashMap<String, String>());
     CubeFactTable altered2 = client.getCubeFact(factName);
     assertTrue(client.tableExists(c1TableName));
@@ -1261,7 +1394,12 @@ public class TestCubeMetastoreClient {
     assertTrue(client.tableExists(c4TableName));
 
     // add storage
-    client.addStorage(altered2, c3, hourlyAndDaily, s1);
+    updatePeriodStoragePrefix.clear();
+    updatePeriodStoragePrefix.put(HOURLY, c3);
+    updatePeriodStoragePrefix.put(DAILY, c3);
+    Map<String, StorageTableDesc> storageTableDescMap = new HashMap<>();
+    storageTableDescMap.put(c3, s1);
+    client.addStorage(altered2, c3, hourlyAndDaily, storageTableDescMap, updatePeriodStoragePrefix);
     CubeFactTable altered3 = client.getCubeFact(factName);
     assertTrue(altered3.getStorages().contains("C3"));
     assertTrue(altered3.getUpdatePeriods().get("C3").equals(hourlyAndDaily));
@@ -1517,14 +1655,16 @@ public class TestCubeMetastoreClient {
     for (Partition partition : c1Parts) {
       partition.setLocation("blah");
       partition.setBucketCount(random.nextInt());
-      client.updatePartition(factName, c1, partition);
+      client.updatePartition(factName, c1, partition, HOURLY);
     }
     assertSamePartitions(client.getAllParts(c1TableName), c1Parts);
     for (Partition partition : c2Parts) {
       partition.setLocation("blah");
       partition.setBucketCount(random.nextInt());
     }
-    client.updatePartitions(factName, c2, c2Parts);
+    Map<UpdatePeriod, List<Partition>> partitionMap = new HashMap<>();
+    partitionMap.put(HOURLY, c2Parts);
+    client.updatePartitions(factName, c2, partitionMap);
     assertSamePartitions(client.getAllParts(c2TableName), c2Parts);
 
     assertSameTimelines(factName, storages, HOURLY, partColNames);
@@ -1998,7 +2138,6 @@ public class TestCubeMetastoreClient {
       timePartCols);
     Map<String, Set<UpdatePeriod>> updatePeriods = getHashMap(c1, updates);
     Map<String, StorageTableDesc> storageTables = getHashMap(c1, s1);
-
     CubeFactTable cubeFactWithParts = new CubeFactTable(CUBE_NAME, factNameWithPart, factColumns, updatePeriods);
 
     // create cube fact

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 194ab7c..94d4b40 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -585,9 +585,10 @@ public class CubeTestSetup {
       "dim3 refer", "dim3chain", "id", null, null, 0.0));
     cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("cityname", "string", "city name"),
       "city name", "cubecity", "name", null, null, 0.0));
-    // union join context
     cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema(prefix + "cityname", "string", prefix + "city name"),
-        prefix + "city name", "cubeCityJoinUnionCtx", "name", null, null, 0.0));
+      prefix + "city name", "cubeCityJoinUnionCtx", "name", null, null, 0.0));
+    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("statename_cube", "string", "state name"),
+      "state name", "cubestate", "name", null, null, 0.0));
     List<ChainRefCol> references = new ArrayList<>();
     references.add(new ChainRefCol("timedatechain1", "full_date"));
     references.add(new ChainRefCol("timehourchain1", "full_hour"));
@@ -602,6 +603,8 @@ public class CubeTestSetup {
       "City1", null, null, null));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("cityid2", "int", "id to city"),
       "City2", null, null, null));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("concatedcitystate", "string", "citystate"),
+      "CityState", null, null, null));
 
     Map<String, JoinChain> joinChains = new HashMap<>();
     addCubeChains(joinChains, TEST_CUBE_NAME);
@@ -663,7 +666,11 @@ public class CubeTestSetup {
     exprs.add(new ExprColumn(new FieldSchema("newexpr", "string", "expression which non existing colun"),
       "new measure expr", "myfun(newmeasure)"));
     exprs.add(new ExprColumn(new FieldSchema("cityAndState", "String", "city and state together"), "City and State",
-      "concat(cubecity.name, \":\", cubestate.name)"));
+      new ExprSpec("concat(cityname, \":\", statename_cube)", null, null),
+      new ExprSpec("substr(concatedcitystate, 10)", null, null)));
+    exprs.add(new ExprColumn(new FieldSchema("cityAndStateNew", "String", "city and state together"), "City and State",
+      new ExprSpec("concat(cityname, \":\", statename_cube)", null, TWO_MONTHS_BACK),
+      new ExprSpec("substr(concatedcitystate, 10)", null, null)));
     exprs.add(new ExprColumn(new FieldSchema("cityStateName", "String", "city state"), "City State",
       "concat('CityState:', cubecity.statename)"));
     exprs.add(new ExprColumn(new FieldSchema("isIndia", "String", "is indian city/state"), "Is Indian City/state",
@@ -2120,6 +2127,7 @@ public class CubeTestSetup {
     factColumns.add(new FieldSchema("countryid", "int", "country id"));
     factColumns.add(new FieldSchema("dim1", "string", "dim1"));
     factColumns.add(new FieldSchema("dim2", "int", "dim2"));
+    factColumns.add(new FieldSchema("concatedCityState", "string", "citystate"));
 
     Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
     Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();

http://git-wip-us.apache.org/repos/asf/lens/blob/2aaf6e0a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
index 1a5bd0d..44bf512 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information