You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lens.apache.org by am...@apache.org on 2015/05/19 06:00:06 UTC

incubator-lens git commit: LENS-204 : Support union across tables in cube rewriting (Sushil Mohanty via amareshwari)

Repository: incubator-lens
Updated Branches:
  refs/heads/master a6c40f4d2 -> ab22a24b5


LENS-204 : Support union across tables in cube rewriting (Sushil Mohanty via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/ab22a24b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/ab22a24b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/ab22a24b

Branch: refs/heads/master
Commit: ab22a24b5f9a567cc92102b8e57cbc4c314242fa
Parents: a6c40f4
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Tue May 19 09:29:33 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue May 19 09:29:33 2015 +0530

----------------------------------------------------------------------
 .../apache/lens/cube/parse/CandidateFact.java   |  44 ++++---
 .../lens/cube/parse/CubeQueryContext.java       |   7 +-
 .../lens/cube/parse/DimOnlyHQLContext.java      |   6 +
 .../lens/cube/parse/SingleFactHQLContext.java   |  66 +++++-----
 .../parse/SingleFactMultiStorageHQLContext.java |  61 +++++++++
 .../lens/cube/parse/StorageTableResolver.java   |  38 ++++++
 .../apache/lens/cube/parse/UnionHQLContext.java |  88 +++++++++++++
 .../apache/lens/cube/parse/CubeTestSetup.java   |  86 +++++++++++-
 .../lens/cube/parse/TestCubeRewriter.java       | 131 +++++++++++--------
 9 files changed, 422 insertions(+), 105 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/ab22a24b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 31eb113..84e5341 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -55,7 +55,6 @@ public class CandidateFact implements CandidateTable {
   public static final Log LOG = LogFactory.getLog(CandidateFact.class.getName());
   final CubeFactTable fact;
   @Getter
-  @Setter
   private Set<String> storageTables;
   // flag to know if querying multiple storage tables is enabled for this fact
   @Getter
@@ -68,7 +67,6 @@ public class CandidateFact implements CandidateTable {
   @Getter
   private final Map<TimeRange, String> rangeToWhereClause = Maps.newHashMap();
 
-  private boolean dbResolved = false;
   private CubeInterface baseTable;
   private ASTNode selectAST;
   private ASTNode whereAST;
@@ -78,6 +76,14 @@ public class CandidateFact implements CandidateTable {
   private final List<Integer> selectIndices = Lists.newArrayList();
   private final List<Integer> dimFieldIndices = Lists.newArrayList();
   private Collection<String> columns;
+  @Getter
+  private final Map<String, String> storgeWhereClauseMap = new HashMap<String, String>();
+  @Getter
+  private final Map<TimeRange, Map<String, LinkedHashSet<FactPartition>>> rangeToStoragePartMap =
+      new HashMap<TimeRange, Map<String, LinkedHashSet<FactPartition>>>();
+  @Getter
+  private final Map<TimeRange, Map<String, String>> rangeToStorageWhereMap =
+      new HashMap<TimeRange, Map<String, String>>();
 
   CandidateFact(CubeFactTable fact, CubeInterface cube) {
     this.fact = fact;
@@ -145,6 +151,10 @@ public class CandidateFact implements CandidateTable {
     updateTimeRanges(this.whereAST, null, 0);
   }
 
+  public String getWhereClause(String storageTable) {
+    return getStorgeWhereClauseMap().get(storageTable);
+  }
+
   public void updateTimeranges(CubeQueryContext cubeql) throws SemanticException {
     // Update WhereAST with range clause
     // resolve timerange positions and replace it by corresponding where clause
@@ -243,21 +253,24 @@ public class CandidateFact implements CandidateTable {
     return cubeColsInExpr;
   }
 
+  @Override
   public String getStorageString(String alias) {
-    if (!dbResolved) {
-      String database = SessionState.get().getCurrentDatabase();
-      // Add database name prefix for non default database
-      if (StringUtils.isNotBlank(database) && !"default".equalsIgnoreCase(database)) {
-        Set<String> storageTbls = new HashSet<String>();
-        Iterator<String> names = storageTables.iterator();
-        while (names.hasNext()) {
-          storageTbls.add(database + "." + names.next());
-        }
-        this.storageTables = storageTbls;
+    return StringUtils.join(storageTables, ",") + " " + alias;
+  }
+
+  public void setStorageTables(Set<String> storageTables) {
+    String database = SessionState.get().getCurrentDatabase();
+    // Add database name prefix for non default database
+    if (StringUtils.isNotBlank(database) && !"default".equalsIgnoreCase(database)) {
+      Set<String> storageTbls = new HashSet<String>();
+      Iterator<String> names = storageTables.iterator();
+      while (names.hasNext()) {
+        storageTbls.add(database + "." + names.next());
       }
-      dbResolved = true;
+      this.storageTables = storageTbls;
+    } else {
+      this.storageTables = storageTables;
     }
-    return StringUtils.join(storageTables, ",") + " " + alias;
   }
 
   @Override
@@ -394,9 +407,6 @@ public class CandidateFact implements CandidateTable {
     Set<String> cubeTimeDimensions = baseTable.getTimedDimensions();
     Set<String> timePartDimensions = new HashSet<String>();
     String singleStorageTable = storageTables.iterator().next();
-    if (!dbResolved) {
-      singleStorageTable = SessionState.get().getCurrentDatabase() + "." + singleStorageTable;
-    }
     List<FieldSchema> partitionKeys = null;
     try {
       partitionKeys = query.getMetastoreClient().getTable(singleStorageTable).getPartitionKeys();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/ab22a24b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 230a2ee..38b6429 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -840,8 +840,11 @@ public class CubeQueryContext {
     Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws SemanticException {
     if (facts == null || facts.size() == 0) {
       return new DimOnlyHQLContext(dimsToQuery, query);
-    } else if (facts.size() == 1) {
-      // create singlefact context
+    } else if (facts.size() == 1 && facts.iterator().next().getStorageTables().size() > 1) {
+      //create single fact with multiple storage context
+      return new SingleFactMultiStorageHQLContext(facts.iterator().next(), dimsToQuery, query);
+    } else if (facts.size() == 1 && facts.iterator().next().getStorageTables().size() == 1) {
+      // create single fact context
       return new SingleFactHQLContext(facts.iterator().next(), dimsToQuery, query);
     } else {
       return new MultiFactHQLContext(facts, dimsToQuery, factDimMap, query);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/ab22a24b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
index 785b0e4..9a2a44a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
@@ -43,6 +43,12 @@ class DimOnlyHQLContext extends DimHQLContext {
       query.getHavingTree(), query.getLimitValue());
   }
 
+  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query, String whereClause)
+    throws SemanticException {
+    super(query, dimsToQuery, dimsToQuery.keySet(), query.getSelectTree(), whereClause, query.getGroupByTree(), query
+        .getOrderByTree(), query.getHavingTree(), query.getLimitValue());
+  }
+
   public String toHQL() throws SemanticException {
     return query.getInsertClause() + super.toHQL();
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/ab22a24b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
index e14fcfa..6f6d2e4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
@@ -39,7 +39,8 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
 
   public static final Log LOG = LogFactory.getLog(SingleFactHQLContext.class.getName());
 
-  private CandidateFact fact;
+  private final CandidateFact fact;
+  private String storageAlias;
 
   SingleFactHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
     throws SemanticException {
@@ -47,6 +48,14 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
     this.fact = fact;
   }
 
+  SingleFactHQLContext(CandidateFact fact, String storageAlias, Map<Dimension, CandidateDim> dimsToQuery,
+      CubeQueryContext query, String whereClause) throws SemanticException {
+    super(dimsToQuery, query, whereClause);
+    this.fact = fact;
+    this.storageAlias = storageAlias;
+  }
+
+
   public CandidateFact getFactToQuery() {
     return fact;
   }
@@ -56,46 +65,41 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
       // resolve timerange positions and replace it by corresponding where
       // clause
       for (TimeRange range : query.getTimeRanges()) {
-        String rangeWhere = fact.getRangeToWhereClause().get(range);
-        if (!StringUtils.isBlank(rangeWhere)) {
-          ASTNode rangeAST;
-          try {
-            rangeAST = HQLParser.parseExpr(rangeWhere);
-          } catch (ParseException e) {
-            throw new SemanticException(e);
+        for (Map.Entry<String, String> entry : fact.getRangeToStorageWhereMap().get(range).entrySet()) {
+          String table = entry.getValue();
+          String rangeWhere = entry.getKey();
+
+          if (!StringUtils.isBlank(rangeWhere)) {
+            ASTNode rangeAST;
+            try {
+              rangeAST = HQLParser.parseExpr(rangeWhere);
+            } catch (ParseException e) {
+              throw new SemanticException(e);
+            }
+            rangeAST.setParent(range.getParent());
+            range.getParent().setChild(range.getChildIndex(), rangeAST);
           }
-          rangeAST.setParent(range.getParent());
-          range.getParent().setChild(range.getChildIndex(), rangeAST);
+          fact.getStorgeWhereClauseMap().put(table, query.getWhereTree());
         }
       }
     }
   }
 
-  private final String unionQueryFormat = "SELECT * FROM %s";
-
-  String getUnionQueryFormat() {
-    StringBuilder queryFormat = new StringBuilder();
-    queryFormat.append(unionQueryFormat);
-    if (getQuery().getGroupByTree() != null) {
-      queryFormat.append(" GROUP BY %s");
-    }
-    if (getQuery().getHavingTree() != null) {
-      queryFormat.append(" HAVING %s");
-    }
-    if (getQuery().getOrderByTree() != null) {
-      queryFormat.append(" ORDER BY %s");
-    }
-    if (getQuery().getLimitValue() != null) {
-      queryFormat.append(" LIMIT %s");
-    }
-    return queryFormat.toString();
-  }
 
+  @Override
   protected String getFromTable() throws SemanticException {
     if (getQuery().getAutoJoinCtx() != null && getQuery().getAutoJoinCtx().isJoinsResolved()) {
-      return fact.getStorageString(getQuery().getAliasForTableName(getQuery().getCube().getName()));
+      if (storageAlias != null) {
+        return storageAlias;
+      } else {
+        return fact.getStorageString(getQuery().getAliasForTableName(getQuery().getCube().getName()));
+      }
     } else {
-      return getQuery().getQBFromString(fact, getDimsToQuery());
+      if (fact.getStorageTables().size() == 1) {
+        return getQuery().getQBFromString(fact, getDimsToQuery());
+      } else {
+        return storageAlias;
+      }
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/ab22a24b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
new file mode 100644
index 0000000..9d1d9f2
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lens.cube.parse;
+
+import java.util.ArrayList;
+import java.util.Map;
+
+import org.apache.lens.cube.metadata.Dimension;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+import lombok.Getter;
+
+
+public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
+
+  public static final Log LOG = LogFactory.getLog(SingleFactMultiStorageHQLContext.class.getName());
+
+  @Getter
+  private CubeQueryContext query = null;
+  private CandidateFact fact = null;
+
+  SingleFactMultiStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
+    throws SemanticException {
+    this.query = query;
+    this.fact = fact;
+    setUnionContexts(fact, dimsToQuery, query);
+  }
+
+  private void setUnionContexts(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
+    throws SemanticException {
+    hqlContexts = new ArrayList<HQLContextInterface>();
+    String alias = getQuery().getAliasForTableName(getQuery().getCube().getName());
+    for (String storageTable : fact.getStorageTables()) {
+      SingleFactHQLContext ctx = new SingleFactHQLContext(fact, storageTable + " " + alias, dimsToQuery, query,
+          fact.getWhereClause(storageTable.substring(storageTable.indexOf(".") + 1)));
+      hqlContexts.add(ctx);
+    }
+    super.setHqlContexts(hqlContexts);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/ab22a24b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index bdc9855..5d0e15b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -352,6 +352,7 @@ class StorageTableResolver implements ContextRewriter {
   private void resolveFactStoragePartitions(CubeQueryContext cubeql) throws SemanticException {
     // Find candidate tables wrt supported storages
     Iterator<CandidateFact> i = cubeql.getCandidateFactTables().iterator();
+    Map<TimeRange, String> whereClasueForFallback = new LinkedHashMap<TimeRange, String>();
     while (i.hasNext()) {
       CandidateFact cfact = i.next();
       List<FactPartition> answeringParts = new ArrayList<FactPartition>();
@@ -386,11 +387,26 @@ class StorageTableResolver implements ContextRewriter {
             break;
           }
         }
+        whereClasueForFallback.put(range, extraWhereClause.toString());
         if (rangeParts.isEmpty()) {
           LOG.info("No partitions for fallback range:" + range);
           noPartsForRange = true;
           continue;
         }
+        // If multiple storage tables are part of the same fact,
+        // capture range->storage->partitions
+        Map<String, LinkedHashSet<FactPartition>> tablePartMap = new HashMap<String, LinkedHashSet<FactPartition>>();
+        for (FactPartition factPart : rangeParts) {
+          for (String table : factPart.getStorageTables()) {
+            if (!tablePartMap.containsKey(table)) {
+              tablePartMap.put(table, new LinkedHashSet<FactPartition>(Arrays.asList(factPart)));
+            } else {
+              LinkedHashSet<FactPartition> storagePart = tablePartMap.get(table);
+              storagePart.add(factPart);
+            }
+          }
+        }
+        cfact.getRangeToStoragePartMap().put(range, tablePartMap);
         cfact.incrementPartsQueried(rangeParts.size());
         answeringParts.addAll(rangeParts);
         cfact.getPartsQueried().addAll(rangeParts);
@@ -436,6 +452,28 @@ class StorageTableResolver implements ContextRewriter {
       Set<String> storageTables = new LinkedHashSet<String>();
       storageTables.addAll(minimalStorageTables.keySet());
       cfact.setStorageTables(storageTables);
+
+      // Update range->storage->partitions with time range where clause
+      for (TimeRange trange : cfact.getRangeToStoragePartMap().keySet()) {
+        Map<String, String> rangeToWhere = new HashMap<String, String>();
+        for (Map.Entry<String, Set<FactPartition>> entry : minimalStorageTables.entrySet()) {
+          String table = entry.getKey();
+          Set<FactPartition> minimalParts = entry.getValue();
+
+          LinkedHashSet<FactPartition> rangeParts = cfact.getRangeToStoragePartMap().get(trange).get(table);
+          LinkedHashSet<FactPartition> minimalPartsCopy = new LinkedHashSet<FactPartition>(minimalParts);
+          minimalPartsCopy.retainAll(rangeParts);
+          if (!StringUtils.isEmpty(whereClasueForFallback.get(trange))) {
+            rangeToWhere.put(
+                rangeWriter.getTimeRangeWhereClause(cubeql, cubeql.getAliasForTableName(cubeql.getCube().getName()),
+                    minimalPartsCopy) + " and  " + whereClasueForFallback.get(trange), table);
+          } else {
+            rangeToWhere.put(rangeWriter.getTimeRangeWhereClause(cubeql,
+                cubeql.getAliasForTableName(cubeql.getCube().getName()), minimalPartsCopy), table);
+          }
+        }
+        cfact.getRangeToStorageWhereMap().put(trange, rangeToWhere);
+      }
       // multi table select is already false, do not alter it
       if (cfact.isEnabledMultiTableSelect()) {
         cfact.setEnabledMultiTableSelect(enabledMultiTableSelect);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/ab22a24b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
new file mode 100644
index 0000000..e6ed86b
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lens.cube.parse;
+
+import java.util.ArrayList;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+
+@AllArgsConstructor
+@NoArgsConstructor
+public abstract class UnionHQLContext implements HQLContextInterface {
+
+  @Getter
+  @Setter
+  List<HQLContextInterface> hqlContexts = new ArrayList<HQLContextInterface>();
+
+  @Override
+  public String toHQL() throws SemanticException {
+    Set<String> queryParts = new LinkedHashSet<String>();
+    for (HQLContextInterface ctx : hqlContexts) {
+      queryParts.add(ctx.toHQL());
+    }
+    return StringUtils.join(queryParts, " UNION ALL ");
+  }
+
+  @Override
+  public String getSelect()  {
+    throw new NotImplementedException("Not Implemented");
+  }
+
+  @Override
+  public String getFrom() {
+    throw new NotImplementedException("Not Implemented");
+  }
+
+  @Override
+  public String getWhere() {
+    throw new NotImplementedException("Not Implemented");
+  }
+
+  @Override
+  public String getGroupby() {
+    throw new NotImplementedException("Not Implemented");
+  }
+
+  @Override
+  public String getHaving() {
+    throw new NotImplementedException("Not Implemented");
+  }
+
+  @Override
+  public String getOrderby() {
+    throw new NotImplementedException("Not Implemented");
+  }
+
+  @Override
+  public Integer getLimit() {
+    throw new NotImplementedException("Not Implemented");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/ab22a24b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 1fe4173..5737057 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -47,6 +47,8 @@ import org.testng.Assert;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+
+import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
 
 /*
@@ -119,6 +121,8 @@ public class CubeTestSetup {
   private static String c3 = "C3";
   private static String c4 = "C4";
   private static String c99 = "C99";
+  @Getter
+  private static Map<String, String> storageToUpdatePeriodMap = new LinkedHashMap<String, String>();
 
   static {
     Calendar cal = Calendar.getInstance();
@@ -195,8 +199,6 @@ public class CubeTestSetup {
   public static String getExpectedQuery(String cubeName, String selExpr, String whereExpr, String postWhereExpr,
     Map<String, String> storageTableToWhereClause, List<String> notLatestConditions) {
     StringBuilder expected = new StringBuilder();
-    int numTabs = storageTableToWhereClause.size();
-    Assert.assertEquals(1, numTabs);
     for (Map.Entry<String, String> entry : storageTableToWhereClause.entrySet()) {
       String storageTable = entry.getKey();
       expected.append(selExpr);
@@ -316,11 +318,21 @@ public class CubeTestSetup {
     return getWhereForDailyAndHourly2daysWithTimeDim(cubeName, timedDimension, TWODAYS_BACK, NOW, storageTables);
   }
 
+
   public static Map<String, String> getWhereForDailyAndHourly2daysWithTimeDim(String cubeName, String timedDimension,
     Date from, Date to, String... storageTables) {
     Map<String, String> storageTableToWhereClause = new LinkedHashMap<String, String>();
-    String whereClause = getWhereForDailyAndHourly2daysWithTimeDim(cubeName, timedDimension, from, to);
-    storageTableToWhereClause.put(getStorageTableString(storageTables), whereClause);
+    if (storageToUpdatePeriodMap.isEmpty()) {
+      String whereClause = getWhereForDailyAndHourly2daysWithTimeDim(cubeName, timedDimension, from, to);
+      storageTableToWhereClause.put(getStorageTableString(storageTables), whereClause);
+    } else {
+      for (String tbl : storageTables) {
+        String updatePeriod = storageToUpdatePeriodMap.get(tbl);
+        String whereClause = getWhereForDailyAndHourly2daysWithTimeDimUnionQuery(cubeName, timedDimension, from, to)
+            .get(updatePeriod);
+        storageTableToWhereClause.put(getStorageTableString(tbl), whereClause);
+      }
+    }
     return storageTableToWhereClause;
   }
 
@@ -357,6 +369,26 @@ public class CubeTestSetup {
     return StorageUtil.getWherePartClause(timedDimension, cubeName, parts);
   }
 
+  public static Map<String, String> getWhereForDailyAndHourly2daysWithTimeDimUnionQuery(String cubeName,
+      String timedDimension, Date from, Date to) {
+    Map<String, String> updatePeriodToWhereMap = new HashMap<String, String>();
+    List<String> hourlyparts = new ArrayList<String>();
+    List<String> dailyparts = new ArrayList<String>();
+    Date dayStart;
+    if (!CubeTestSetup.isZerothHour()) {
+      addParts(hourlyparts, UpdatePeriod.HOURLY, from, DateUtil.getCeilDate(from, UpdatePeriod.DAILY));
+      addParts(hourlyparts, UpdatePeriod.HOURLY, DateUtil.getFloorDate(to, UpdatePeriod.DAILY),
+          DateUtil.getFloorDate(to, UpdatePeriod.HOURLY));
+      dayStart = DateUtil.getCeilDate(from, UpdatePeriod.DAILY);
+    } else {
+      dayStart = from;
+    }
+    addParts(dailyparts, UpdatePeriod.DAILY, dayStart, DateUtil.getFloorDate(to, UpdatePeriod.DAILY));
+    updatePeriodToWhereMap.put("DAILY", StorageUtil.getWherePartClause(timedDimension, cubeName, dailyparts));
+    updatePeriodToWhereMap.put("HOURLY", StorageUtil.getWherePartClause(timedDimension, cubeName, hourlyparts));
+    return updatePeriodToWhereMap;
+  }
+
   // storageTables[0] is hourly
   // storageTables[1] is daily
   // storageTables[2] is monthly
@@ -411,6 +443,52 @@ public class CubeTestSetup {
     return storageTableToWhereClause;
   }
 
+  public static Map<String, String> getWhereForMonthlyDailyAndHourly2monthsUnionQuery(String storageTable) {
+    Map<String, List<String>> updatePeriodToPart = new LinkedHashMap<String, List<String>>();
+    List<String> hourlyparts = new ArrayList<String>();
+    List<String> dailyparts = new ArrayList<String>();
+    List<String> monthlyparts = new ArrayList<String>();
+
+    Date dayStart = TWO_MONTHS_BACK;
+    Date monthStart = TWO_MONTHS_BACK;
+    if (!CubeTestSetup.isZerothHour()) {
+      addParts(hourlyparts, UpdatePeriod.HOURLY, TWO_MONTHS_BACK,
+          DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.DAILY));
+      addParts(hourlyparts, UpdatePeriod.HOURLY, DateUtil.getFloorDate(NOW, UpdatePeriod.DAILY),
+          DateUtil.getFloorDate(NOW, UpdatePeriod.HOURLY));
+      dayStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.DAILY);
+      monthStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.MONTHLY);
+    }
+    Calendar cal = new GregorianCalendar();
+    cal.setTime(dayStart);
+    if (cal.get(Calendar.DAY_OF_MONTH) != 1) {
+      addParts(dailyparts, UpdatePeriod.DAILY, dayStart, DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.MONTHLY));
+      monthStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.MONTHLY);
+    }
+    addParts(dailyparts, UpdatePeriod.DAILY, DateUtil.getFloorDate(NOW, UpdatePeriod.MONTHLY),
+        DateUtil.getFloorDate(NOW, UpdatePeriod.DAILY));
+    addParts(monthlyparts, UpdatePeriod.MONTHLY, monthStart, DateUtil.getFloorDate(NOW, UpdatePeriod.MONTHLY));
+
+    updatePeriodToPart.put("HOURLY", hourlyparts);
+    updatePeriodToPart.put("DAILY", dailyparts);
+    updatePeriodToPart.put("MONTHLY", monthlyparts);
+
+    List<String> unionParts = new ArrayList<String>();
+    for (Map.Entry<String, String> entry : storageToUpdatePeriodMap.entrySet()) {
+      String uperiod = entry.getKey();
+      String table = entry.getValue();
+      if (table.equals(storageTable) && updatePeriodToPart.containsKey(uperiod)) {
+        unionParts.addAll(updatePeriodToPart.get(uperiod));
+        Collections.sort(unionParts);
+      }
+    }
+
+    HashMap<String, String> tabWhere = new LinkedHashMap<String, String>();
+    tabWhere.put(getStorageTableString(storageTable), StorageUtil.getWherePartClause("dt", TEST_CUBE_NAME, unionParts));
+
+    return tabWhere;
+  }
+
   public static Map<String, String> getWhereForMonthly2months(String monthlyTable) {
     Map<String, String> storageTableToWhereClause = new LinkedHashMap<String, String>();
     List<String> parts = new ArrayList<String>();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/ab22a24b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 44826bd..0f51978 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -359,46 +359,76 @@ public class TestCubeRewriter extends TestQueryRewrite {
   }
 
   @Test
+  public void testCubeWhereQueryDuplicatePartitionElimination() throws Exception {
+    Configuration conf = getConf();
+    conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testfact"), "C1_testFact,C2_testFact");
+    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C1"), "DAILY,HOURLY");
+    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
+    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C2"), "MONTHLY,DAILY");
+
+    try {
+      CubeTestSetup.getStorageToUpdatePeriodMap().put("HOURLY", "c1_testfact");
+      CubeTestSetup.getStorageToUpdatePeriodMap().put("DAILY", "c1_testfact");
+      CubeTestSetup.getStorageToUpdatePeriodMap().put("MONTHLY", "c2_testfact");
+
+      // Union query
+      String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
+      System.out.println("HQL: " + hqlQuery);
+
+      String expected1 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+          getWhereForMonthlyDailyAndHourly2monthsUnionQuery("c1_testfact"));
+      String expected2 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+          getWhereForMonthlyDailyAndHourly2monthsUnionQuery("c2_testfact"));
+
+      System.out.println("Expected1 : " + expected1);
+      System.out.println("Expected2 : " + expected2);
+
+      TestCubeRewriter.compareContains(expected1, hqlQuery);
+      TestCubeRewriter.compareContains(expected2, hqlQuery);
+      TestCubeRewriter.compareContains("UNION ALL", hqlQuery);
+    } finally {
+      CubeTestSetup.getStorageToUpdatePeriodMap().clear();
+    }
+
+  }
+
+  @Test
   public void testCubeWhereQueryWithMultipleTables() throws Exception {
     Configuration conf = getConf();
+    conf.setBoolean(CubeQueryConfUtil.ENABLE_MULTI_TABLE_SELECT, false);
     conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testfact"), "C1_testFact,C2_testFact");
     conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C1"), "DAILY");
     conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
     conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C2"), "HOURLY");
-    String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-
-    String expected = null;
-    if (!CubeTestSetup.isZerothHour()) {
-      expected =
-        getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-          getWhereForDailyAndHourly2days(cubeName, "c1_testfact", "C2_testfact"));
-    } else {
-      expected =
-        getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-          getWhereForDailyAndHourly2days(cubeName, "c1_testfact"));
-    }
-    compareQueries(expected, hqlQuery);
 
-    // Union query
-    conf.setBoolean(CubeQueryConfUtil.ENABLE_MULTI_TABLE_SELECT, false);
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("DAILY", "c1_testfact");
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("HOURLY", "c2_testfact");
+
     try {
-      // rewrite to union query
-      hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-      System.out.println("Union hql query:" + hqlQuery);
-
-      // TODO: uncomment the following once union query
-      // rewriting has been done
-      // expected = // write expected union query
-      // compareQueries(expected, hqlQuery);
-    } catch (Exception e) {
-      e.printStackTrace();
+      // Union query
+      String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
+      System.out.println("HQL:" + hqlQuery);
+
+      String expected1 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+          getWhereForDailyAndHourly2days(cubeName, "c1_testfact"));
+      String expected2 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+          getWhereForDailyAndHourly2days(cubeName, "c2_testfact"));
+
+      System.out.println("Expected1 : " + expected1);
+      System.out.println("Expected2 : " + expected2);
+
+      TestCubeRewriter.compareContains(expected1, hqlQuery);
+      TestCubeRewriter.compareContains(expected2, hqlQuery);
+      TestCubeRewriter.compareContains("UNION ALL", hqlQuery);
+    } finally {
+      CubeTestSetup.getStorageToUpdatePeriodMap().clear();
     }
   }
 
   @Test
   public void testCubeWhereQueryWithMultipleTablesForMonth() throws Exception {
     Configuration conf = getConf();
-    conf.setBoolean(CubeQueryConfUtil.ENABLE_MULTI_TABLE_SELECT, true);
+    conf.setBoolean(CubeQueryConfUtil.ENABLE_MULTI_TABLE_SELECT, false);
     conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "");
     conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testfact"), "");
     conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C1"), "HOURLY");
@@ -407,33 +437,32 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C2"), "DAILY");
     conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C3"), "MONTHLY");
 
-    String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
-    String expected;
-    if (!CubeTestSetup.isZerothHour()) {
-      expected =
-        getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-          getWhereForMonthlyDailyAndHourly2months("c1_testfact", "c2_testFact", "C3_testfact"));
-    } else {
-      expected =
-        getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-          getWhereForMonthlyDailyAndHourly2months("c1_testfact", "c2_testfact", "c3_testFact"));
-    }
-    compareQueries(hqlQuery, expected);
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("HOURLY", "c1_testfact");
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("DAILY", "c2_testfact");
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("MONTHLY", "c3_testfact");
 
-    // monthly - c1,c2; daily - c1, hourly -c2
-    conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testfact"), "C1_testFact,C2_testFact");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C1"), "MONTHLY,DAILY");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C2"), "MONTHLY,HOURLY");
     try {
-      hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
-      System.out.println("union query:" + hqlQuery);
-      // TODO: uncomment the following once union query rewriting has been done
-      // expected = getExpectedQuery(cubeName,
-      // "select sum(testcube.msr2) FROM ", null, null,
-      // getWhereForMonthlyDailyAndHourly2months("C1_testfact"));
-      // compareQueries(expected, hqlQuery);
-    } catch (Exception e) {
-      e.printStackTrace();
+      // Union query
+      String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
+      System.out.println("HQL:" + hqlQuery);
+
+      String expected1 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+          getWhereForMonthlyDailyAndHourly2monthsUnionQuery("c1_testfact"));
+      String expected2 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+          getWhereForMonthlyDailyAndHourly2monthsUnionQuery("c2_testfact"));
+      String expected3 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+          getWhereForMonthlyDailyAndHourly2monthsUnionQuery("c3_testfact"));
+
+      System.out.println("Expected1 : " + expected1);
+      System.out.println("Expected2 : " + expected2);
+      System.out.println("Expected3 : " + expected3);
+
+      TestCubeRewriter.compareContains(expected1, hqlQuery);
+      TestCubeRewriter.compareContains(expected2, hqlQuery);
+      TestCubeRewriter.compareContains(expected3, hqlQuery);
+      TestCubeRewriter.compareContains("UNION ALL", hqlQuery);
+    } finally {
+      CubeTestSetup.getStorageToUpdatePeriodMap().clear();
     }
   }