Posted to commits@hive.apache.org by am...@apache.org on 2013/04/05 12:39:28 UTC

svn commit: r1464916 - in /hive/branches/HIVE-4115/ql/src: java/org/apache/hadoop/hive/ql/cube/metadata/ java/org/apache/hadoop/hive/ql/cube/parse/ java/org/apache/hadoop/hive/ql/cube/processors/ test/org/apache/hadoop/hive/ql/cube/parse/ test/org/apac...

Author: amareshwari
Date: Fri Apr  5 10:39:27 2013
New Revision: 1464916

URL: http://svn.apache.org/r1464916
Log:
Add support for dimension-only queries

Modified:
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java?rev=1464916&r1=1464915&r2=1464916&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java Fri Apr  5 10:39:27 2013
@@ -164,4 +164,7 @@ public final class CubeDimensionTable ex
     return snapshotDumpPeriods.keySet();
   }
 
+  public boolean hasStorageSnapshots(String storage) {
+    return (snapshotDumpPeriods.get(storage) != null);
+  }
 }
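
The new hasStorageSnapshots() check matters because a storage can be registered with a null dump period (see the C2 storage in CubeTestSetup below), meaning "no snapshot partitions". A minimal, self-contained sketch of that semantics, with String values standing in for UpdatePeriod:

    import java.util.HashMap;
    import java.util.Map;

    public class SnapshotCheckSketch {
      // Stand-in for snapshotDumpPeriods; in CubeDimensionTable the values
      // are UpdatePeriod instances, Strings keep the sketch self-contained.
      private final Map<String, String> snapshotDumpPeriods =
          new HashMap<String, String>();

      public boolean hasStorageSnapshots(String storage) {
        // A storage mapped to null has no snapshots, so a plain
        // containsKey() check would not be equivalent.
        return snapshotDumpPeriods.get(storage) != null;
      }

      public static void main(String[] args) {
        SnapshotCheckSketch s = new SnapshotCheckSketch();
        s.snapshotDumpPeriods.put("C1", "HOURLY");
        s.snapshotDumpPeriods.put("C2", null);
        System.out.println(s.hasStorageSnapshots("C1")); // true
        System.out.println(s.hasStorageSnapshots("C2")); // false
      }
    }

StorageTableResolver uses this check to decide whether a dimension storage table needs a 'latest' partition filter in the where clause.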

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java?rev=1464916&r1=1464915&r2=1464916&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java Fri Apr  5 10:39:27 2013
@@ -167,4 +167,9 @@ public final class CubeFactTable extends
   public Set<String> getStorages() {
     return storageUpdatePeriods.keySet();
   }
+
+  public String getCubeName() {
+    return cubeName;
+  }
+
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java?rev=1464916&r1=1464915&r2=1464916&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java Fri Apr  5 10:39:27 2013
@@ -81,9 +81,16 @@ public abstract class Storage implements
     return latestSpec;
   }
 
+  public static List<String> getPartitionsForLatest() {
+    List<String> parts = new ArrayList<String>();
+    parts.add(StorageConstants.LATEST_PARTITION_VALUE);
+    return parts;
+  }
+
   private static FieldSchema dtPart = new FieldSchema(getDatePartitionKey(),
       serdeConstants.STRING_TYPE_NAME,
       "date partition");
+
   public static FieldSchema getDatePartition() {
     return dtPart;
   }
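
getPartitionsForLatest() feeds the dimension path: a snapshot storage table is filtered down to its single 'latest' partition. A standalone sketch, assuming StorageConstants.LATEST_PARTITION_VALUE is the literal "latest" (the constant's value is not shown in this diff):

    import java.util.ArrayList;
    import java.util.List;

    public class LatestPartitionSketch {
      // Assumed value; the real constant lives in StorageConstants.
      static final String LATEST_PARTITION_VALUE = "latest";

      public static List<String> getPartitionsForLatest() {
        List<String> parts = new ArrayList<String>();
        parts.add(LATEST_PARTITION_VALUE);
        return parts;
      }

      public static void main(String[] args) {
        System.out.println(getPartitionsForLatest()); // [latest]
      }
    }

StorageTableResolver passes this one-element list to its where-clause builder, producing a filter like citytable.dt = 'latest' (assuming the date partition key is dt).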

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java?rev=1464916&r1=1464915&r2=1464916&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java Fri Apr  5 10:39:27 2013
@@ -46,7 +46,8 @@ public class CubeQueryContext {
   private Date timeTo;
   private String clauseName = null;
   private Map<String, List<String>> partitionCols;
-  protected Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartitionMap;
+  protected Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartitionMap =
+      new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
 
   public CubeQueryContext(ASTNode ast, QB qb, HiveConf conf)
       throws SemanticException {
@@ -64,6 +65,7 @@ public class CubeQueryContext {
     this.fromDateRaw = other.fromDateRaw;
     this.toDateRaw = other.toDateRaw;
     this.dimensions = other.dimensions;
+    this.cube = other.cube;
     this.candidateFactTables = other.candidateFactTables;
     this.timeFrom = other.timeFrom;
     this.timeTo = other.timeTo;
@@ -76,6 +78,14 @@ public class CubeQueryContext {
     return qb;
   }
 
+  public boolean hasCubeInQuery() {
+    return cube != null;
+  }
+
+  public boolean hasDimensionInQuery() {
+    return dimensions != null && !dimensions.isEmpty();
+  }
+
   private void extractMetaTables() throws SemanticException {
     try {
       CubeMetastoreClient client;
@@ -97,7 +107,9 @@ public class CubeQueryContext {
       if (cube == null && dimensions.size() == 0) {
         throw new SemanticException("Neither cube nor dimensions accessed");
       }
-      candidateFactTables.addAll(client.getAllFactTables(cube));
+      if (cube != null) {
+        candidateFactTables.addAll(client.getAllFactTables(cube));
+      }
     } catch (HiveException e) {
       throw new SemanticException(e);
     }
@@ -112,6 +124,9 @@ public class CubeQueryContext {
   }
 
   private void extractTimeRange() throws SemanticException {
+    if (cube == null) {
+      return;
+    }
     // get time range -
     // Time range should be direct child of where condition
     // TOK_WHERE.TOK_FUNCTION.Identifier Or, it should be right hand child of
@@ -463,6 +478,13 @@ public class CubeQueryContext {
     return qb.getParseInfo().getOrderByForClause(getClause());
   }
 
+  public ASTNode getFromTree() {
+    return qb.getParseInfo().getSrcForAlias(qb.getTabAliases().iterator().next());
+  }
+
+  public Integer getLimitValue() {
+    return qb.getParseInfo().getDestLimit(getClause());
+  }
   public String toHQL() throws SemanticException {
     return null;
   }
@@ -473,6 +495,6 @@ public class CubeQueryContext {
 
   public void setFactPartitionMap(Map<CubeFactTable,
       Map<UpdatePeriod, List<String>>> factPartitionMap) {
-    this.factPartitionMap = factPartitionMap;
+    this.factPartitionMap.putAll(factPartitionMap);
   }
 }
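
factPartitionMap is now initialized eagerly and the setter merges with putAll() instead of replacing the reference, so a dimension-only query, where PartitionResolver never calls the setter, sees an empty map rather than null. A minimal sketch of the difference, with simplified types:

    import java.util.HashMap;
    import java.util.Map;

    public class EagerMapSketch {
      // Before this change the field stayed null until the resolver ran;
      // now it is always non-null, possibly empty.
      private final Map<String, String> factPartitionMap =
          new HashMap<String, String>();

      public void setFactPartitionMap(Map<String, String> m) {
        factPartitionMap.putAll(m); // merge, never replace
      }

      public static void main(String[] args) {
        EagerMapSketch ctx = new EagerMapSketch();
        // No resolver call here (the dimension-only path):
        System.out.println(ctx.factPartitionMap.isEmpty()); // true, no NPE
      }
    }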

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java?rev=1464916&r1=1464915&r2=1464916&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java Fri Apr  5 10:39:27 2013
@@ -1,11 +1,14 @@
 package org.apache.hadoop.hive.ql.cube.parse;
 
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.cube.metadata.AbstractCubeTable;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeDimensionTable;
 import org.apache.hadoop.hive.ql.cube.metadata.CubeFactTable;
-import org.apache.hadoop.hive.ql.cube.metadata.Storage;
 import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.QB;
@@ -15,7 +18,18 @@ public class CubeQueryContextWithStorage
 
   private final List<String> supportedStorages;
   private final boolean allStoragesSupported;
-  private Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factStorageMap;
+  private final Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factStorageMap =
+      new HashMap<CubeFactTable, Map<UpdatePeriod,List<String>>>();
+  private final Map<CubeDimensionTable, List<String>> dimStorageMap =
+      new HashMap<CubeDimensionTable, List<String>>();
+  private final Map<String, String> storageTableToWhereClause =
+      new HashMap<String, String>();
+  private final Map<AbstractCubeTable, String> storageTableToQuery =
+      new HashMap<AbstractCubeTable, String>();
+
+  public boolean hasPartitions() {
+    return !storageTableToWhereClause.isEmpty();
+  }
 
   public CubeQueryContextWithStorage(ASTNode ast, QB qb,
       List<String> supportedStorages) throws SemanticException {
@@ -31,89 +45,157 @@ public class CubeQueryContextWithStorage
     this.allStoragesSupported = (supportedStorages == null);
   }
 
-  public List<String> getStorageNames() {
+  public List<String> getSupportedStorages() {
     return supportedStorages;
   }
 
-  public CubeFactTable getCandidateFactTable() {
-    if (candidateFactTables.size() > 0) {
-      return candidateFactTables.iterator().next();
-    }
-    return null;
-  }
-
-  String simpleQueryFormat = "SELECT %s FROM %s WHERE %s";
-  String joinQueryFormat = "SELECT %s FROM %s JOIN %s WHERE %s";
-
-  private String toHQL(CubeFactTable fact) {
-    Map<UpdatePeriod, List<String>> storageTableMap = factStorageMap.get(fact);
-    Map<UpdatePeriod, List<String>> partColMap = factPartitionMap.get(fact);
-
-    StringBuilder query = new StringBuilder();
-    Iterator<UpdatePeriod> it = partColMap.keySet().iterator();
-    while (it.hasNext()) {
-      UpdatePeriod updatePeriod = it.next();
-      query.append(toHQL(storageTableMap.get(updatePeriod).get(0),
-          partColMap.get(updatePeriod)));
-      if (it.hasNext()) {
-        query.append(" UNION ");
-      }
+  public boolean isStorageSupported(String storage) {
+    if (!allStoragesSupported) {
+      return supportedStorages.contains(storage);
+    }
+    return true;
+  }
+
+  private final String baseQueryFormat = "SELECT %s FROM %s";
+
+  String getQueryFormat() {
+    StringBuilder queryFormat = new StringBuilder();
+    queryFormat.append(baseQueryFormat);
+    if (getWhereTree() != null || hasPartitions()) {
+      queryFormat.append(" WHERE %s");
+    }
+    if (getGroupbyTree() != null) {
+      queryFormat.append(" GROUP BY %s");
+    }
+    if (getHavingTree() != null) {
+      queryFormat.append(" HAVING %s");
+    }
+    if (getOrderbyTree() != null) {
+      queryFormat.append(" ORDER BY %s");
+    }
+    if (getLimitValue() != null) {
+      queryFormat.append(" LIMIT %s");
+    }
+    return queryFormat.toString();
+  }
+
+  private Object[] getQueryTreeStrings(String factStorageTable) {
+    List<String> qstrs = new ArrayList<String>();
+    qstrs.add(HQLParser.getString(getSelectTree()));
+    String fromString = HQLParser.getString(getFromTree()).toLowerCase();
+    String whereString = getWhereTree(factStorageTable);
+    for (Map.Entry<AbstractCubeTable, String> entry :
+        storageTableToQuery.entrySet()) {
+      String src = entry.getKey().getName().toLowerCase();
+      System.out.println("From string:" + fromString + " src:" + src + " value:" + entry.getValue());
+      fromString = fromString.replaceAll(src, entry.getValue() + " " + src);
+    }
+    qstrs.add(fromString);
+    if (whereString != null) {
+      qstrs.add(whereString);
+    }
+    if (getGroupbyTree() != null) {
+      qstrs.add(HQLParser.getString(getGroupbyTree()));
+    }
+    if (getHavingTree() != null) {
+      qstrs.add(HQLParser.getString(getHavingTree()));
     }
-    return query.toString();
+    if (getOrderbyTree() != null) {
+      qstrs.add(HQLParser.getString(getOrderbyTree()));
+    }
+    if (getLimitValue() != null) {
+      qstrs.add(String.valueOf(getLimitValue()));
+    }
+    return qstrs.toArray(new String[0]);
   }
 
-  private String toHQL(String tableName, List<String> parts) {
-    String selectString = HQLParser.getString(getSelectTree());
-    String whereString = getWhereTree(parts);
-    String actualQuery = String.format(simpleQueryFormat, selectString,
-        tableName,
-        whereString);
-    return actualQuery;
-  }
-
-  public String getWhereTree(List<String> parts) {
-    //TODO Construct where tree with part conditions
-    String originalWhereString = HQLParser.getString(super.getWhereTree());
-    String whereWithoutTimerange = originalWhereString.substring(0,
-        originalWhereString.indexOf(TIME_RANGE_FUNC));
-    String whereWithPartCols = whereWithoutTimerange + getWherePartClause(parts);
-    return whereWithPartCols;
-  }
-
-  public String getWherePartClause(List<String> parts) {
-    StringBuilder partStr = new StringBuilder();
-    for (int i = 0; i < parts.size() - 1; i++) {
-      partStr.append(Storage.getDatePartitionKey());
-      partStr.append(" = '");
-      partStr.append(parts.get(i));
-      partStr.append("'");
-      partStr.append(" OR ");
-    }
-
-    // add the last partition
-    partStr.append(Storage.getDatePartitionKey());
-    partStr.append(" = '");
-    partStr.append(parts.get(parts.size() - 1));
-    partStr.append("'");
-    return partStr.toString();
+  private String toHQL(String tableName) {
+    String qfmt = getQueryFormat();
+    System.out.println("qfmt:" + qfmt);
+    return String.format(qfmt, getQueryTreeStrings(tableName));
+  }
+
+  public String getWhereTree(String factStorageTable) {
+    String originalWhereString = HQLParser.getString(getWhereTree());
+    String whereWithoutTimerange;
+    if (factStorageTable != null) {
+      whereWithoutTimerange = originalWhereString.substring(0,
+          originalWhereString.indexOf(CubeQueryContext.TIME_RANGE_FUNC));
+    } else {
+      whereWithoutTimerange = originalWhereString;
+    }
+    // add where clause for all dimensions
+    for (CubeDimensionTable dim : dimensions) {
+      String storageTable = dimStorageMap.get(dim).get(0);
+      storageTableToQuery.put(dim, storageTable);
+      whereWithoutTimerange += storageTableToWhereClause.get(storageTable);
+    }
+    if (factStorageTable != null) {
+      // add where clause for fact;
+      return whereWithoutTimerange + storageTableToWhereClause.get(
+          factStorageTable);
+    } else {
+      return whereWithoutTimerange;
+    }
   }
 
   @Override
   public String toHQL() throws SemanticException {
-    CubeFactTable candidateFactTable = getCandidateFactTable();
-    if (candidateFactTable == null) {
+    CubeFactTable fact = null;
+    if (hasCubeInQuery()) {
+      if (candidateFactTables.size() > 0) {
+        fact = candidateFactTables.iterator().next();
+      }
+    }
+    if (fact == null && !hasDimensionInQuery()) {
       throw new SemanticException("No valid fact table available");
     }
-    return toHQL(candidateFactTable);
+
+    if (fact != null) {
+      Map<UpdatePeriod, List<String>> storageTableMap = factStorageMap.get(fact);
+      Map<UpdatePeriod, List<String>> partColMap = factPartitionMap.get(fact);
+
+      StringBuilder query = new StringBuilder();
+      Iterator<UpdatePeriod> it = partColMap.keySet().iterator();
+      while (it.hasNext()) {
+        UpdatePeriod updatePeriod = it.next();
+        String storageTable = storageTableMap.get(updatePeriod).get(0);
+        storageTableToQuery.put(getCube(), storageTable);
+        query.append(toHQL(storageTable));
+        if (it.hasNext()) {
+          query.append(" UNION ");
+        }
+      }
+      return query.toString();
+    } else {
+      return toHQL(null);
+    }
   }
 
-  public Map<CubeFactTable, Map<UpdatePeriod, List<String>>> getFactStorageMap() {
+  public Map<CubeFactTable, Map<UpdatePeriod, List<String>>> getFactStorageMap()
+  {
     return factStorageMap;
   }
 
   public void setFactStorageMap(Map<CubeFactTable,
-      Map<UpdatePeriod, List<String>>> storageTableMap) {
-    this.factStorageMap = storageTableMap;
+      Map<UpdatePeriod, List<String>>> factStorageMap) {
+    this.factStorageMap.putAll(factStorageMap);
+  }
+
+  public void setDimStorageMap(
+      Map<CubeDimensionTable, List<String>> dimStorageMap) {
+    this.dimStorageMap.putAll(dimStorageMap);
+  }
+
+  public Map<CubeDimensionTable, List<String>> getDimStorageMap() {
+    return this.dimStorageMap;
+  }
+
+  public Map<String, String> getStorageTableToWhereClause() {
+    return storageTableToWhereClause;
   }
 
+  public void setStorageTableToWhereClause(Map<String, String> whereClauseMap) {
+    storageTableToWhereClause.putAll(whereClauseMap);
+  }
 }
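
The rewritten toHQL() builds a format string whose %s slots match only the clauses actually present (WHERE, GROUP BY, HAVING, ORDER BY, LIMIT), then fills them positionally; for facts it repeats this per update period and joins the results with UNION. A standalone sketch of the format-assembly pattern, with literal clause values standing in for the parse-tree strings and an assumed storage table name c1_citytable:

    import java.util.ArrayList;
    import java.util.List;

    public class QueryFormatSketch {
      public static void main(String[] args) {
        String where = "citytable.dt = 'latest'"; // from storageTableToWhereClause
        Integer limit = Integer.valueOf(100);     // from getLimitValue(); may be null

        StringBuilder fmt = new StringBuilder("SELECT %s FROM %s");
        List<String> parts = new ArrayList<String>();
        parts.add("name, stateid");
        parts.add("c1_citytable citytable");
        if (where != null) {
          fmt.append(" WHERE %s");
          parts.add(where);
        }
        if (limit != null) {
          fmt.append(" LIMIT %s");
          parts.add(String.valueOf(limit));
        }
        // Slots and arguments stay in step because both are appended together.
        System.out.println(String.format(fmt.toString(), parts.toArray()));
        // SELECT name, stateid FROM c1_citytable citytable
        //     WHERE citytable.dt = 'latest' LIMIT 100
      }
    }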

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java?rev=1464916&r1=1464915&r2=1464916&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java Fri Apr  5 10:39:27 2013
@@ -39,6 +39,7 @@ public class CubeQueryRewriter {
     phase2Rewriters.add(new StorageTableResolver(conf));
     phase2Rewriters.add(new LeastPartitionResolver(conf));
     phase2Rewriters.add(new LeastDimensionResolver(conf));
+   // phase2Rewriters.add(new WhereConditionRewriter(conf));
   }
 
   public CubeQueryContext rewritePhase1(ASTNode astnode)

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java?rev=1464916&r1=1464915&r2=1464916&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java Fri Apr  5 10:39:27 2013
@@ -19,28 +19,23 @@ public class PartitionResolver implement
 
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
-    Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartitionMap =
-        new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
-    Date fromDate = cubeql.getFromDate();
-    Date toDate = cubeql.getToDate();
-
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(fromDate);
-
-    for (CubeFactTable fact : cubeql.getFactTables()) {
-      Map<UpdatePeriod, List<String>> partitionColMap =
-          new HashMap<UpdatePeriod, List<String>>();
-      factPartitionMap.put(fact, partitionColMap);
-      getPartitions(fact, fromDate, toDate, partitionColMap);
+    if (!cubeql.getFactTables().isEmpty()) {
+      Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartitionMap =
+          new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
+      Date fromDate = cubeql.getFromDate();
+      Date toDate = cubeql.getToDate();
+
+      Calendar cal = Calendar.getInstance();
+      cal.setTime(fromDate);
+      for (CubeFactTable fact : cubeql.getFactTables()) {
+        Map<UpdatePeriod, List<String>> partitionColMap =
+            new HashMap<UpdatePeriod, List<String>>();
+        factPartitionMap.put(fact, partitionColMap);
+        getPartitions(fact, fromDate, toDate, partitionColMap);
+      }
+      // set partition cols map in cubeql
+      cubeql.setFactPartitionMap(factPartitionMap);
     }
-
-    /*for (CubeDimensionTable dim : cubeql.getDimensionTables()) {
-      partitionColMap.put(MetastoreUtil.getVirtualDimTableName(
-          dim.getName()), dim.getPartitions());
-    }*/
-
-    // set partition cols map in cubeql
-    cubeql.setFactPartitionMap(factPartitionMap);
   }
 
   void getPartitions(CubeFactTable fact, Date fromDate, Date toDate,
@@ -54,14 +49,14 @@ public class PartitionResolver implement
     UpdatePeriod interval = fact.maxIntervalInRange(fromDate, toDate);
     if (interval == null) {
       throw new SemanticException("Could not find a partition for given range:"
-        + fromDate + "-" + toDate);
+          + fromDate + "-" + toDate);
     }
 
     System.out.println("fact: " + fact.getName() + " max interval:" + interval);
     Date ceilFromDate = DateUtils.getCeilDate(fromDate, interval);
     Date floorToDate = DateUtils.getFloorDate(toDate, interval);
     List<String> partitions = fact.getPartitions(ceilFromDate, floorToDate,
-          interval);
+        interval);
     if (partitions != null) {
       List<String> parts = partitionColMap.get(interval);
       if (parts == null) {
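
With the new empty-check, dimension-only queries skip partition resolution entirely. For fact queries, getPartitions() still accumulates partitions per update period; a self-contained sketch of that accumulation, with String keys standing in for UpdatePeriod and illustrative partition values:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class PartitionMapSketch {
      public static void main(String[] args) {
        Map<String, List<String>> partitionColMap =
            new HashMap<String, List<String>>();

        // Mirrors the tail of getPartitions(): append to the interval's
        // list, creating it on first use.
        List<String> partitions = Arrays.asList("2013-04-05-09", "2013-04-05-10");
        String interval = "HOURLY";
        List<String> parts = partitionColMap.get(interval);
        if (parts == null) {
          parts = new ArrayList<String>();
          partitionColMap.put(interval, parts);
        }
        parts.addAll(partitions);

        System.out.println(partitionColMap);
        // {HOURLY=[2013-04-05-09, 2013-04-05-10]}
      }
    }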

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java?rev=1464916&r1=1464915&r2=1464916&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java Fri Apr  5 10:39:27 2013
@@ -6,6 +6,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeDimensionTable;
 import org.apache.hadoop.hive.ql.cube.metadata.CubeFactTable;
 import org.apache.hadoop.hive.ql.cube.metadata.MetastoreUtil;
 import org.apache.hadoop.hive.ql.cube.metadata.Storage;
@@ -22,6 +23,10 @@ public class StorageTableResolver implem
       throws SemanticException {
     CubeQueryContextWithStorage cubeqlStorage =
         (CubeQueryContextWithStorage) cubeql;
+    Map<String, String> storageTableToWhereClause =
+        new HashMap<String, String>();
+
+    // resolve fact tables
     Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factStorageMap =
         new HashMap<CubeFactTable, Map<UpdatePeriod,List<String>>>();
     Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartMap =
@@ -35,13 +40,69 @@ public class StorageTableResolver implem
       for (UpdatePeriod updatePeriod : partitionColMap.keySet()) {
         List<String> storageTables = new ArrayList<String>();
         storageTableMap.put(updatePeriod, storageTables);
+        List<String> parts = partitionColMap.get(updatePeriod);
         for (String storage : fact.getStorages()) {
-          storageTables.add(MetastoreUtil.getFactStorageTableName(
-              fact.getName(), updatePeriod, Storage.getPrefix(storage)));
+          if (cubeqlStorage.isStorageSupported(storage)) {
+            String tableName = MetastoreUtil.getFactStorageTableName(
+                fact.getName(), updatePeriod, Storage.getPrefix(storage));
+            storageTables.add(tableName);
+            storageTableToWhereClause.put(tableName,
+                getWherePartClause(fact.getCubeName(), parts));
+          } else {
+            System.out.println("Storage:" + storage + " is not supported");
+          }
         }
       }
     }
     cubeqlStorage.setFactStorageMap(factStorageMap);
+
+    //resolve dimension tables
+    Map<CubeDimensionTable, List<String>> dimStorageMap =
+        new HashMap<CubeDimensionTable, List<String>>();
+    for (CubeDimensionTable dim : cubeql.getDimensionTables()) {
+        List<String> storageTables = new ArrayList<String>();
+        dimStorageMap.put(dim, storageTables);
+        for (String storage : dim.getStorages()) {
+          if (cubeqlStorage.isStorageSupported(storage)) {
+            String tableName = MetastoreUtil.getDimStorageTableName(
+                dim.getName(), Storage.getPrefix(storage));
+            storageTables.add(tableName);
+            if (dim.hasStorageSnapshots(storage)) {
+              storageTableToWhereClause.put(tableName,
+                getWherePartClause(dim.getName(), Storage.getPartitionsForLatest()));
+            }
+          } else {
+            System.out.println("Storage:" + storage + " is not supported");
+          }
+        }
+      }
+    cubeqlStorage.setDimStorageMap(dimStorageMap);
+    cubeqlStorage.setStorageTableToWhereClause(storageTableToWhereClause);
+  }
+
+  private String getWherePartClause(String tableName, List<String> parts) {
+    if (parts.size() == 0) {
+      return "";
+    }
+    StringBuilder partStr = new StringBuilder();
+    for (int i = 0; i < parts.size() - 1; i++) {
+      partStr.append(tableName);
+      partStr.append(".");
+      partStr.append(Storage.getDatePartitionKey());
+      partStr.append(" = '");
+      partStr.append(parts.get(i));
+      partStr.append("'");
+      partStr.append(" OR ");
+    }
+
+    // add the last partition
+    partStr.append(tableName);
+    partStr.append(".");
+    partStr.append(Storage.getDatePartitionKey());
+    partStr.append(" = '");
+    partStr.append(parts.get(parts.size() - 1));
+    partStr.append("'");
+    return partStr.toString();
   }
 
 }
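
getWherePartClause() now table-qualifies the date partition key and ORs one equality term per partition; dimension snapshot tables get the single 'latest' term. An equivalent iterator-based sketch, assuming the date partition key is "dt" (the real key comes from Storage.getDatePartitionKey() and is not shown in this diff):

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class WherePartClauseSketch {
      static final String DATE_PART_KEY = "dt"; // assumed for illustration

      static String getWherePartClause(String tableName, List<String> parts) {
        if (parts.isEmpty()) {
          return "";
        }
        StringBuilder partStr = new StringBuilder();
        Iterator<String> it = parts.iterator();
        while (it.hasNext()) {
          partStr.append(tableName).append('.').append(DATE_PART_KEY)
              .append(" = '").append(it.next()).append("'");
          if (it.hasNext()) {
            partStr.append(" OR ");
          }
        }
        return partStr.toString();
      }

      public static void main(String[] args) {
        // Fact path: one term per resolved partition (illustrative values).
        System.out.println(getWherePartClause("testcube",
            Arrays.asList("2013-04-05-09", "2013-04-05-10")));
        // testcube.dt = '2013-04-05-09' OR testcube.dt = '2013-04-05-10'

        // Dimension path: the single 'latest' marker partition.
        System.out.println(getWherePartClause("citytable",
            Arrays.asList("latest")));
        // citytable.dt = 'latest'
      }
    }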

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java?rev=1464916&r1=1464915&r2=1464916&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java Fri Apr  5 10:39:27 2013
@@ -1,6 +1,7 @@
 package org.apache.hadoop.hive.ql.cube.processors;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -66,7 +67,11 @@ public class CubeDriver extends Driver {
   }
 
   private List<String> getSupportedStorages(HiveConf conf) {
-    // TODO Auto-generated method stub
+    String[] storages = conf.getStrings(
+        HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString());
+    if (storages != null) {
+      return Arrays.asList(storages);
+    }
     return null;
   }
 }
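
getSupportedStorages() now reads a comma-separated storage list from the configuration; an unset value yields null, which CubeQueryContextWithStorage treats as "all storages supported". A standalone sketch, using an assumed spelling of the property key (the driver actually uses HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString()):

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;

    public class SupportedStoragesSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        String key = "hive.driver.supported.storages"; // assumed key name
        conf.set(key, "C1,C2");
        String[] storages = conf.getStrings(key); // splits on commas
        List<String> supported =
            (storages != null) ? Arrays.asList(storages) : null;
        System.out.println(supported); // [C1, C2]
      }
    }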

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java?rev=1464916&r1=1464915&r2=1464916&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java Fri Apr  5 10:39:27 2013
@@ -113,11 +113,39 @@ public class CubeTestSetup {
         storageAggregatePeriods);
   }
 
+  private void createDimWithTwoStorages(CubeMetastoreClient client)
+      throws HiveException {
+    String dimName = "citytable";
+
+    List<FieldSchema>  dimColumns = new ArrayList<FieldSchema>();
+    dimColumns.add(new FieldSchema("id", "int", "code"));
+    dimColumns.add(new FieldSchema("name", "string", "field1"));
+    dimColumns.add(new FieldSchema("stateid", "int", "state id"));
+
+    Map<String, TableReference> dimensionReferences =
+        new HashMap<String, TableReference>();
+    dimensionReferences.put("stateid", new TableReference("statetable", "id"));
+
+    Storage hdfsStorage1 = new HDFSStorage("C1",
+        TextInputFormat.class.getCanonicalName(),
+        HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    Storage hdfsStorage2 = new HDFSStorage("C2",
+        TextInputFormat.class.getCanonicalName(),
+        HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    Map<Storage, UpdatePeriod> snapshotDumpPeriods =
+        new HashMap<Storage, UpdatePeriod>();
+    snapshotDumpPeriods.put(hdfsStorage1, UpdatePeriod.HOURLY);
+    snapshotDumpPeriods.put(hdfsStorage2, null);
+    client.createCubeDimensionTable(dimName, dimColumns, dimensionReferences,
+        snapshotDumpPeriods);
+  }
+
   public void createSources() throws Exception {
     CubeMetastoreClient client =  CubeMetastoreClient.getInstance(
         new HiveConf(this.getClass()));
     createCube(client);
     createCubeFact(client);
+    createDimWithTwoStorages(client);
   }
 
 }

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java?rev=1464916&r1=1464915&r2=1464916&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java Fri Apr  5 10:39:27 2013
@@ -4,6 +4,7 @@ import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.cube.parse.CubeTestSetup;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -13,13 +14,13 @@ import org.junit.Test;
 
 public class TestCubeDriver {
 
-  private static CubeDriver driver;
+  private final Configuration conf = new Configuration();
+  private CubeDriver driver;
 
   @BeforeClass
   public static void setup() throws Exception {
     CubeTestSetup setup = new CubeTestSetup();
     setup.createSources();
-    driver = new CubeDriver(new HiveConf());
   }
 
   public static String HOUR_FMT = "yyyy-MM-dd HH";
@@ -29,8 +30,9 @@ public class TestCubeDriver {
     return HOUR_PARSER.format(dt);
   }
 
-  @Test
+  //@Test
   public void testSimpleQuery1() throws Exception {
+    driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     Throwable th = null;
     try {
       String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
@@ -44,6 +46,7 @@ public class TestCubeDriver {
 
   @Test
   public void testSimpleQuery2() throws Exception {
+    driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     Calendar cal = Calendar.getInstance();
     Date now = cal.getTime();
     System.out.println("Test now:" + now);
@@ -58,4 +61,37 @@ public class TestCubeDriver {
     //Assert.assertEquals(queries[1], cubeql.toHQL());
   }
 
+  @Test
+  public void testDimensionQueryWithMultipleStorages() throws Exception {
+    driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
+    String hqlQuery = driver.compileCubeQuery("select name, stateid from citytable");
+    System.out.println("cube hql:" + hqlQuery);
+
+    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C2");
+    driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
+    hqlQuery = driver.compileCubeQuery("select name, stateid from citytable");
+    System.out.println("cube hql:" + hqlQuery);
+    //Assert.assertEquals(queries[1], cubeql.toHQL());
+
+    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C1");
+    driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
+    hqlQuery = driver.compileCubeQuery("select name, stateid from citytable");
+    System.out.println("cube hql:" + hqlQuery);
+
+    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "");
+    driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
+    hqlQuery = driver.compileCubeQuery("select name, stateid from citytable");
+    System.out.println("cube hql:" + hqlQuery);
+
+  }
+
+  @Test
+  public void testLimitQuery() throws Exception {
+    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C2");
+    driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
+    String hqlQuery = driver.compileCubeQuery("select name, stateid from citytable limit 100");
+    System.out.println("cube hql:" + hqlQuery);
+    //Assert.assertEquals(queries[1], cubeql.toHQL());
+  }
+
 }