Posted to commits@hive.apache.org by am...@apache.org on 2013/05/02 10:39:28 UTC

svn commit: r1478299 - in /hive/branches/HIVE-4115/ql/src: java/org/apache/hadoop/hive/ql/cube/metadata/ java/org/apache/hadoop/hive/ql/cube/parse/ test/org/apache/hadoop/hive/ql/cube/parse/ test/org/apache/hadoop/hive/ql/cube/processors/

Author: amareshwari
Date: Thu May  2 08:39:27 2013
New Revision: 1478299

URL: http://svn.apache.org/r1478299
Log:
Add candidate table resolver - rewriter

Removed:
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CandidateTableResolver.java
Modified:
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/AbstractCubeTable.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/AbstractCubeTable.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/AbstractCubeTable.java?rev=1478299&r1=1478298&r2=1478299&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/AbstractCubeTable.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/AbstractCubeTable.java Thu May  2 08:39:27 2013
@@ -73,4 +73,9 @@ public abstract class AbstractCubeTable 
     }
     return true;
   }
+
+  @Override
+  public String toString() {
+    return getName();
+  }
 }

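For context: the toString() override above is what lets later diagnostics
concatenate a table object straight into a message, as the candidate-table
pruning below does. A minimal illustration (the stub class and table name are
hypothetical, not part of the patch):

    abstract class Table {
      abstract String getName();

      @Override
      public String toString() {
        return getName();
      }
    }

    public class ToStringDemo {
      public static void main(String[] args) {
        Table fact = new Table() {
          String getName() { return "testfact"; }
        };
        // prints: Not considering the fact table:testfact
        System.out.println("Not considering the fact table:" + fact);
      }
    }
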
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java?rev=1478299&r1=1478298&r2=1478299&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java Thu May  2 08:39:27 2013
@@ -45,20 +45,36 @@ public class CubeQueryContext {
 
   private final ASTNode ast;
   private final QB qb;
+  private String clauseName = null;
   private final HiveConf conf;
+
   private String fromDateRaw;
   private String toDateRaw;
+  private Date timeFrom;
+  private Date timeTo;
+
+  // metadata
   private Cube cube;
+  private List<String> cubeMeasureNames;
+  private List<String> cubeDimNames;
   protected Set<CubeDimensionTable> dimensions = new HashSet<CubeDimensionTable>();
+  private final Map<AbstractCubeTable, List<String>> cubeTabToCols =
+      new HashMap<AbstractCubeTable, List<String>>();
   protected Set<CubeFactTable> candidateFactTables = new HashSet<CubeFactTable>();
-  private final Map<String, List<String>> tblToColumns = new HashMap<String, List<String>>();
-  private Date timeFrom;
-  private Date timeTo;
-  private String clauseName = null;
-  private Map<String, List<String>> partitionCols;
+
+  // fields queried
+  private final Map<String, List<String>> tblAliasToColumns = new HashMap<String, List<String>>();
+  private final Set<String> cubeColumnsQueried = new HashSet<String>();
+  private final Map<String, String> columnToTabAlias = new HashMap<String, String>();
   protected Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartitionMap =
       new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
+  private final Map<CubeQueryExpr, Set<String>> exprToCols = new HashMap<CubeQueryExpr, Set<String>>();
+  private final Map<CubeQueryExpr, Set<String>> queryExprToExprs = new HashMap<CubeQueryExpr, Set<String>>();
+  private final Map<String, String> exprToAlias = new HashMap<String, String>();
+  private final Set<String> aggregateCols = new HashSet<String>();
+  private final Set<String> aggregateExprs = new HashSet<String>();
 
+  // storage specific
   private List<String> supportedStorages;
   private boolean allStoragesSupported;
   private final Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factStorageMap =
@@ -69,16 +85,8 @@ public class CubeQueryContext {
       new HashMap<String, String>();
   private final Map<AbstractCubeTable, String> storageTableToQuery =
       new HashMap<AbstractCubeTable, String>();
-  private final Map<String, String> columnToTabAlias = new HashMap<String, String>();
-  private List<String> cubeMeasureNames;
-  private List<String> cubeDimNames;
-  private final Map<AbstractCubeTable, List<String>> cubeTabToCols =
-      new HashMap<AbstractCubeTable, List<String>>();
-  private final Map<CubeQueryExpr, Set<String>> exprToCols = new HashMap<CubeQueryExpr, Set<String>>();
-  private final Map<CubeQueryExpr, Set<String>> queryExprToExprs = new HashMap<CubeQueryExpr, Set<String>>();
-  private final Map<String, String> exprToAlias = new HashMap<String, String>();
-  private final Set<String> aggregateCols = new HashSet<String>();
-  private final Set<String> aggregateExprs = new HashSet<String>();
+
+  // query trees
   private ASTNode fromTree;
   private String whereTree;
   private String havingTree;
@@ -119,6 +127,7 @@ public class CubeQueryContext {
     extractTimeRange();
     extractColumns();
     extractTabAliasForCol();
+    findCandidateFactTables();
   }
 
   public boolean hasCubeInQuery() {
@@ -215,13 +224,11 @@ public class CubeQueryContext {
     Date now = new Date();
 
     try {
-      timeFrom = DateUtils.resolveDate(fromDateRaw, now);
-      timeTo = DateUtils.resolveDate(toDateRaw, now);
+      timeFrom = DateUtil.resolveDate(fromDateRaw, now);
+      timeTo = DateUtil.resolveDate(toDateRaw, now);
     } catch (HiveException e) {
       throw new SemanticException(e);
     }
-    System.out.println("timeFrom:" + timeFrom);
-    System.out.println("timeTo:" + timeTo);
   }
 
   private void extractColumns() throws SemanticException {
@@ -248,15 +255,20 @@ public class CubeQueryContext {
     for (CubeQueryExpr expr : CubeQueryExpr.values()) {
       Set<String> columns = new HashSet<String>();
       exprToCols.put(expr, columns);
-      getColsForTree(getExprTree(expr), columns, tblToColumns, exprToAlias);
+      getColsForTree(getExprTree(expr), columns, tblAliasToColumns, exprToAlias);
     }
 
     for (ASTNode aggrTree : qb.getParseInfo().getAggregationExprsForClause(clauseName).values()) {
       getColsForTree(aggrTree, aggregateCols, null, null);
       String aggr = HQLParser.getString(aggrTree);
-      System.out.println("Adding aggr expr:" + aggr);
       aggregateExprs.add(aggr.toLowerCase());
     }
+    if (cube != null) {
+      String cubeAlias = getAliasForTabName(cube.getName());
+      if (tblAliasToColumns.get(cubeAlias) != null) {
+        cubeColumnsQueried.addAll(tblAliasToColumns.get(cubeAlias));
+      }
+    }
   }
 
   private ASTNode getExprTree(CubeQueryExpr expr) {
@@ -340,7 +352,7 @@ public class CubeQueryContext {
     });
   }
   private void extractTabAliasForCol() throws SemanticException {
-    List<String> columns = tblToColumns.get(DEFAULT_TABLE);
+    List<String> columns = tblAliasToColumns.get(DEFAULT_TABLE);
     if (columns == null) {
       return;
     }
@@ -348,13 +360,12 @@ public class CubeQueryContext {
       if (cube != null) {
         List<String> cols = cubeTabToCols.get(cube);
         if (cols.contains(col.toLowerCase())) {
-          System.out.println("Found table " + cube.getName());
           columnToTabAlias.put(col, getAliasForTabName(cube.getName()));
+          cubeColumnsQueried.add(col);
         }
       }
       for (CubeDimensionTable dim: dimensions) {
         if (cubeTabToCols.get(dim).contains(col.toLowerCase())) {
-          System.out.println("Found table " + dim.getName());
           columnToTabAlias.put(col, dim.getName());
           break;
         }
@@ -366,6 +377,29 @@ public class CubeQueryContext {
     }
   }
 
+  private void findCandidateFactTables() throws SemanticException {
+    if (cube != null) {
+      // go over the columns accessed in the query and find out which tables
+      // can answer the query
+      for (Iterator<CubeFactTable> i = candidateFactTables.iterator(); i.hasNext();) {
+        CubeFactTable fact = i.next();
+        List<String> factCols = cubeTabToCols.get(fact);
+        for (String col : cubeColumnsQueried) {
+          if (!factCols.contains(col)) {
+            System.out.println("Not considering the fact table:" + fact +
+                " as column " + col + " is not available");
+            i.remove();
+            break;
+          }
+        }
+      }
+      if (candidateFactTables.size() == 0) {
+        throw new SemanticException("No candidate fact table available to" +
+            " answer the query");
+      }
+    }
+  }
+
   public String getFromDateRaw() {
     return fromDateRaw;
   }
@@ -390,7 +424,7 @@ public class CubeQueryContext {
     return qb;
   }
 
-  public Set<CubeFactTable> getFactTables() {
+  public Set<CubeFactTable> getCandidateFactTables() {
     return candidateFactTables;
   }
 
@@ -595,7 +629,6 @@ public class CubeQueryContext {
 
   public ASTNode getFromTree() {
     if (cube != null) {
-      System.out.println("alias:" + getAliasForTabName(cube.getName()));
       return qb.getParseInfo().getSrcForAlias(getAliasForTabName(cube.getName()));
     }
     return qb.getParseInfo().getSrcForAlias(qb.getTabAliases().iterator().next());
@@ -653,6 +686,7 @@ public class CubeQueryContext {
     List<String> qstrs = new ArrayList<String>();
     qstrs.add(getSelectTree());
     String fromString = HQLParser.getString(getFromTree()).toLowerCase();
+    String whereString = getWhereTree(factStorageTable);
     for (Map.Entry<AbstractCubeTable, String> entry :
       storageTableToQuery.entrySet()) {
       String src = entry.getKey().getName().toLowerCase();
@@ -664,7 +698,6 @@ public class CubeQueryContext {
     if (joinTree != null) {
       qstrs.add(HQLParser.getString(joinTree));
     }
-    String whereString = getWhereTree(factStorageTable);
     if (whereString != null) {
       qstrs.add(whereString);
     }
@@ -689,46 +722,63 @@ public class CubeQueryContext {
     return String.format(qfmt, getQueryTreeStrings(tableName));
   }
 
-  public String getWhereTree(String factStorageTable) {
+  private void appendWhereClause(StringBuilder whereWithoutTimerange,
+      String whereClause, boolean hasMore) {
+    if (hasMore) {
+      whereWithoutTimerange.append(" AND ");
+    }
+    appendWhereClause(whereWithoutTimerange, whereClause);
+  }
+
+  private void appendWhereClause(CubeDimensionTable dim, StringBuilder whereString,
+      boolean hasMore) {
+    String storageTable = dimStorageMap.get(dim).get(0);
+    storageTableToQuery.put(dim, storageTable);
+    String whereClause = storageTableToWhereClause.get(storageTable);
+    if (whereClause != null) {
+      appendWhereClause(whereString, whereClause, hasMore);
+    }
+  }
+
+  private void appendWhereClause(StringBuilder whereWithoutTimerange,
+      String whereClause) {
+    whereWithoutTimerange.append("(");
+    whereWithoutTimerange.append(whereClause);
+    whereWithoutTimerange.append(")");
+  }
+
+  private String getWhereTree(String factStorageTable) {
     String originalWhereString = getWhereTree();
-    String whereWithoutTimerange;
+    StringBuilder whereWithoutTimerange;
+
     if (factStorageTable != null) {
-      whereWithoutTimerange = originalWhereString.substring(0,
-          originalWhereString.indexOf(CubeQueryContext.TIME_RANGE_FUNC));
+      whereWithoutTimerange = new StringBuilder(originalWhereString.substring(0,
+          originalWhereString.indexOf(CubeQueryContext.TIME_RANGE_FUNC)));
+      // add where clause for fact
+      appendWhereClause(whereWithoutTimerange, storageTableToWhereClause.get(
+          factStorageTable));
     } else {
-      whereWithoutTimerange = originalWhereString;
+      if (originalWhereString != null) {
+        whereWithoutTimerange = new StringBuilder(originalWhereString);
+      } else {
+        whereWithoutTimerange = new StringBuilder();
+      }
     }
-    boolean dimensionsAdded = false;
+
     // add where clause for all dimensions
     Iterator<CubeDimensionTable> it = dimensions.iterator();
     if (it.hasNext()) {
-      dimensionsAdded = true;
       CubeDimensionTable dim = it.next();
+      appendWhereClause(dim, whereWithoutTimerange, factStorageTable != null);
       while (it.hasNext()) {
-        String storageTable = dimStorageMap.get(dim).get(0);
-        storageTableToQuery.put(dim, storageTable);
-        whereWithoutTimerange += "(";
-        whereWithoutTimerange += storageTableToWhereClause.get(storageTable);
-        whereWithoutTimerange += ") AND";
-        dim = it.next();
+        dim = it.next();
+        appendWhereClause(dim, whereWithoutTimerange, true);
       }
-      String storageTable = dimStorageMap.get(dim).get(0);
-      storageTableToQuery.put(dim, storageTable);
-      whereWithoutTimerange += "(";
-      whereWithoutTimerange += storageTableToWhereClause.get(storageTable);
-      whereWithoutTimerange += ") ";
     }
-    if (factStorageTable != null) {
-      if (dimensionsAdded) {
-        whereWithoutTimerange += " AND ";
-      }
-      // add where clause for fact;
-      whereWithoutTimerange += "(";
-      whereWithoutTimerange +=  storageTableToWhereClause.get(
-          factStorageTable);
-      whereWithoutTimerange += ") ";
+    if (whereWithoutTimerange.length() == 0) {
+      return null;
     }
-    return whereWithoutTimerange;
+    return whereWithoutTimerange.toString();
   }
 
   public String toHQL() throws SemanticException {
@@ -803,7 +853,7 @@ public class CubeQueryContext {
   }
 
   public Map<String, List<String>> getTblToColumns() {
-    return tblToColumns;
+    return tblAliasToColumns;
   }
 
   public Map<String, String> getColumnsToTableAlias() {
@@ -836,7 +886,6 @@ public class CubeQueryContext {
 
   public boolean isCubeMeasure(String col) {
     String[] split = col.split("\\.");
-    System.out.println("Looking for col" + col + " split" + split.length);
     if (split.length <= 1) {
       return cubeMeasureNames.contains(col);
     } else {
@@ -863,4 +912,17 @@ public class CubeQueryContext {
   public String getAlias(String expr) {
     return exprToAlias.get(expr);
   }
+
+  public Set<String> getCubeColumnsQueried() {
+    return cubeColumnsQueried;
+  }
+
+  public Map<AbstractCubeTable, List<String>> getCubeTabToCols() {
+    return cubeTabToCols;
+  }
+
+  public void removeCandidateFact(CubeFactTable fact) {
+    candidateFactTables.remove(fact);
+  }
+
 }

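The heart of this change is findCandidateFactTables(): a fact table remains a
candidate only if it supplies every cube column the query references. A
self-contained sketch of that pruning with the cube types reduced to plain
collections (class and table names here are illustrative, not from the patch;
IllegalStateException stands in for the SemanticException thrown above):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class CandidatePruningSketch {
      static void prune(Set<String> candidateFacts,
          Map<String, List<String>> factToCols, Set<String> queriedCols) {
        for (Iterator<String> i = candidateFacts.iterator(); i.hasNext();) {
          String fact = i.next();
          if (!factToCols.get(fact).containsAll(queriedCols)) {
            i.remove(); // this fact cannot answer the query
          }
        }
        if (candidateFacts.isEmpty()) {
          throw new IllegalStateException(
              "No candidate fact table available to answer the query");
        }
      }

      public static void main(String[] args) {
        Map<String, List<String>> factToCols =
            new HashMap<String, List<String>>();
        factToCols.put("summaryfact", Arrays.asList("msr2", "cityid"));
        factToCols.put("rawfact", Arrays.asList("msr2", "cityid", "dim1"));
        Set<String> candidates = new HashSet<String>(factToCols.keySet());
        // a query touching dim1 prunes summaryfact, leaving only rawfact
        prune(candidates, factToCols,
            new HashSet<String>(Arrays.asList("dim1", "msr2")));
        System.out.println(candidates); // [rawfact]
      }
    }

In the patch the same walk runs over candidateFactTables with CubeFactTable
objects, using the cubeTabToCols map populated from the metastore.
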
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java?rev=1478299&r1=1478298&r2=1478299&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java Thu May  2 08:39:27 2013
@@ -30,7 +30,6 @@ public class CubeQueryRewriter {
     phase1Rewriters.add(new GroupbyResolver(conf));
     //Rewrite base trees (groupby, having, orderby, limit) using aliases
     phase1Rewriters.add(new AliasReplacer(conf));
-    phase1Rewriters.add(new CandidateTableResolver(conf));
     //Resolve partition columns and table names
     phase1Rewriters.add(new PartitionResolver(conf));
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java?rev=1478299&r1=1478298&r2=1478299&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java Thu May  2 08:39:27 2013
@@ -223,7 +223,6 @@ public class DateUtil {
       cal.set(Calendar.SECOND, 0);
       break;
     }
-    System.out.println("Date:" + toDate + " Floordate for interval:" + interval + " is " + cal.getTime());
     return cal.getTime();
   }
 

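The hunk above only drops a debug println from getFloorDate(); for reference,
"flooring" a date truncates it down to an update-period boundary. A minimal
sketch of the DAILY case with java.util.Calendar (the MILLISECOND reset is
added here for a clean boundary and is not shown in the patch):

    import java.util.Calendar;
    import java.util.Date;

    public class FloorDateSketch {
      // Floor a date to midnight, the DAILY boundary.
      static Date floorToDay(Date d) {
        Calendar cal = Calendar.getInstance();
        cal.setTime(d);
        cal.set(Calendar.HOUR_OF_DAY, 0);
        cal.set(Calendar.MINUTE, 0);
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);
        return cal.getTime();
      }

      public static void main(String[] args) {
        // e.g. 2013-05-02 08:39:27 floors to 2013-05-02 00:00:00
        System.out.println(floorToDay(new Date()));
      }
    }
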
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java?rev=1478299&r1=1478298&r2=1478299&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java Thu May  2 08:39:27 2013
@@ -34,14 +34,12 @@ public class GroupbyResolver implements 
     for (String expr : selectExprs) {
       if (cubeql.hasAggregates()) {
         String alias = cubeql.getAlias(expr);
-        System.out.println("expr:" + expr + "alias:" + alias);
         if (alias != null) {
           expr = expr.substring(0, (expr.length()-alias.length())).trim();
         }
         if (!groupByExprs.contains(expr)) {
           if (!cubeql.isAggregateExpr(expr)) {
             String groupbyExpr = expr;
-            System.out.println("groupby expr:" + groupbyExpr);
             if (groupByTree != null) {
               groupByTree += ", ";
               groupByTree += groupbyExpr;

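The code around this hunk promotes any non-aggregate select expression into
the GROUP BY tree. A simplified model of that rule (the startsWith check is a
crude stand-in for cubeql.isAggregateExpr(), used here only for illustration):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class GroupbyPromotionSketch {
      static String promote(List<String> selectExprs,
          List<String> groupByExprs, String groupByTree) {
        for (String expr : selectExprs) {
          if (groupByExprs.contains(expr)
              || expr.toUpperCase().startsWith("SUM(")) {
            continue; // already grouped, or an aggregate expression
          }
          groupByTree = (groupByTree == null) ? expr
              : groupByTree + ", " + expr;
        }
        return groupByTree;
      }

      public static void main(String[] args) {
        // "select cityid, SUM(msr2) ..." yields "cityid" for the GROUP BY tree
        System.out.println(promote(Arrays.asList("cityid", "SUM(msr2)"),
            Collections.<String>emptyList(), null));
      }
    }
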
Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java?rev=1478299&r1=1478298&r2=1478299&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java Thu May  2 08:39:27 2013
@@ -4,6 +4,7 @@ import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
@@ -19,7 +20,7 @@ public class PartitionResolver implement
 
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
-    if (!cubeql.getFactTables().isEmpty()) {
+    if (!cubeql.getCandidateFactTables().isEmpty()) {
       Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartitionMap =
           new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
       Date fromDate = cubeql.getFromDate();
@@ -27,32 +28,34 @@ public class PartitionResolver implement
 
       Calendar cal = Calendar.getInstance();
       cal.setTime(fromDate);
-      for (CubeFactTable fact : cubeql.getFactTables()) {
+      for (Iterator<CubeFactTable> i = cubeql.getCandidateFactTables().iterator(); i.hasNext();) {
+        CubeFactTable fact = i.next();
         Map<UpdatePeriod, List<String>> partitionColMap =
             new HashMap<UpdatePeriod, List<String>>();
         factPartitionMap.put(fact, partitionColMap);
-        getPartitions(fact, fromDate, toDate, partitionColMap);
+        if (!getPartitions(fact, fromDate, toDate, partitionColMap, cubeql)) {
+          i.remove();
+        }
       }
       // set partition cols map in cubeql
       cubeql.setFactPartitionMap(factPartitionMap);
     }
   }
 
-  void getPartitions(CubeFactTable fact, Date fromDate, Date toDate,
-      Map<UpdatePeriod, List<String>> partitionColMap)
+  private boolean getPartitions(CubeFactTable fact, Date fromDate, Date toDate,
+      Map<UpdatePeriod, List<String>> partitionColMap, CubeQueryContext cubeql)
           throws SemanticException {
-    System.out.println("getPartitions fromDate:" + fromDate + " toDate:" + toDate);
     if (fromDate.equals(toDate) || fromDate.after(toDate)) {
-      return;
+      return true;
     }
 
     UpdatePeriod interval = fact.maxIntervalInRange(fromDate, toDate);
     if (interval == null) {
-      throw new SemanticException("Could not find a partition for given range:"
-          + fromDate + "-" + toDate);
+      System.err.println("Could not find a partition for given range:"
+          + fromDate + "-" + toDate + " in fact:" + fact.getName());
+      return false;
     }
 
-    System.out.println("fact: " + fact.getName() + " max interval:" + interval);
     Date ceilFromDate = DateUtil.getCeilDate(fromDate, interval);
     Date floorToDate = DateUtil.getFloorDate(toDate, interval);
     List<String> partitions = fact.getPartitions(ceilFromDate, floorToDate,
@@ -65,9 +68,7 @@ public class PartitionResolver implement
       }
       parts.addAll(partitions);
     }
-    System.out.println("ceilFromDate for " + interval + " is " + ceilFromDate);
-    System.out.println("floorToDate for " + interval + " is " + floorToDate);
-    getPartitions(fact, fromDate, ceilFromDate, partitionColMap);
-    getPartitions(fact, floorToDate, toDate, partitionColMap);
+    return (getPartitions(fact, fromDate, ceilFromDate, partitionColMap, cubeql)
+        && getPartitions(fact, floorToDate, toDate, partitionColMap, cubeql));
   }
 }

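With this change getPartitions() reports failure instead of throwing, so the
resolver can drop just the fact table that cannot cover the range. The descent
itself covers the middle of the range with the coarsest update period that
fits and recurses on the partial edges with finer periods. An illustrative
model using integer hours in place of Dates (the periods and helper names are
made up for the example; the real code asks the fact for maxIntervalInRange
and uses DateUtil's ceil/floor dates):

    import java.util.ArrayList;
    import java.util.List;

    public class PartitionDescentSketch {
      static final int[] PERIODS = { 24, 1 }; // "DAILY", "HOURLY", in hours

      // Cover [from, to) with the coarsest period that fits, then recurse
      // on the uncovered edges; false means the range cannot be covered.
      static boolean cover(int from, int to, List<String> parts) {
        if (from >= to) {
          return true;                          // empty range: covered
        }
        int p = maxPeriodInRange(from, to);
        if (p < 0) {
          return false;                         // no period fits: give up
        }
        int ceil = ((from + p - 1) / p) * p;    // first full-period boundary
        int floor = (to / p) * p;               // last full-period boundary
        parts.add(p + "h:[" + ceil + "," + floor + ")");
        return cover(from, ceil, parts) && cover(floor, to, parts);
      }

      static int maxPeriodInRange(int from, int to) {
        for (int p : PERIODS) {
          if (((from + p - 1) / p) * p + p <= to) {   // one full period fits
            return p;
          }
        }
        return -1;
      }

      public static void main(String[] args) {
        List<String> parts = new ArrayList<String>();
        // hours 10 .. 56 (day 0 10:00 to day 2 08:00): one full day [24,48),
        // plus hourly edges [10,24) and [48,56)
        System.out.println(cover(10, 56, parts) + " " + parts);
      }
    }
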
Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java?rev=1478299&r1=1478298&r2=1478299&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java Thu May  2 08:39:27 2013
@@ -96,6 +96,7 @@ public class CubeTestSetup {
     Map<Storage, List<UpdatePeriod>> storageAggregatePeriods =
         new HashMap<Storage, List<UpdatePeriod>>();
     List<UpdatePeriod> updates  = new ArrayList<UpdatePeriod>();
+    updates.add(UpdatePeriod.MINUTELY);
     updates.add(UpdatePeriod.HOURLY);
     updates.add(UpdatePeriod.DAILY);
     updates.add(UpdatePeriod.MONTHLY);
@@ -158,7 +159,7 @@ public class CubeTestSetup {
     Map<Storage, UpdatePeriod> snapshotDumpPeriods =
         new HashMap<Storage, UpdatePeriod>();
     snapshotDumpPeriods.put(hdfsStorage1, UpdatePeriod.HOURLY);
-    snapshotDumpPeriods.put(hdfsStorage2, UpdatePeriod.MINUTELY);
+    snapshotDumpPeriods.put(hdfsStorage2, null);
     client.createCubeDimensionTable(dimName, dimColumns, dimensionReferences,
         snapshotDumpPeriods);
   }
@@ -169,7 +170,7 @@ public class CubeTestSetup {
     createCube(client);
     createCubeFact(client);
     createDimWithTwoStorages(client);
-   // createCubeFactMonthly(client);
+    createCubeFactMonthly(client);
   }
 
 }

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java?rev=1478299&r1=1478298&r2=1478299&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java Thu May  2 08:39:27 2013
@@ -17,10 +17,25 @@ public class TestCubeDriver {
   private Configuration conf;
   private CubeDriver driver;
 
+  static Date now;
+  static Date twodaysBack;
+  static Date twoMonthsBack;
+
   @BeforeClass
   public static void setup() throws Exception {
     CubeTestSetup setup = new CubeTestSetup();
     setup.createSources();
+    Calendar cal = Calendar.getInstance();
+    now = cal.getTime();
+    System.out.println("Test now:" + now);
+    cal.add(Calendar.DAY_OF_MONTH, -2);
+    twodaysBack = cal.getTime();
+    System.out.println("Test twodaysBack:" + twodaysBack);
+    cal = Calendar.getInstance();
+    cal.add(Calendar.MONTH, -2);
+    twoMonthsBack = cal.getTime();
+    System.out.println("Test twoMonthsBack:" + twoMonthsBack);
+
   }
 
   public static String HOUR_FMT = "yyyy-MM-dd HH";
@@ -53,15 +68,26 @@ public class TestCubeDriver {
   }
 
   @Test
+  public void testCandidateTables() throws Exception {
+    conf = new Configuration();
+    driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
+    Throwable th = null;
+    try {
+      String hqlQuery = driver.compileCubeQuery("select dim1, SUM(msr2)" +
+        " from testCube" +
+        " where time_range_in('" + getDateUptoHours(twodaysBack)
+        + "','" + getDateUptoHours(now) + "')");
+    } catch (SemanticException e) {
+      th = e;
+      e.printStackTrace();
+    }
+    Assert.assertNotNull(th);
+  }
+
+  @Test
   public void testCubeWhereQuery() throws Exception {
     conf = new Configuration();
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
-    Calendar cal = Calendar.getInstance();
-    Date now = cal.getTime();
-    System.out.println("Test now:" + now);
-    cal.add(Calendar.DAY_OF_MONTH, -2);
-    Date twodaysBack = cal.getTime();
-    System.out.println("Test twodaysBack:" + twodaysBack);
     System.out.println("Test from:" + getDateUptoHours(twodaysBack) + " to:" + getDateUptoHours(now));
     //String expected = "select SUM(testCube.msr2) from "
     String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
@@ -75,13 +101,6 @@ public class TestCubeDriver {
   public void testCubeJoinQuery() throws Exception {
     conf = new Configuration();
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
-    Calendar cal = Calendar.getInstance();
-    Date now = cal.getTime();
-    System.out.println("Test now:" + now);
-    cal.add(Calendar.DAY_OF_MONTH, -2);
-    Date twodaysBack = cal.getTime();
-    System.out.println("Test twodaysBack:" + twodaysBack);
-    System.out.println("Test from:" + getDateUptoHours(twodaysBack) + " to:" + getDateUptoHours(now));
     //String expected = "select SUM(testCube.msr2) from "
     String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube"
         + " join citytable on testCube.cityid = citytable.id"
@@ -156,13 +175,6 @@ public class TestCubeDriver {
   public void testCubeQueryWithAlias() throws Exception {
     conf = new Configuration();
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
-    Calendar cal = Calendar.getInstance();
-    Date now = cal.getTime();
-    System.out.println("Test now:" + now);
-    cal.add(Calendar.DAY_OF_MONTH, -2);
-    Date twodaysBack = cal.getTime();
-    System.out.println("Test twodaysBack:" + twodaysBack);
-    System.out.println("Test from:" + getDateUptoHours(twodaysBack) + " to:" + getDateUptoHours(now));
     String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
         " where time_range_in('" + getDateUptoHours(twodaysBack)
         + "','" + getDateUptoHours(now) + "')");
@@ -204,12 +216,6 @@ public class TestCubeDriver {
   public void testCubeWhereQueryForMonth() throws Exception {
     conf = new Configuration();
     driver = new CubeDriver(new HiveConf(new Configuration(), HiveConf.class));
-    Calendar cal = Calendar.getInstance();
-    Date now = cal.getTime();
-    System.out.println("Test now:" + now);
-    cal.add(Calendar.MONTH, -2);
-    Date twoMonthsBack = cal.getTime();
-    System.out.println("Test twoMonthsBack:" + twoMonthsBack);
     System.out.println("Test from:" + getDateUptoHours(twoMonthsBack) + " to:" + getDateUptoHours(now));
     String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
         " where time_range_in('" + getDateUptoHours(twoMonthsBack)
@@ -222,7 +228,7 @@ public class TestCubeDriver {
     hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
         " where time_range_in('" + getDateUptoMonth(twoMonthsBack)
         + "','" + getDateUptoMonth(now) + "')");
-
+    System.out.println("cube hql:" + hqlQuery);
   }
 
   @Test