Posted to commits@hive.apache.org by am...@apache.org on 2013/05/27 10:00:09 UTC

svn commit: r1486532 - in /hive/branches/HIVE-4115: common/src/java/org/apache/hadoop/hive/conf/ ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ ql/src/java/org/apache/hadoop/hive/ql/cube/processor...

Author: amareshwari
Date: Mon May 27 08:00:08 2013
New Revision: 1486532

URL: http://svn.apache.org/r1486532
Log:
Add check for partition existence while resolving partitions to be queried
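
In effect, partition resolution now walks the requested time range and verifies each candidate partition against the metastore before it lands in the rewritten query; a missing partition either falls back to finer update periods, is recorded as non-existing, or fails the query. A hedged sketch of opting into the strict behavior, using the key added to CubeQueryConstants in this commit:

    import org.apache.hadoop.hive.conf.HiveConf;

    HiveConf conf = new HiveConf();
    // Fail the rewrite when any partition in the range is missing, instead of
    // substituting non-existing partitions (the flag defaults to false).
    conf.setBoolean("cube.query.fail.if.data.partial", true);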

Modified:
    hive/branches/HIVE-4115/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryConstants.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestMaxUpdateInterval.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java

Modified: hive/branches/HIVE-4115/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/HIVE-4115/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Mon May 27 08:00:08 2013
@@ -775,8 +775,6 @@ public class HiveConf extends Configurat
       "org.apache.hadoop.hive.ql.exec.PTFPersistence$PartitionedByteBasedList"),
     HIVE_PTF_PARTITION_PERSISTENT_SIZE("hive.ptf.partition.persistence.memsize",
       (int) Math.pow(2, (5 + 10 + 10)) ), // 32MB
-
-    HIVE_DRIVER_SUPPORTED_STORAGES("hive.driver.supported.storages", null);
     ;
 
     public final String varname;

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java Mon May 27 08:00:08 2013
@@ -6,7 +6,6 @@ import java.util.Arrays;
 import java.util.Calendar;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -141,7 +140,8 @@ public final class CubeFactTable extends
   }
 
 
-  public UpdatePeriod maxIntervalInRange(Date from, Date to) {
+  public static UpdatePeriod maxIntervalInRange(Date from, Date to,
+      Set<UpdatePeriod> updatePeriods) {
     UpdatePeriod max = null;
 
     long diff = to.getTime() - from.getTime();
@@ -149,17 +149,13 @@ public final class CubeFactTable extends
       return null;
     }
 
-    Set<UpdatePeriod> updatePeriods = new HashSet<UpdatePeriod>();
-
-    for (List<UpdatePeriod> value : storageUpdatePeriods.values()) {
-      updatePeriods.addAll(value);
-    }
-
     // Use weight only till UpdatePeriod.DAILY
-    // Above Daily, check if at least one full update period is present between the dates
+    // Above Daily, check if at least one full update period is present
+    // between the dates
     UpdatePeriodComparator cmp = new UpdatePeriodComparator();
     for (UpdatePeriod i : updatePeriods) {
-      if (UpdatePeriod.YEARLY == i || UpdatePeriod.QUARTERLY == i || UpdatePeriod.MONTHLY == i) {
+      if (UpdatePeriod.YEARLY == i || UpdatePeriod.QUARTERLY == i
+          || UpdatePeriod.MONTHLY == i) {
         int intervals = 0;
         switch (i) {
         case YEARLY:
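
maxIntervalInRange is now static and takes the candidate update periods explicitly, so a caller can restrict it to the periods that actually have storage tables (as StorageTableResolver does below) rather than everything declared on the fact. A hedged usage sketch with illustrative dates:

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.EnumSet;
    import java.util.Set;

    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
    Date from = fmt.parse("2013-05-01");  // parse declares ParseException
    Date to = fmt.parse("2013-05-20");
    // Offer only the periods that have storage tables:
    Set<UpdatePeriod> periods = EnumSet.of(UpdatePeriod.HOURLY, UpdatePeriod.DAILY);
    // A 19-day range should resolve to DAILY.
    UpdatePeriod max = CubeFactTable.maxIntervalInRange(from, to, periods);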

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java Mon May 27 08:00:08 2013
@@ -86,7 +86,7 @@ public class CubeMetastoreClient {
 
   private void createFactStorage(String factName, Storage storage,
       StorageDescriptor parentSD, List<UpdatePeriod> updatePeriods)
-      throws HiveException {
+          throws HiveException {
     for (UpdatePeriod updatePeriod : updatePeriods) {
       createFactStorageUpdatePeriod(factName, storage, parentSD, updatePeriod);
     }
@@ -94,7 +94,7 @@ public class CubeMetastoreClient {
 
   private void createFactStorageUpdatePeriod(String factName, Storage storage,
       StorageDescriptor parentSD, UpdatePeriod updatePeriod)
-      throws HiveException {
+          throws HiveException {
     String storageTblName = MetastoreUtil.getFactStorageTableName(factName,
         updatePeriod, storage.getPrefix());
     createStorage(storageTblName, storage, parentSD);
@@ -102,7 +102,7 @@ public class CubeMetastoreClient {
 
   private void createDimStorage(String dimName, Storage storage,
       StorageDescriptor parentSD)
-      throws HiveException {
+          throws HiveException {
     String storageTblName = MetastoreUtil.getDimStorageTableName(dimName,
         storage.getPrefix());
     createStorage(storageTblName, storage, parentSD);
@@ -156,7 +156,7 @@ public class CubeMetastoreClient {
   public void createCubeFactTable(String cubeName, String factName,
       List<FieldSchema> columns,
       Map<Storage, List<UpdatePeriod>> storageAggregatePeriods, double weight)
-      throws HiveException {
+          throws HiveException {
     CubeFactTable factTable = new CubeFactTable(cubeName, factName, columns,
         getUpdatePeriods(storageAggregatePeriods), weight);
     createCubeTable(factTable, storageAggregatePeriods);
@@ -216,7 +216,7 @@ public class CubeMetastoreClient {
 
   public void createCubeTable(CubeFactTable factTable,
       Map<Storage, List<UpdatePeriod>> storageAggregatePeriods)
-      throws HiveException {
+          throws HiveException {
     // create virtual cube table in metastore
     StorageDescriptor sd = createCubeHiveTable(factTable);
 
@@ -291,7 +291,7 @@ public class CubeMetastoreClient {
 
   public void addPartition(CubeFactTable table, Storage storage,
       UpdatePeriod updatePeriod, Date partitionTimestamp)
-      throws HiveException {
+          throws HiveException {
     String storageTableName = MetastoreUtil.getFactStorageTableName(
         table.getName(), updatePeriod, storage.getPrefix());
     addPartition(storageTableName, storage, getPartitionSpec(updatePeriod,
@@ -301,7 +301,7 @@ public class CubeMetastoreClient {
   public void addPartition(CubeFactTable table, Storage storage,
       UpdatePeriod updatePeriod, Date partitionTimestamp,
       Map<String, String> partSpec)
-      throws HiveException {
+          throws HiveException {
     String storageTableName = MetastoreUtil.getFactStorageTableName(
         table.getName(), updatePeriod, storage.getPrefix());
     partSpec.putAll(getPartitionSpec(updatePeriod,
@@ -352,23 +352,24 @@ public class CubeMetastoreClient {
   boolean factPartitionExists(CubeFactTable fact,
       Storage storage, UpdatePeriod updatePeriod,
       Date partitionTimestamp, Map<String, String> partSpec)
-      throws HiveException {
+          throws HiveException {
     String storageTableName = MetastoreUtil.getFactStorageTableName(
         fact.getName(), updatePeriod, storage.getPrefix());
     return partitionExists(storageTableName, updatePeriod, partitionTimestamp,
         partSpec);
   }
 
-  boolean partitionExists(String storageTableName, UpdatePeriod updatePeriod,
+  public boolean partitionExists(String storageTableName,
+      UpdatePeriod updatePeriod,
       Date partitionTimestamp)
-      throws HiveException {
+          throws HiveException {
     return partitionExists(storageTableName,
         getPartitionSpec(updatePeriod, partitionTimestamp));
   }
 
   boolean partitionExists(String storageTableName, UpdatePeriod updatePeriod,
       Date partitionTimestamp, Map<String, String> partSpec)
-      throws HiveException {
+          throws HiveException {
     partSpec.putAll(getPartitionSpec(updatePeriod, partitionTimestamp));
     return partitionExists(storageTableName, partSpec);
   }
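
With partitionExists now public, a resolver (or any other client) can check a storage table for a given update period and timestamp before including it in a query. A hedged sketch; the storage table name here is illustrative only:

    import java.util.Date;
    import org.apache.hadoop.hive.conf.HiveConf;

    CubeMetastoreClient client = CubeMetastoreClient.getInstance(new HiveConf());
    // Throws HiveException on metastore errors.
    boolean present = client.partitionExists("c1_testfact_hourly",
        UpdatePeriod.HOURLY, new Date());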

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java Mon May 27 08:00:08 2013
@@ -12,9 +12,9 @@ public enum UpdatePeriod implements Name
   MINUTELY(Calendar.MINUTE, 60 * SECONDLY.weight(), "yyyy-MM-dd-HH-mm"),
   HOURLY(Calendar.HOUR_OF_DAY, 60 * MINUTELY.weight(), "yyyy-MM-dd-HH"),
   DAILY(Calendar.DAY_OF_MONTH, 24 * HOURLY.weight(), "yyyy-MM-dd"),
-  WEEKLY(Calendar.WEEK_OF_YEAR, 7 * DAILY.weight(), "yyyy-'W'ww-u"),
+  WEEKLY(Calendar.WEEK_OF_YEAR, 7 * DAILY.weight(), "YYYY-'W'ww"),
   MONTHLY(Calendar.MONTH, 30 * DAILY.weight(), "yyyy-MM"),
-  QUARTERLY(Calendar.MONTH, 3 * MONTHLY.weight(), "yyyy-M"),
+  QUARTERLY(Calendar.MONTH, 3 * MONTHLY.weight(), "yyyy-MM"),
   YEARLY(Calendar.YEAR, 12 * MONTHLY.weight(), "yyyy");
 
   public static final long MIN_INTERVAL = SECONDLY.weight();
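
On the format changes: 'u' (ISO day-of-week number) is not a valid java.text.SimpleDateFormat pattern letter before Java 8, and the week-year letter 'Y' in the new WEEKLY pattern requires Java 7, which is presumably why the weekly fact setup is commented out in CubeTestSetup below on older JDKs. A quick sanity check, as a sketch:

    import java.text.SimpleDateFormat;
    import java.util.Date;

    // Java 7+: prints e.g. "2013-W22"; older JDKs reject 'Y' as well.
    System.out.println(new SimpleDateFormat("YYYY-'W'ww").format(new Date()));
    // The old pattern fails at construction with IllegalArgumentException
    // on pre-8 JDKs:
    // new SimpleDateFormat("yyyy-'W'ww-u");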

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryConstants.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryConstants.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryConstants.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryConstants.java Mon May 27 08:00:08 2013
@@ -3,7 +3,13 @@ package org.apache.hadoop.hive.ql.cube.p
 public interface CubeQueryConstants {
   public static final String VALID_FACT_TABLES = "cube.query.valid.fact.tables";
   public static final String VALID_STORAGE_FACT_TABLES = "cube.query.valid." +
-  		"fact.storagetables";
+      "fact.storagetables";
   public static final String VALID_STORAGE_DIM_TABLES = "cube.query.valid." +
-  		"dim.storgaetables";
+      "dim.storgaetables";
+  public static final String DRIVER_SUPPORTED_STORAGES = "cube.query.driver." +
+  		"supported.storages";
+  public static final String FAIL_QUERY_ON_PARTIAL_DATA =
+      "cube.query.fail.if.data.partial";
+  public static final String NON_EXISTING_PARTITIONS =
+      "cube.query.nonexisting.partitions";
 }
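
DRIVER_SUPPORTED_STORAGES replaces the HIVE_DRIVER_SUPPORTED_STORAGES variable removed from HiveConf above, so the setting now lives in a plain Configuration. A minimal sketch:

    import org.apache.hadoop.conf.Configuration;

    Configuration conf = new Configuration();
    // Comma-separated storage names, read back via conf.getStrings(...) in
    // StorageTableResolver.getSupportedStorages below; unset means all
    // storages are supported.
    conf.set(CubeQueryConstants.DRIVER_SUPPORTED_STORAGES, "C1,C2");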

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java Mon May 27 08:00:08 2013
@@ -91,8 +91,6 @@ public class CubeQueryContext {
   protected Map<CubeFactTable, Map<UpdatePeriod, List<String>>>
   factPartitionMap =
       new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
-  private List<String> supportedStorages;
-  private boolean allStoragesSupported;
   private final Map<CubeFactTable, Map<UpdatePeriod, List<String>>>
   factStorageMap =
       new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
@@ -117,6 +115,7 @@ public class CubeQueryContext {
   private ASTNode whereAST;
   private ASTNode orderByAST;
   private ASTNode groupByAST;
+  private CubeMetastoreClient client;
 
   public CubeQueryContext(ASTNode ast, QB qb, HiveConf conf)
       throws SemanticException {
@@ -124,6 +123,11 @@ public class CubeQueryContext {
     this.qb = qb;
     this.conf = conf;
     this.clauseName = getClause();
+    try {
+      client = CubeMetastoreClient.getInstance(conf);
+    } catch (HiveException e) {
+      throw new SemanticException(e);
+    }
     if (qb.getParseInfo().getWhrForClause(clauseName) != null) {
       this.whereTree = HQLParser.getString(
           qb.getParseInfo().getWhrForClause(clauseName)).toLowerCase();
@@ -169,7 +173,6 @@ public class CubeQueryContext {
 
   private void extractMetaTables() throws SemanticException {
     try {
-      CubeMetastoreClient client = CubeMetastoreClient.getInstance(conf);
       List<String> tabAliases = new ArrayList<String>(qb.getTabAliases());
       for (String alias : tabAliases) {
         String tblName = qb.getTabNameForAlias(alias);
@@ -705,15 +708,6 @@ public class CubeQueryContext {
     this.factPartitionMap.putAll(factPartitionMap);
   }
 
-  public List<String> getSupportedStorages() {
-    return supportedStorages;
-  }
-
-  public void setSupportedStorages(List<String> supportedStorages) {
-    this.supportedStorages = supportedStorages;
-    this.allStoragesSupported = (supportedStorages == null);
-  }
-
   private final String baseQueryFormat = "SELECT %s FROM %s";
 
   String getQueryFormat() {
@@ -967,13 +961,6 @@ public class CubeQueryContext {
     return !storageTableToWhereClause.isEmpty();
   }
 
-  public boolean isStorageSupported(String storage) {
-    if (!allStoragesSupported) {
-      return supportedStorages.contains(storage);
-    }
-    return true;
-  }
-
   public Map<String, List<String>> getTblToColumns() {
     return tblAliasToColumns;
   }
@@ -1091,5 +1078,7 @@ public class CubeQueryContext {
     return groupByAST;
   }
 
-
+  public CubeMetastoreClient getMetastoreClient() {
+    return client;
+  }
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java Mon May 27 08:00:08 2013
@@ -11,54 +11,38 @@ import org.apache.hadoop.hive.ql.parse.S
 
 public class CubeQueryRewriter {
   private final Configuration conf;
-  private final List<ContextRewriter> phase1Rewriters =
+  private final List<ContextRewriter> rewriters =
       new ArrayList<ContextRewriter>();
-  private final List<ContextRewriter> phase2Rewriters =
-      new ArrayList<ContextRewriter>();
-
   public CubeQueryRewriter(Configuration conf) {
     this.conf = conf;
-    setupPhase1Rewriters();
-    setupPhase2Rewriters();
+    setupRewriters();
   }
 
-  private void setupPhase1Rewriters() {
+  private void setupRewriters() {
     // Resolve joins and generate base join tree
-    phase1Rewriters.add(new JoinResolver(conf));
+    rewriters.add(new JoinResolver(conf));
     // Rewrite base trees (groupby, having, orderby, limit) using aliases
-    phase1Rewriters.add(new AliasReplacer(conf));
+    rewriters.add(new AliasReplacer(conf));
     // Resolve aggregations and generate base select tree
-    phase1Rewriters.add(new AggregateResolver(conf));
-    phase1Rewriters.add(new GroupbyResolver(conf));
-    // Resolve partition columns and table names
-    phase1Rewriters.add(new PartitionResolver(conf));
-  }
-
-  private void setupPhase2Rewriters() {
-    phase2Rewriters.add(new StorageTableResolver(conf));
-    phase2Rewriters.add(new LeastPartitionResolver(conf));
-    phase2Rewriters.add(new LightestFactResolver(conf));
-    phase2Rewriters.add(new LeastDimensionResolver(conf));
+    rewriters.add(new AggregateResolver(conf));
+    rewriters.add(new GroupbyResolver(conf));
+    // Resolve storage partitions and table names
+    rewriters.add(new StorageTableResolver(conf));
+    rewriters.add(new LeastPartitionResolver(conf));
+    rewriters.add(new LightestFactResolver(conf));
+    rewriters.add(new LeastDimensionResolver(conf));
   }
 
-  public CubeQueryContext rewritePhase1(ASTNode astnode)
+  public CubeQueryContext rewrite(ASTNode astnode)
       throws SemanticException, ParseException {
     CubeSemanticAnalyzer analyzer = new CubeSemanticAnalyzer(
         new HiveConf(conf, HiveConf.class));
     analyzer.analyzeInternal(astnode);
     CubeQueryContext ctx = analyzer.getQueryContext();
-    rewrite(phase1Rewriters, ctx);
+    rewrite(rewriters, ctx);
     return ctx;
   }
 
-  public CubeQueryContext rewritePhase2(CubeQueryContext cubeql,
-      List<String> storages) throws SemanticException {
-    cubeql.setSupportedStorages(storages);
-    rewrite(phase2Rewriters, cubeql);
-    return cubeql;
-  }
-
-
   private void rewrite(List<ContextRewriter> rewriters, CubeQueryContext ctx)
       throws SemanticException {
     for (ContextRewriter rewriter : rewriters) {
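
The two-phase API collapses into a single rewrite pass. A sketch of the caller-side change, mirroring the CubeDriver diff below:

    // Before: storages were passed explicitly to the second phase.
    // CubeQueryContext ctx =
    //     rewriter.rewritePhase2(rewriter.rewritePhase1(tree), storages);

    // After: supported storages come from cube.query.driver.supported.storages.
    CubeQueryRewriter rewriter = new CubeQueryRewriter(conf);
    CubeQueryContext rewritten = rewriter.rewrite(tree);
    String hql = rewritten.toHQL();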

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java Mon May 27 08:00:08 2013
@@ -1,10 +1,7 @@
 package org.apache.hadoop.hive.ql.cube.parse;
 
 import java.util.ArrayList;
-import java.util.Calendar;
 import java.util.Date;
-import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
@@ -24,7 +21,7 @@ public class PartitionResolver implement
 
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
-    if (!cubeql.getCandidateFactTables().isEmpty()) {
+    /*if (!cubeql.getCandidateFactTables().isEmpty()) {
       Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartitionMap =
           new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
       Date fromDate = cubeql.getFromDate();
@@ -37,7 +34,7 @@ public class PartitionResolver implement
         CubeFactTable fact = i.next();
         Map<UpdatePeriod, List<String>> partitionColMap =
             new HashMap<UpdatePeriod, List<String>>();
-        if (!getPartitions(fact, fromDate, toDate, partitionColMap, cubeql)) {
+        if (!getPartitions(fact, fromDate, toDate, partitionColMap)) {
           i.remove();
         } else {
           factPartitionMap.put(fact, partitionColMap);
@@ -45,17 +42,17 @@ public class PartitionResolver implement
       }
       // set partition cols map in cubeql
       cubeql.setFactPartitionMap(factPartitionMap);
-    }
+    } */
   }
 
   private boolean getPartitions(CubeFactTable fact, Date fromDate, Date toDate,
-      Map<UpdatePeriod, List<String>> partitionColMap, CubeQueryContext cubeql)
+      Map<UpdatePeriod, List<String>> partitionColMap)
       throws SemanticException {
     if (fromDate.equals(toDate) || fromDate.after(toDate)) {
       return true;
     }
 
-    UpdatePeriod interval = fact.maxIntervalInRange(fromDate, toDate);
+    UpdatePeriod interval = fact.maxIntervalInRange(fromDate, toDate, null);
     if (interval == null) {
       LOG.info("Could not find partition for given range:"
           + fromDate + "-" + toDate + " in fact:" + fact.getName());
@@ -74,7 +71,7 @@ public class PartitionResolver implement
       }
       parts.addAll(partitions);
     }
-    return (getPartitions(fact, fromDate, ceilFromDate, partitionColMap, cubeql)
-    && getPartitions(fact, floorToDate, toDate, partitionColMap, cubeql));
+    return (getPartitions(fact, fromDate, ceilFromDate, partitionColMap)
+    && getPartitions(fact, floorToDate, toDate, partitionColMap));
   }
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java Mon May 27 08:00:08 2013
@@ -1,11 +1,16 @@
 package org.apache.hadoop.hive.ql.cube.parse;
 
+import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Date;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
@@ -13,98 +18,95 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.cube.metadata.CubeDimensionTable;
 import org.apache.hadoop.hive.ql.cube.metadata.CubeFactTable;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeMetastoreClient;
 import org.apache.hadoop.hive.ql.cube.metadata.MetastoreUtil;
 import org.apache.hadoop.hive.ql.cube.metadata.Storage;
 import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 public class StorageTableResolver implements ContextRewriter {
-  public static Log LOG = LogFactory.getLog(StorageTableResolver.class.getName());
+  public static Log LOG = LogFactory.getLog(
+      StorageTableResolver.class.getName());
 
   private final Configuration conf;
+  private final List<String> supportedStorages;
+  private final boolean allStoragesSupported;
+  CubeMetastoreClient client;
+  private final boolean failOnPartialData;
+  private final List<String> validFactStorageTables;
+  private final List<String> validDimTables;
+  private final Map<CubeFactTable, Map<UpdatePeriod, List<String>>>
+  factStorageMap =
+  new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
+  private final Map<CubeFactTable, Map<UpdatePeriod, List<String>>>
+  factPartMap =
+  new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
+  private final Map<CubeDimensionTable, List<String>> dimStorageMap =
+      new HashMap<CubeDimensionTable, List<String>>();
+  private final Map<String, String> storageTableToWhereClause =
+      new HashMap<String, String>();
+  private final List<String> nonExistingParts = new ArrayList<String>();
+
   public StorageTableResolver(Configuration conf) {
     this.conf = conf;
+    this.supportedStorages = getSupportedStorages(conf);
+    this.allStoragesSupported = (supportedStorages == null);
+    this.failOnPartialData = conf.getBoolean(
+        CubeQueryConstants.FAIL_QUERY_ON_PARTIAL_DATA, false);
     String str = conf.get(CubeQueryConstants.VALID_STORAGE_FACT_TABLES);
+    validFactStorageTables = StringUtils.isBlank(str) ? null :
+      Arrays.asList(StringUtils.split(str.toLowerCase()));
+    str = conf.get(CubeQueryConstants.VALID_STORAGE_DIM_TABLES);
+    validDimTables = StringUtils.isBlank(str) ? null :
+      Arrays.asList(StringUtils.split(str.toLowerCase()));
+  }
+
+  private List<String> getSupportedStorages(Configuration conf) {
+    String[] storages = conf.getStrings(
+        CubeQueryConstants.DRIVER_SUPPORTED_STORAGES);
+    if (storages != null) {
+      return Arrays.asList(storages);
+    }
+    return null;
   }
 
+  public boolean isStorageSupported(String storage) {
+    if (!allStoragesSupported) {
+      return supportedStorages.contains(storage);
+    }
+    return true;
+  }
+
+  Map<String, List<String>> storagePartMap =
+      new HashMap<String, List<String>>();
+
   @Override
   public void rewriteContext(CubeQueryContext cubeql)
       throws SemanticException {
-    Map<String, String> storageTableToWhereClause =
-        new HashMap<String, String>();
 
-    // resolve fact tables
-    Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factStorageMap =
-        new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
-    Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartMap =
-        cubeql.getFactPartitionMap();
-    String str = conf.get(CubeQueryConstants.VALID_STORAGE_FACT_TABLES);
-    List<String> validFactStorageTables = StringUtils.isBlank(str) ? null :
-      Arrays.asList(StringUtils.split(str.toLowerCase()));
-
-    // Find candidate tables wrt supported storages
-    for (CubeFactTable fact : factPartMap.keySet()) {
-      Map<UpdatePeriod, List<String>> storageTableMap =
-          new HashMap<UpdatePeriod, List<String>>();
-      factStorageMap.put(fact, storageTableMap);
-      Map<UpdatePeriod, List<String>> partitionColMap = factPartMap.get(fact);
-      for (UpdatePeriod updatePeriod : partitionColMap.keySet()) {
-        List<String> storageTables = new ArrayList<String>();
-        storageTableMap.put(updatePeriod, storageTables);
-        List<String> parts = partitionColMap.get(updatePeriod);
-        for (String storage : fact.getStorages()) {
-          if (cubeql.isStorageSupported(storage)) {
-            String tableName = MetastoreUtil.getFactStorageTableName(
-                fact.getName(), updatePeriod, Storage.getPrefix(storage))
-                .toLowerCase();
-            if (validFactStorageTables != null && !validFactStorageTables
-                .contains(tableName)) {
-              LOG.info("Not considering the fact storage table:" + tableName
-                  + " as it is not a valid fact storage");
-              continue;
-            }
-            storageTables.add(tableName);
-            storageTableToWhereClause.put(tableName, getWherePartClause(
-                cubeql.getAliasForTabName(fact.getCubeName()), parts));
-          } else {
-            LOG.info("Storage:" + storage + " is not supported");
-          }
-        }
-      }
+    client = cubeql.getMetastoreClient();
+    if (!cubeql.getCandidateFactTables().isEmpty()) {
+      // resolve storage table names
+      resolveFactStorageTableNames(cubeql);
+      cubeql.setFactStorageMap(factStorageMap);
+
+      // resolve storage partitions
+      resolveFactStoragePartitions(cubeql);
+      cubeql.setFactPartitionMap(factPartMap);
     }
-    cubeql.setFactStorageMap(factStorageMap);
-    for (Iterator<CubeFactTable> i =
-        cubeql.getCandidateFactTables().iterator(); i.hasNext();) {
-      CubeFactTable fact = i.next();
-      Map<UpdatePeriod, List<String>> storageTableMap = factStorageMap.get(
-          fact);
-      Map<UpdatePeriod, List<String>> partColMap = cubeql.getFactPartitionMap()
-          .get(fact);
-      Iterator<UpdatePeriod> it = partColMap.keySet().iterator();
-      while (it.hasNext()) {
-        UpdatePeriod updatePeriod = it.next();
-        if (storageTableMap.get(updatePeriod) == null ||
-            storageTableMap.get(updatePeriod).isEmpty()) {
-          LOG.info("Removing fact:" + fact +
-              " from candidate fact tables, as it does not have storage tables"
-              + " for update period" + updatePeriod);
-          i.remove();
-          break;
-        }
-      }
-    }
-
     // resolve dimension tables
-    Map<CubeDimensionTable, List<String>> dimStorageMap =
-        new HashMap<CubeDimensionTable, List<String>>();
-    str = conf.get(CubeQueryConstants.VALID_STORAGE_DIM_TABLES);
-    List<String> validDimTables = StringUtils.isBlank(str) ? null :
-      Arrays.asList(StringUtils.split(str.toLowerCase()));
+    resolveDimStorageTablesAndPartitions(cubeql);
+    cubeql.setDimStorageMap(dimStorageMap);
+
+    // set storage to whereclause
+    cubeql.setStorageTableToWhereClause(storageTableToWhereClause);
+  }
+
+  private void resolveDimStorageTablesAndPartitions(CubeQueryContext cubeql) {
     for (CubeDimensionTable dim : cubeql.getDimensionTables()) {
-      List<String> storageTables = new ArrayList<String>();
-      dimStorageMap.put(dim, storageTables);
       for (String storage : dim.getStorages()) {
-        if (cubeql.isStorageSupported(storage)) {
+        if (isStorageSupported(storage)) {
           String tableName = MetastoreUtil.getDimStorageTableName(
               dim.getName(), Storage.getPrefix(storage)).toLowerCase();
           if (validDimTables != null && !validDimTables.contains(tableName)) {
@@ -112,6 +114,11 @@ public class StorageTableResolver implem
                 + " as it is not a valid dim storage");
             continue;
           }
+          List<String> storageTables = dimStorageMap.get(dim);
+          if (storageTables == null) {
+            storageTables = new ArrayList<String>();
+            dimStorageMap.put(dim, storageTables);
+          }
           storageTables.add(tableName);
           if (dim.hasStorageSnapshots(storage)) {
             storageTableToWhereClause.put(tableName,
@@ -123,11 +130,179 @@ public class StorageTableResolver implem
         }
       }
     }
-    cubeql.setDimStorageMap(dimStorageMap);
-    cubeql.setStorageTableToWhereClause(storageTableToWhereClause);
   }
 
-  private String getWherePartClause(String tableName, List<String> parts) {
+  private void resolveFactStorageTableNames(CubeQueryContext cubeql) {
+    for (Iterator<CubeFactTable> i =
+        cubeql.getCandidateFactTables().iterator(); i.hasNext();) {
+      CubeFactTable fact = i.next();
+      Map<UpdatePeriod, List<String>> storageTableMap =
+          new HashMap<UpdatePeriod, List<String>>();
+      factStorageMap.put(fact, storageTableMap);
+      for (Map.Entry<String, List<UpdatePeriod>> entry : fact
+          .getUpdatePeriods().entrySet()) {
+        String storage = entry.getKey();
+        // skip storages that are not supported
+        if (!isStorageSupported(storage)) {
+          continue;
+        }
+        for (UpdatePeriod updatePeriod : entry.getValue()) {
+          String tableName;
+          // skip the update period if the storage is not valid
+          if ((tableName = getStorageTableName(fact, updatePeriod, storage))
+              == null) {
+            continue;
+          }
+          List<String> storageTables = storageTableMap.get(updatePeriod);
+          if (storageTables == null) {
+            storageTables = new ArrayList<String>();
+            storageTableMap.put(updatePeriod, storageTables);
+          }
+          storageTables.add(tableName);
+        }
+      }
+      if (storageTableMap.isEmpty()) {
+        LOG.info("Not considering the fact table:" + fact + " as it does not" +
+            " have any storage tables");
+        i.remove();
+      }
+    }
+  }
+
+  private void resolveFactStoragePartitions(CubeQueryContext cubeql)
+      throws SemanticException {
+    Date fromDate = cubeql.getFromDate();
+    Date toDate = cubeql.getToDate();
+
+    // Find candidate tables wrt supported storages
+    for (Iterator<CubeFactTable> i =
+        cubeql.getCandidateFactTables().iterator(); i.hasNext();) {
+      CubeFactTable fact = i.next();
+      Map<UpdatePeriod, List<String>> partitionColMap = getPartitionColMap(fact,
+          fromDate, toDate);
+      if (partitionColMap == null) {
+        LOG.info("Not considering the fact table:" + fact + " as it could not" +
+            " find partition for given range:" + fromDate + " - " + toDate);
+        i.remove();
+        continue;
+      }
+      factPartMap.put(fact, partitionColMap);
+      Map<UpdatePeriod, List<String>> storageTblMap = factStorageMap.get(fact);
+      for (UpdatePeriod updatePeriod : partitionColMap.keySet()) {
+        List<String> parts = partitionColMap.get(updatePeriod);
+        LOG.info("For fact:" + fact + " updatePeriod:" + updatePeriod
+            + " Parts:" + parts + " storageTables:"
+            + storageTblMap.get(updatePeriod));
+        for (String storageTableName : storageTblMap.get(updatePeriod)) {
+          storageTableToWhereClause.put(storageTableName, getWherePartClause(
+              cubeql.getAliasForTabName(fact.getCubeName()), parts));
+        }
+      }
+    }
+  }
+
+  private Map<UpdatePeriod, List<String>> getPartitionColMap(CubeFactTable fact,
+      Date fromDate, Date toDate)
+          throws SemanticException {
+    Map<UpdatePeriod, List<String>> partitionColMap =
+        new HashMap<UpdatePeriod, List<String>>();
+    Set<UpdatePeriod> updatePeriods = factStorageMap.get(fact).keySet();
+    try {
+      if (!getPartitions(fact, fromDate, toDate, partitionColMap,
+          updatePeriods, true)) {
+        return null;
+      }
+    } catch (HiveException e) {
+      throw new SemanticException(e);
+    }
+    return partitionColMap;
+  }
+
+  String getStorageTableName(CubeFactTable fact, UpdatePeriod updatePeriod,
+      String storage) {
+    String tableName = MetastoreUtil.getFactStorageTableName(
+        fact.getName(), updatePeriod, Storage.getPrefix(storage))
+        .toLowerCase();
+    if (validFactStorageTables != null && !validFactStorageTables
+        .contains(tableName)) {
+      return null;
+    }
+    return tableName;
+  }
+
+  private boolean getPartitions(CubeFactTable fact, Date fromDate, Date toDate,
+      Map<UpdatePeriod, List<String>> partitionColMap,
+      Set<UpdatePeriod> updatePeriods, boolean addNonExistingParts)
+          throws HiveException {
+    LOG.info("getPartitions for " + fact + " from fromDate:" + fromDate + " toDate:" + toDate);
+    if (fromDate.equals(toDate) || fromDate.after(toDate)) {
+      return true;
+    }
+
+    UpdatePeriod interval = CubeFactTable.maxIntervalInRange(fromDate, toDate,
+        updatePeriods);
+    LOG.info("Max interval for " + fact + interval);
+    if (interval == null) {
+      return false;
+    }
+
+    Date ceilFromDate = DateUtil.getCeilDate(fromDate, interval);
+    Date floorToDate = DateUtil.getFloorDate(toDate, interval);
+    List<String> storageTbls = factStorageMap.get(fact).get(interval);
+
+    // add partitions from ceilFrom to floorTo
+    String fmt = interval.format();
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(ceilFromDate);
+    List<String> partitions = new ArrayList<String>();
+    Date dt = cal.getTime();
+    while (dt.compareTo(floorToDate) < 0) {
+      String part = new SimpleDateFormat(fmt).format(cal.getTime());
+      cal.add(interval.calendarField(), 1);
+      boolean foundPart = false;
+      for (String storageTableName : storageTbls) {
+        if (client.partitionExists(storageTableName,
+            interval, dt)) {
+          partitions.add(part);
+          foundPart = true;
+          break;
+        }
+      }
+      if (!foundPart) {
+        LOG.info("Partition:" + part + " does not exist in any storage table");
+        Set<UpdatePeriod> newset = new HashSet<UpdatePeriod>();
+        newset.addAll(updatePeriods);
+        newset.remove(interval);
+        if (!getPartitions(fact, dt, cal.getTime(),
+            partitionColMap, newset, false)) {
+          if (!failOnPartialData && addNonExistingParts) {
+            LOG.info("Adding non existing partition" + part);
+            partitions.add(part);
+            nonExistingParts.add(part);
+            foundPart = true;
+          } else {
+            LOG.info("No finer granual partitions exist for" + part);
+            return false;
+          }
+        } else {
+          LOG.info("Finer granual partitions added for " + part);
+        }
+      }
+      dt = cal.getTime();
+    }
+    List<String> parts = partitionColMap.get(interval);
+    if (parts == null) {
+      parts = new ArrayList<String>();
+      partitionColMap.put(interval, parts);
+    }
+    parts.addAll(partitions);
+    return (getPartitions(fact, fromDate, ceilFromDate, partitionColMap,
+        updatePeriods, addNonExistingParts) && getPartitions(fact, floorToDate,
+            toDate, partitionColMap, updatePeriods, addNonExistingParts));
+  }
+
+  private static String getWherePartClause(String tableName,
+      List<String> parts) {
     if (parts.size() == 0) {
       return "";
     }
@@ -151,5 +326,4 @@ public class StorageTableResolver implem
     partStr.append("'");
     return partStr.toString();
   }
-
 }
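
getPartitions above covers [fromDate, toDate) with the coarsest available update period, checks each candidate partition against the metastore, falls back to finer periods for missing ones (or records them as non-existing when the partial-data flag allows it), and recurses on the two edge ranges. A self-contained sketch of just the range-splitting part, with DAILY/HOURLY hardcoded and the metastore existence checks omitted; all names here are illustrative, not the committed code:

    import java.text.SimpleDateFormat;
    import java.util.Calendar;

    // Illustrative only: models the range splitting in getPartitions above
    // with two granularities and no partition existence checks.
    public class PartitionRangeSketch {

      // Round up to the next day boundary (no-op if already on one).
      static Calendar ceilDay(Calendar c) {
        Calendar r = truncateToDay(c);
        if (!r.equals(c)) {
          r.add(Calendar.DAY_OF_MONTH, 1);
        }
        return r;
      }

      // Round down to the previous day boundary.
      static Calendar truncateToDay(Calendar c) {
        Calendar r = (Calendar) c.clone();
        r.set(Calendar.HOUR_OF_DAY, 0);
        r.set(Calendar.MINUTE, 0);
        r.set(Calendar.SECOND, 0);
        r.set(Calendar.MILLISECOND, 0);
        return r;
      }

      static void resolve(Calendar from, Calendar to) {
        Calendar ceilFrom = ceilDay(from);
        Calendar floorTo = truncateToDay(to);
        // Whole days between the rounded boundaries become DAILY partitions...
        SimpleDateFormat daily = new SimpleDateFormat("yyyy-MM-dd");
        for (Calendar d = (Calendar) ceilFrom.clone(); d.before(floorTo);
            d.add(Calendar.DAY_OF_MONTH, 1)) {
          System.out.println("DAILY  " + daily.format(d.getTime()));
        }
        // ...and the two edge ranges recurse at the finer granularity.
        hourly(from, ceilFrom);
        hourly(floorTo, to);
      }

      static void hourly(Calendar from, Calendar to) {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd-HH");
        for (Calendar h = (Calendar) from.clone(); h.before(to);
            h.add(Calendar.HOUR_OF_DAY, 1)) {
          System.out.println("HOURLY " + fmt.format(h.getTime()));
        }
      }

      public static void main(String[] args) {
        Calendar from = Calendar.getInstance();
        from.clear();
        from.set(2013, Calendar.MAY, 25, 7, 0, 0);
        Calendar to = Calendar.getInstance();
        to.clear();
        to.set(2013, Calendar.MAY, 27, 3, 0, 0);
        // Prints one DAILY partition (2013-05-26) plus HOURLY partitions for
        // 2013-05-25 hours 07..23 and 2013-05-27 hours 00..02.
        resolve(from, to);
      }
    }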

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java Mon May 27 08:00:08 2013
@@ -1,8 +1,6 @@
 package org.apache.hadoop.hive.ql.cube.processors;
 
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
@@ -65,23 +63,11 @@ public class CubeDriver extends Driver {
     }
     // compile the cube query and rewrite it to HQL query
     CubeQueryRewriter rewriter = new CubeQueryRewriter(getConf());
-    // 1. rewrite query to get summary tables and joins
-    CubeQueryContext phase1Query = rewriter.rewritePhase1(tree);
-    CubeQueryContext finalQuery = rewriter.rewritePhase2(phase1Query,
-        getSupportedStorages(getConf()));
-    String hql = finalQuery.toHQL();
+    CubeQueryContext rewrittenQuery = rewriter.rewrite(tree);
+    String hql = rewrittenQuery.toHQL();
     if (explain) {
       hql = "EXPLAIN " + hql;
     }
     return hql;
   }
-
-  private List<String> getSupportedStorages(HiveConf conf) {
-    String[] storages = conf.getStrings(
-        HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString());
-    if (storages != null) {
-      return Arrays.asList(storages);
-    }
-    return null;
-  }
 }

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java Mon May 27 08:00:08 2013
@@ -2,6 +2,8 @@ package org.apache.hadoop.hive.ql.cube.p
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -36,6 +38,22 @@ public class CubeTestSetup {
   private Set<CubeMeasure> cubeMeasures;
   private Set<CubeDimension> cubeDimensions;
   private final String cubeName = "testCube";
+  public static Date now;
+  public static Date twodaysBack;
+  public static Date twoMonthsBack;
+
+  static {
+    Calendar cal = Calendar.getInstance();
+    now = cal.getTime();
+    System.out.println("Test now:" + now);
+    cal.add(Calendar.DAY_OF_MONTH, -2);
+    twodaysBack = cal.getTime();
+    System.out.println("Test twodaysBack:" + twodaysBack);
+    cal = Calendar.getInstance();
+    cal.add(Calendar.MONTH, -2);
+    twoMonthsBack = cal.getTime();
+    System.out.println("Test twoMonthsBack:" + twoMonthsBack);
+  }
 
   private void createCube(CubeMetastoreClient client) throws HiveException {
     cubeMeasures = new HashSet<CubeMeasure>();
@@ -139,10 +157,7 @@ public class CubeTestSetup {
     Map<Storage, List<UpdatePeriod>> storageAggregatePeriods =
         new HashMap<Storage, List<UpdatePeriod>>();
     List<UpdatePeriod> updates  = new ArrayList<UpdatePeriod>();
-    updates.add(UpdatePeriod.HOURLY);
-    updates.add(UpdatePeriod.DAILY);
     updates.add(UpdatePeriod.WEEKLY);
-    updates.add(UpdatePeriod.MONTHLY);
     Storage hdfsStorage = new HDFSStorage("C1",
         TextInputFormat.class.getCanonicalName(),
         HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
@@ -178,6 +193,18 @@ public class CubeTestSetup {
     // create cube fact
     client.createCubeFactTable(cubeName, factName, factColumns,
         storageAggregatePeriods, 0L);
+    CubeFactTable fact2 = client.getFactTable(factName);
+    // Add all hourly partitions for two days
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(twodaysBack);
+    Date temp = cal.getTime();
+    while (!(temp.after(now))) {
+      System.out.println("Adding partition" + temp);
+      client.addPartition(fact2, hdfsStorage,
+        UpdatePeriod.HOURLY, temp);
+      cal.add(Calendar.HOUR_OF_DAY, 1);
+      temp = cal.getTime();
+    }
   }
 
   private void createCubeFactMonthly(CubeMetastoreClient client)
@@ -312,7 +339,8 @@ public class CubeTestSetup {
         new HiveConf(this.getClass()));
     createCube(client);
     createCubeFact(client);
-    createCubeFactWeekly(client);
+    // commenting this out as the week date format throws
+    // IllegalArgumentException
+    //createCubeFactWeekly(client);
     createCubeFactOnlyHourly(client);
     createCityTbale(client);
     createCubeFactMonthly(client);

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestMaxUpdateInterval.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestMaxUpdateInterval.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestMaxUpdateInterval.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestMaxUpdateInterval.java Mon May 27 08:00:08 2013
@@ -2,13 +2,12 @@ package org.apache.hadoop.hive.ql.cube.p
 
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Date;
-import java.util.List;
+import java.util.HashSet;
+import java.util.Set;
 
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.cube.metadata.CubeFactTable;
-import org.apache.hadoop.hive.ql.cube.metadata.CubeMetastoreClient;
 import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
 import org.junit.Assert;
 import org.junit.Test;
@@ -27,7 +26,8 @@ public class TestMaxUpdateInterval<perio
     "2013-Feb-01", "2013-Feb-21"
   };
 
-  public static final SimpleDateFormat DATE_FMT = new SimpleDateFormat("yyyy-MMM-dd");
+  public static final SimpleDateFormat DATE_FMT = new SimpleDateFormat(
+      "yyyy-MMM-dd");
 
   private final Date pairs[];
 
@@ -45,39 +45,36 @@ public class TestMaxUpdateInterval<perio
 
   @Test
   public void testMaxUpdatePeriodInInterval() throws Exception {
-    CubeTestSetup setup = new CubeTestSetup();
-    setup.createSources();
-
-    CubeMetastoreClient client =  CubeMetastoreClient.getInstance(
-        new HiveConf(this.getClass()));
-
-    CubeFactTable fact = client.getFactTable("testFact");
-    List<UpdatePeriod> allPeriods = new ArrayList<UpdatePeriod>();
-    for (List<UpdatePeriod >periods : fact.getUpdatePeriods().values()) {
-      allPeriods.addAll(periods);
-    }
+    Set<UpdatePeriod> allPeriods = new HashSet<UpdatePeriod>();
+    allPeriods.addAll(Arrays.asList(UpdatePeriod.values()));
 
     int i = 0;
-    Assert.assertEquals(UpdatePeriod.DAILY, fact.maxIntervalInRange(pairs[i], pairs[i+1]));
+    Assert.assertEquals(UpdatePeriod.DAILY, CubeFactTable.maxIntervalInRange(
+        pairs[i], pairs[i+1], allPeriods));
 
     i+=2;
-    Assert.assertEquals(UpdatePeriod.MONTHLY, fact.maxIntervalInRange(pairs[i], pairs[i+1]));
+    Assert.assertEquals(UpdatePeriod.MONTHLY, CubeFactTable.maxIntervalInRange(
+        pairs[i], pairs[i+1], allPeriods));
 
     i+=2;
-    Assert.assertEquals(UpdatePeriod.QUARTERLY, fact.maxIntervalInRange(pairs[i], pairs[i+1]));
+    Assert.assertEquals(UpdatePeriod.QUARTERLY, CubeFactTable.
+        maxIntervalInRange(pairs[i], pairs[i+1], allPeriods));
 
     i+=2;
-    Assert.assertEquals(UpdatePeriod.MONTHLY, fact.maxIntervalInRange(pairs[i], pairs[i+1]));
+    Assert.assertEquals(UpdatePeriod.MONTHLY, CubeFactTable.maxIntervalInRange(
+        pairs[i], pairs[i+1], allPeriods));
 
     i+=2;
-    Assert.assertEquals(UpdatePeriod.QUARTERLY, fact.maxIntervalInRange(pairs[i], pairs[i+1]));
+    Assert.assertEquals(UpdatePeriod.QUARTERLY, CubeFactTable.
+        maxIntervalInRange(pairs[i], pairs[i+1], allPeriods));
 
     i+=2;
-    Assert.assertEquals(UpdatePeriod.YEARLY, fact.maxIntervalInRange(pairs[i], pairs[i+1]));
+    Assert.assertEquals(UpdatePeriod.YEARLY, CubeFactTable.maxIntervalInRange(
+        pairs[i], pairs[i+1], allPeriods));
 
-    CubeFactTable weeklyFact = client.getFactTable("testFactWeekly");
     i+=2;
-    Assert.assertEquals(UpdatePeriod.WEEKLY, weeklyFact.maxIntervalInRange(pairs[i], pairs[i+1]));
+    Assert.assertEquals(UpdatePeriod.WEEKLY, CubeFactTable.maxIntervalInRange(
+        pairs[i], pairs[i+1], allPeriods));
   }
 
 }

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java?rev=1486532&r1=1486531&r2=1486532&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java Mon May 27 08:00:08 2013
@@ -1,7 +1,6 @@
 package org.apache.hadoop.hive.ql.cube.processors;
 
 import java.text.SimpleDateFormat;
-import java.util.Calendar;
 import java.util.Date;
 
 import org.apache.hadoop.conf.Configuration;
@@ -19,25 +18,10 @@ public class TestCubeDriver {
   private Configuration conf;
   private CubeDriver driver;
 
-  static Date now;
-  static Date twodaysBack;
-  static Date twoMonthsBack;
-
   @BeforeClass
   public static void setup() throws Exception {
     CubeTestSetup setup = new CubeTestSetup();
     setup.createSources();
-    Calendar cal = Calendar.getInstance();
-    now = cal.getTime();
-    System.out.println("Test now:" + now);
-    cal.add(Calendar.DAY_OF_MONTH, -2);
-    twodaysBack = cal.getTime();
-    System.out.println("Test twodaysBack:" + twodaysBack);
-    cal = Calendar.getInstance();
-    cal.add(Calendar.MONTH, -2);
-    twoMonthsBack = cal.getTime();
-    System.out.println("Test twoMonthsBack:" + twoMonthsBack);
-
   }
 
   @Before
@@ -81,8 +65,8 @@ public class TestCubeDriver {
     try {
       String hqlQuery = driver.compileCubeQuery("select dim12, SUM(msr2)" +
           " from testCube" +
-          " where time_range_in('" + getDateUptoHours(twodaysBack)
-          + "','" + getDateUptoHours(now) + "')");
+          " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+          + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     } catch (SemanticException e) {
       th = e;
       e.printStackTrace();
@@ -93,82 +77,86 @@ public class TestCubeDriver {
   @Test
   public void testCubeExplain() throws Exception {
     String hqlQuery = driver.compileCubeQuery("explain select SUM(msr2) from " +
-        "testCube where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        "testCube where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
   }
 
   @Test
   public void testCubeWhereQuery() throws Exception {
-    System.out.println("Test from:" + getDateUptoHours(twodaysBack) + " to:" +
-        getDateUptoHours(now));
+    System.out.println("Test from:" + getDateUptoHours(CubeTestSetup.twodaysBack) + " to:" +
+        getDateUptoHours(CubeTestSetup.now));
     //String expected = " sum( testcube.msr2 ) FROM  C1_testfact_HOURLY
     //testcube  WHERE " + whereClause(HOURLY) + " UNION " +
     // SELECT sum( testcube.msr2 ) FROM  C1_testfact_DAILY testcube
     //WHERE + whereClause(DAILY)
 
     String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
+    System.out.println("cube hql:" + hqlQuery);
+
+    // Test with partition existence
+    conf.setBoolean(CubeQueryConstants.FAIL_QUERY_ON_PARTIAL_DATA, true);
+    hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
+    conf.setBoolean(CubeQueryConstants.FAIL_QUERY_ON_PARTIAL_DATA, false);
 
+    // Tests for valid tables
     conf.set(CubeQueryConstants.VALID_FACT_TABLES, "testFact");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
-    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C2");
+    conf.set(CubeQueryConstants.DRIVER_SUPPORTED_STORAGES, "C2");
     conf.set(CubeQueryConstants.VALID_FACT_TABLES, "testFact");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
-    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C1");
+    conf.set(CubeQueryConstants.DRIVER_SUPPORTED_STORAGES, "C1");
     conf.set(CubeQueryConstants.VALID_FACT_TABLES, "testFact2");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
     conf.set(CubeQueryConstants.VALID_FACT_TABLES, "");
-    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C1");
+    conf.set(CubeQueryConstants.DRIVER_SUPPORTED_STORAGES, "C1");
     conf.set(CubeQueryConstants.VALID_STORAGE_FACT_TABLES,
         "C1_testFact2_HOURLY");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
 
-    // TODO fix following cases
-    try {
-      conf.set(CubeQueryConstants.VALID_STORAGE_FACT_TABLES,
-          "C1_testFact_HOURLY");
-      driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
-      hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-          " where time_range_in('" + getDateUptoHours(twodaysBack)
-          + "','" + getDateUptoHours(now) + "')");
-      System.out.println("cube hql:" + hqlQuery);
+    conf.set(CubeQueryConstants.VALID_STORAGE_FACT_TABLES,
+        "C1_testFact_HOURLY");
+    driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
+    hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
+    System.out.println("cube hql:" + hqlQuery);
 
-      conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(),
-          "C2");
-      conf.set(CubeQueryConstants.VALID_FACT_TABLES, "");
-      conf.set(CubeQueryConstants.VALID_STORAGE_FACT_TABLES,
-          "C2_testFact_HOURLY");
-      driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
-      hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-          " where time_range_in('" + getDateUptoHours(twodaysBack)
-          + "','" + getDateUptoHours(now) + "')");
-      System.out.println("cube hql:" + hqlQuery);
-    } catch (SemanticException e) {
-      e.printStackTrace();
-    }
+    conf.set(CubeQueryConstants.DRIVER_SUPPORTED_STORAGES,
+        "C2");
+    conf.set(CubeQueryConstants.VALID_FACT_TABLES, "");
+    conf.set(CubeQueryConstants.VALID_STORAGE_FACT_TABLES,
+        "C2_testFact_HOURLY");
+    driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
+    hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
+    System.out.println("cube hql:" + hqlQuery);
     //Assert.assertEquals(queries[1], cubeql.toHQL());
   }
 
@@ -177,8 +165,8 @@ public class TestCubeDriver {
     //String expected = "select SUM(testCube.msr2) from "
     String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube"
         + " join citytable on testCube.cityid = citytable.id"
-        + " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        + " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
     hqlQuery = driver.compileCubeQuery("select statetable.name, SUM(msr2) from"
@@ -186,96 +174,85 @@ public class TestCubeDriver {
         + " join citytable on testCube.cityid = citytable.id"
         + " left outer join statetable on statetable.id = citytable.stateid"
         + " right outer join ziptable on citytable.zipcode = ziptable.code"
-        + " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        + " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
     hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube"
         + " join countrytable on testCube.countryid = countrytable.id"
-        + " where time_range_in('" + getDateUptoMonth(twoMonthsBack)
-        + "','" + getDateUptoMonth(now) + "')");
+        + " where time_range_in('" + getDateUptoMonth(CubeTestSetup.twoMonthsBack)
+        + "','" + getDateUptoMonth(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
-
   }
 
   @Test
   public void testCubeGroupbyQuery() throws Exception {
-    Calendar cal = Calendar.getInstance();
-    Date now = cal.getTime();
-    System.out.println("Test now:" + now);
-    cal.add(Calendar.DAY_OF_MONTH, -2);
-    Date twodaysBack = cal.getTime();
-    System.out.println("Test twodaysBack:" + twodaysBack);
-    System.out.println("Test from:" + getDateUptoHours(twodaysBack) + " to:" +
-        getDateUptoHours(now));
     //String expected = "select SUM(testCube.msr2) from "
     String hqlQuery = driver.compileCubeQuery("select name, SUM(msr2) from" +
         " testCube"
         + " join citytable on testCube.cityid = citytable.id"
-        + " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        + " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
     hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube"
         + " join citytable on testCube.cityid = citytable.id"
-        + " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')"
+        + " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')"
         + " group by name");
     System.out.println("cube hql:" + hqlQuery);
 
     hqlQuery = driver.compileCubeQuery("select cityid, SUM(msr2) from testCube"
-        + " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        + " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
     hqlQuery = driver.compileCubeQuery("select round(cityid), SUM(msr2) from" +
         " testCube"
-        + " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        + " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
     hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube"
-        + " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')"
+        + " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')"
         + " group by round(zipcode)");
 
     hqlQuery = driver.compileCubeQuery("select round(cityid), SUM(msr2) from" +
         " testCube"
-        + " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')"
+        + " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')"
         + " group by zipcode");
     System.out.println("cube hql:" + hqlQuery);
 
     hqlQuery = driver.compileCubeQuery("select cityid, SUM(msr2) from testCube"
-        + " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')"
+        + " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')"
         + " group by round(zipcode)");
     System.out.println("cube hql:" + hqlQuery);
 
-    // TODO to be tested after aggregate resolver
-    /*hqlQuery = driver.compileCubeQuery("select cityid, msr2 from testCube"
-        + " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')"
+    hqlQuery = driver.compileCubeQuery("select cityid, msr2 from testCube"
+        + " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')"
         + " group by round(zipcode)");
     System.out.println("cube hql:" + hqlQuery);
-     */
   }
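
Re-enabling the previously commented query above suggests the rewriter now accepts a bare measure (msr2 without an explicit aggregate) in a group-by query, presumably deferring to an aggregate-resolver phase to wrap it; the removed "TODO to be tested after aggregate resolver" comment points the same way. That reading is an inference, sketched below with placeholder time-range literals:

    // Inference from the removed TODO, not asserted by this diff: a bare
    // measure in a group-by query should come back wrapped in its default
    // aggregate after rewriting.
    String hql = driver.compileCubeQuery(
        "select cityid, msr2 from testCube"
        + " where time_range_in('<from>','<to>')" // built via CubeTestSetup helpers
        + " group by round(zipcode)");
    System.out.println("cube hql:" + hql); // expected to aggregate msr2
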
 
   @Test
   public void testCubeQueryWithAilas() throws Exception {
     String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
     hqlQuery = driver.compileCubeQuery("select SUM(msr2) m2 from testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
     try {
       hqlQuery = driver.compileCubeQuery("select name, SUM(msr2) from testCube"
           + " join citytable" +
-          " where time_range_in('" + getDateUptoHours(twodaysBack)
-          + "','" + getDateUptoHours(now) + "')" +
+          " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+          + "','" + getDateUptoHours(CubeTestSetup.now) + "')" +
           " group by name");
       System.out.println("cube hql:" + hqlQuery);
     } catch (SemanticException e) {
@@ -283,44 +260,59 @@ public class TestCubeDriver {
     }
     hqlQuery = driver.compileCubeQuery("select SUM(mycube.msr2) from" +
         " testCube mycube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
     //Assert.assertEquals(queries[1], cubeql.toHQL());
 
     hqlQuery = driver.compileCubeQuery("select SUM(testCube.msr2) from" +
         " testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
     hqlQuery = driver.compileCubeQuery("select mycube.msr2 m2 from testCube" +
         " mycube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
     hqlQuery = driver.compileCubeQuery("select testCube.msr2 m2 from testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
   }
 
   @Test
   public void testCubeWhereQueryForMonth() throws Exception {
-    System.out.println("Test from:" + getDateUptoHours(twoMonthsBack) + " to:" +
-        getDateUptoHours(now));
+    System.out.println("Test from:" + getDateUptoHours(
+        CubeTestSetup.twoMonthsBack) + " to:" +
+        getDateUptoHours(CubeTestSetup.now));
     String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-        " where time_range_in('" + getDateUptoHours(twoMonthsBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twoMonthsBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
     //Assert.assertEquals(queries[1], cubeql.toHQL());
 
+    conf.setBoolean(CubeQueryConstants.FAIL_QUERY_ON_PARTIAL_DATA, true);
+    driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
+    try {
+      hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
+          " where time_range_in('" + getDateUptoHours(
+              CubeTestSetup.twoMonthsBack)
+          + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
+      Assert.fail("expected compilation to fail on missing partitions");
+    } catch (SemanticException e) {
+      e.printStackTrace();
+    }
+    conf.setBoolean(CubeQueryConstants.FAIL_QUERY_ON_PARTIAL_DATA, false);
+    driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
+
     // TODO this should consider only the two month-level partitions. Month
     // weight needs to be fixed.
     hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-        " where time_range_in('" + getDateUptoMonth(twoMonthsBack)
-        + "','" + getDateUptoMonth(now) + "')");
+        " where time_range_in('" + getDateUptoMonth(CubeTestSetup.twoMonthsBack)
+        + "','" + getDateUptoMonth(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
   }
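
The FAIL_QUERY_ON_PARTIAL_DATA round trip above is the test-side face of this commit's partition-existence check: with the flag set, a time range not fully covered by existing partitions makes compileCubeQuery throw a SemanticException instead of silently querying partial data. A hedged sketch of using the flag as a strict-mode switch; compileStrict is an illustrative helper, not part of the codebase:

    // Sketch: refuse to compile when the range is not fully materialized.
    static String compileStrict(HiveConf base, String cubeQl) throws Exception {
      HiveConf conf = new HiveConf(base, HiveConf.class);
      conf.setBoolean(CubeQueryConstants.FAIL_QUERY_ON_PARTIAL_DATA, true);
      CubeDriver driver = new CubeDriver(conf);
      try {
        return driver.compileCubeQuery(cubeQl);
      } catch (SemanticException e) {
        // Thrown when some partitions in the requested range do not exist.
        throw new IllegalStateException("time range not fully materialized", e);
      }
    }
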
 
@@ -330,27 +322,28 @@ public class TestCubeDriver {
         " citytable");
     System.out.println("cube hql:" + hqlQuery);
 
-    hqlQuery = driver.compileCubeQuery("select name, c.stateid from citytable c");
+    hqlQuery = driver.compileCubeQuery("select name, c.stateid from citytable" +
+        " c");
     System.out.println("cube hql:" + hqlQuery);
 
-    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C2");
+    conf.set(CubeQueryConstants.DRIVER_SUPPORTED_STORAGES, "C2");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     hqlQuery = driver.compileCubeQuery("select name, stateid from citytable");
     System.out.println("cube hql:" + hqlQuery);
     //Assert.assertEquals(queries[1], cubeql.toHQL());
 
-    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C1");
+    conf.set(CubeQueryConstants.DRIVER_SUPPORTED_STORAGES, "C1");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     hqlQuery = driver.compileCubeQuery("select name, stateid from citytable");
     System.out.println("cube hql:" + hqlQuery);
 
-    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "");
+    conf.set(CubeQueryConstants.DRIVER_SUPPORTED_STORAGES, "");
     conf.set(CubeQueryConstants.VALID_STORAGE_DIM_TABLES, "C1_citytable");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     hqlQuery = driver.compileCubeQuery("select name, stateid from citytable");
     System.out.println("cube hql:" + hqlQuery);
 
-    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "");
+    conf.set(CubeQueryConstants.DRIVER_SUPPORTED_STORAGES, "");
     conf.set(CubeQueryConstants.VALID_STORAGE_DIM_TABLES, "C2_citytable");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     hqlQuery = driver.compileCubeQuery("select name, stateid from citytable");
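
Dimension-only queries go through the same configuration gates, with VALID_STORAGE_DIM_TABLES playing the role VALID_STORAGE_FACT_TABLES plays for facts, and an empty DRIVER_SUPPORTED_STORAGES leaving storages unrestricted. A minimal sketch under the same assumptions as the fact example earlier:

    // Sketch: pin a dimension query to one storage table.
    conf.set(CubeQueryConstants.DRIVER_SUPPORTED_STORAGES, ""); // no storage filter
    conf.set(CubeQueryConstants.VALID_STORAGE_DIM_TABLES, "C2_citytable");
    CubeDriver dimDriver = new CubeDriver(new HiveConf(conf, HiveConf.class));
    System.out.println("cube hql:"
        + dimDriver.compileCubeQuery("select name, stateid from citytable"));
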
@@ -363,12 +356,12 @@ public class TestCubeDriver {
         " citytable limit 100");
     System.out.println("cube hql:" + hqlQuery);
     //Assert.assertEquals(queries[1], cubeql.toHQL());
-    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C2");
+    conf.set(CubeQueryConstants.DRIVER_SUPPORTED_STORAGES, "C2");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     hqlQuery = driver.compileCubeQuery("select name, stateid from citytable " +
         "limit 100");
     System.out.println("cube hql:" + hqlQuery);
-    conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C1");
+    conf.set(CubeQueryConstants.DRIVER_SUPPORTED_STORAGES, "C1");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
     hqlQuery = driver.compileCubeQuery("select name, stateid from citytable" +
         " limit 100");
@@ -513,18 +506,18 @@ public class TestCubeDriver {
   public void testFactsWithInvalidColumns() throws Exception {
     String hqlQuery = driver.compileCubeQuery("select dim1, AVG(msr1)," +
         " msr2 from testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
     hqlQuery = driver.compileCubeQuery("select dim1, dim2, COUNT(msr1)," +
         " SUM(msr2), msr3 from testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
     hqlQuery = driver.compileCubeQuery("select dim1, dim2, cityid, SUM(msr1)," +
         " SUM(msr2), msr3 from testCube" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')");
+        " where time_range_in('" + getDateUptoHours(CubeTestSetup.twodaysBack)
+        + "','" + getDateUptoHours(CubeTestSetup.now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
   }
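
The three queries above mix dimensions and measures (dim1, dim2, msr1, msr3) that, per CubeTestSetup, not every fact table carries; the rewriter is expected to choose, per query, a candidate fact containing every referenced column. A hedged probe of that behaviour, reusing only the API shown in this diff; whether an unanswerable column set surfaces as a SemanticException is an assumption:

    // Sketch: probe whether some fact table can answer a column combination.
    try {
      String hql = driver.compileCubeQuery(
          "select dim1, dim2, cityid, SUM(msr1), SUM(msr2), msr3 from testCube"
          + " where time_range_in('<from>','<to>')"); // placeholder literals
      System.out.println("answerable; cube hql:" + hql);
    } catch (SemanticException e) {
      System.out.println("no single fact covers the requested columns");
    }
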