Posted to commits@hive.apache.org by am...@apache.org on 2013/04/02 09:00:23 UTC

svn commit: r1463407 - in /hive/branches/HIVE-4115/ql/src: java/org/apache/hadoop/hive/ql/cube/metadata/ java/org/apache/hadoop/hive/ql/cube/parse/ java/org/apache/hadoop/hive/ql/cube/processors/ test/org/apache/hadoop/hive/ql/cube/metadata/ test/org/a...

Author: amareshwari
Date: Tue Apr  2 07:00:22 2013
New Revision: 1463407

URL: http://svn.apache.org/r1463407
Log:
Add partition resolver

Added:
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java
Modified:
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtils.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/metadata/TestCubeMetastoreClient.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java?rev=1463407&r1=1463406&r2=1463407&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java Tue Apr  2 07:00:22 2013
@@ -5,6 +5,7 @@ import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -119,21 +120,22 @@ public final class CubeFactTable extends
     if (fmt != null) {
       Calendar cal = Calendar.getInstance();
       cal.setTime(fromDate);
-      List<String> summaries = new ArrayList<String>();
+      List<String> partitions = new ArrayList<String>();
       Date dt = cal.getTime();
       while (dt.compareTo(toDate) < 0) {
-        summaries.add(new SimpleDateFormat(fmt).format(cal.getTime()));
+        String part = new SimpleDateFormat(fmt).format(cal.getTime());
+        System.out.println("Adding partition:" + part + " for table:" + getName());
+        partitions.add(part);
         cal.add(interval.calendarField(), 1);
         dt = cal.getTime();
       }
-      return summaries;
+      return partitions;
     } else {
       return null;
     }
   }
 
-  public static UpdatePeriod maxIntervalInRange(Date from, Date to,
-      Set<UpdatePeriod> updatePeriods) {
+  public UpdatePeriod maxIntervalInRange(Date from, Date to) {
     long diff = to.getTime() - from.getTime();
     if (diff < UpdatePeriod.MIN_INTERVAL) {
       return null;
@@ -141,6 +143,10 @@ public final class CubeFactTable extends
     UpdatePeriod max = null;
     long minratio = diff / UpdatePeriod.MIN_INTERVAL;
 
+    Set<UpdatePeriod> updatePeriods = new HashSet<UpdatePeriod>();
+    for (List<UpdatePeriod> value : storageUpdatePeriods.values()) {
+      updatePeriods.addAll(value);
+    }
     for (UpdatePeriod i : updatePeriods) {
       long tmpratio = diff / i.weight();
       if (tmpratio == 0) {
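
The hunk above is cut off at the ratio check, so the tail of the loop is not
shown. For readers following the change: maxIntervalInRange is now an instance
method that unions the update periods registered across all storages and picks
the coarsest period that still fits at least once into the requested range. A
minimal, self-contained sketch of that selection logic (the enum weights and
the loop tail are assumptions for illustration, not the committed code):

    import java.util.Date;
    import java.util.Set;

    enum Period {
        HOURLY(3600_000L), DAILY(24 * 3600_000L), MONTHLY(30L * 24 * 3600_000L);
        final long weight;
        Period(long weight) { this.weight = weight; }
    }

    class MaxIntervalSketch {
        // Returns the coarsest period that fits at least once in [from, to).
        static Period maxIntervalInRange(Date from, Date to, Set<Period> periods) {
            long diff = to.getTime() - from.getTime();
            Period max = null;
            long minRatio = Long.MAX_VALUE;
            for (Period p : periods) {
                long ratio = diff / p.weight;  // whole periods fitting in the range
                if (ratio == 0) {
                    continue;                  // period is coarser than the range
                }
                if (ratio < minRatio) {        // fewer repetitions = coarser period
                    minRatio = ratio;
                    max = p;
                }
            }
            return max;
        }
    }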

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java?rev=1463407&r1=1463406&r2=1463407&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java Tue Apr  2 07:00:22 2013
@@ -411,33 +411,41 @@ public class CubeMetastoreClient {
     return isFactTable(tbl);
   }
 
-  private boolean isFactTable(Table tbl) {
+   boolean isFactTable(Table tbl) {
     String tableType = tbl.getParameters().get(
         MetastoreConstants.TABLE_TYPE_KEY);
-    return CubeTableType.FACT.equals(tableType);
+    return CubeTableType.FACT.name().equals(tableType);
   }
 
-  private boolean isFactTableForCube(Table tbl, Cube cube) {
+  boolean isFactTableForCube(Table tbl, Cube cube) {
     if (isFactTable(tbl)) {
       String cubeName = tbl.getParameters().get(
           MetastoreUtil.getFactCubeNameKey(tbl.getTableName()));
-      return cube.getName().equals(cubeName);
+      return cubeName.equalsIgnoreCase(cube.getName());
     }
     return false;
   }
 
   public boolean isDimensionTable(String tableName) throws HiveException {
     Table tbl = getTable(tableName);
+    return isDimensionTable(tbl);
+  }
+
+  boolean isDimensionTable(Table tbl) throws HiveException {
     String tableType = tbl.getParameters().get(
         MetastoreConstants.TABLE_TYPE_KEY);
-    return CubeTableType.DIMENSION.equals(tableType);
+    return CubeTableType.DIMENSION.name().equals(tableType);
   }
 
   public boolean isCube(String tableName) throws HiveException {
     Table tbl = getTable(tableName);
+    return isCube(tbl);
+  }
+
+  boolean isCube(Table tbl) throws HiveException {
     String tableType = tbl.getParameters().get(
         MetastoreConstants.TABLE_TYPE_KEY);
-    return CubeTableType.CUBE.equals(tableType);
+    return CubeTableType.CUBE.name().equals(tableType);
   }
 
   public CubeFactTable getFactTable(String tableName) throws HiveException {
@@ -455,8 +463,7 @@ public class CubeMetastoreClient {
   public CubeDimensionTable getDimensionTable(String tableName)
       throws HiveException {
     Table tbl = getTable(tableName);
-    if (CubeTableType.DIMENSION.equals(tbl.getParameters().get(
-        MetastoreConstants.TABLE_TYPE_KEY))) {
+    if (isDimensionTable(tableName)) {
       return new CubeDimensionTable(tbl);
     }
     return null;
@@ -464,8 +471,7 @@ public class CubeMetastoreClient {
 
   public Cube getCube(String tableName) throws HiveException {
     Table tbl = getTable(tableName);
-    if (CubeTableType.CUBE.equals(tbl.getParameters().get(
-        MetastoreConstants.TABLE_TYPE_KEY))) {
+    if (isCube(tableName)) {
       return new Cube(tbl);
     }
     return null;
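
The main fix in this file is subtle: the old checks compared an enum constant
directly to a String pulled from the table parameters, and Enum.equals never
matches a String, so isFactTable/isDimensionTable/isCube always returned
false. Comparing name() against the stored value behaves as intended. A
two-line illustration (the literal "FACT" stands in for whatever is stored
under MetastoreConstants.TABLE_TYPE_KEY):

    String tableType = "FACT";  // value read from the table parameters
    boolean before = CubeTableType.FACT.equals(tableType);        // always false
    boolean after  = CubeTableType.FACT.name().equals(tableType); // true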

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java?rev=1463407&r1=1463406&r2=1463407&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java Tue Apr  2 07:00:22 2013
@@ -7,7 +7,7 @@ public enum UpdatePeriod implements Name
   SECONDLY (Calendar.SECOND, 1000, "yyyy-MM-dd-HH-mm-ss"),
   MINUTELY (Calendar.MINUTE, 60 * SECONDLY.weight(), "yyyy-MM-dd-HH-mm"),
   HOURLY (Calendar.HOUR_OF_DAY, 60 * MINUTELY.weight(), "yyyy-MM-dd-HH"),
-  DAILY (Calendar.DATE, 24 * HOURLY.weight(), "yyyy-MM-DD"),
+  DAILY (Calendar.DATE, 24 * HOURLY.weight(), "yyyy-MM-dd"),
   WEEKLY (Calendar.WEEK_OF_YEAR, 7 * DAILY.weight(), "yyyy-'W'ww-u"),
   MONTHLY (Calendar.MONTH, 30 * DAILY.weight(), "yyyy-MM"),
   //QUARTERLY (Calendar.MONTH, 3 * MONTHLY.weight(), "YYYY-MM"),
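
The one-character fix above matters because SimpleDateFormat treats uppercase
D as day-of-year and lowercase d as day-of-month, so the old DAILY pattern
produced partition names like "2013-04-92" instead of "2013-04-02". A quick
check:

    import java.text.SimpleDateFormat;
    import java.util.*;

    public class DayPatternCheck {
        public static void main(String[] args) {
            Date d = new GregorianCalendar(2013, Calendar.APRIL, 2).getTime();
            // D is day-of-year: April 2nd is day 92 of 2013.
            System.out.println(new SimpleDateFormat("yyyy-MM-DD").format(d)); // 2013-04-92
            // d is day-of-month, which is what the partition names need.
            System.out.println(new SimpleDateFormat("yyyy-MM-dd").format(d)); // 2013-04-02
        }
    }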

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java?rev=1463407&r1=1463406&r2=1463407&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java Tue Apr  2 07:00:22 2013
@@ -18,6 +18,7 @@ import org.apache.hadoop.hive.ql.cube.me
 import org.apache.hadoop.hive.ql.cube.metadata.CubeDimensionTable;
 import org.apache.hadoop.hive.ql.cube.metadata.CubeFactTable;
 import org.apache.hadoop.hive.ql.cube.metadata.CubeMetastoreClient;
+import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.JoinCond;
@@ -38,21 +39,22 @@ public class CubeQueryContext {
   private String fromDateRaw;
   private String toDateRaw;
   private Cube cube;
-  private Set<CubeDimensionTable> dimensions = new HashSet<CubeDimensionTable>();
-  private Set<CubeFactTable> candidateFactTables = new HashSet<CubeFactTable>();
+  protected Set<CubeDimensionTable> dimensions = new HashSet<CubeDimensionTable>();
+  protected Set<CubeFactTable> candidateFactTables = new HashSet<CubeFactTable>();
   private final Map<String, List<String>> tblToColumns = new HashMap<String, List<String>>();
   private Date timeFrom;
   private Date timeTo;
   private String clauseName = null;
   private Map<String, List<String>> partitionCols;
+  private Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartitionMap;
 
   public CubeQueryContext(ASTNode ast, QB qb, HiveConf conf)
       throws SemanticException {
     this.ast = ast;
     this.qb = qb;
     this.conf = conf;
-    //extractTimeRange();
-    //extractMetaTables();
+    extractMetaTables();
+    extractTimeRange();
   }
 
   public CubeQueryContext(CubeQueryContext other) {
@@ -91,6 +93,10 @@ public class CubeQueryContext {
           dimensions.add(client.getDimensionTable(tblName));
         }
       }
+      if (cube == null && dimensions.size() == 0) {
+        throw new SemanticException("Neither cube nor dimensions accessed");
+      }
+      candidateFactTables.addAll(client.getAllFactTables(cube));
     } catch (HiveException e) {
       throw new SemanticException(e);
     }
@@ -109,7 +115,7 @@ public class CubeQueryContext {
     // Time range should be direct child of where condition
     // TOK_WHERE.TOK_FUNCTION.Identifier Or, it should be right hand child of
     // AND condition TOK_WHERE.KW_AND.TOK_FUNCTION.Identifier
-    ASTNode whereTree = qb.getParseInfo().getWhrForClause(clauseName);
+    ASTNode whereTree = qb.getParseInfo().getWhrForClause(getClause());
     if (whereTree == null || whereTree.getChildCount() < 1) {
       throw new SemanticException("No filter specified");
     }
@@ -145,6 +151,8 @@ public class CubeQueryContext {
     } catch (HiveException e) {
       throw new SemanticException(e);
     }
+    System.out.println("timeFrom:" + timeFrom);
+    System.out.println("timeTo:" + timeTo);
   }
 
 /*  private void extractColumns() {
@@ -430,8 +438,40 @@ public class CubeQueryContext {
     }
   }
 
-  public String toHQL() {
-    // TODO Auto-generated method stub
+  public ASTNode getSelectTree() {
+    return qb.getParseInfo().getSelForClause(getClause());
+  }
+
+  public ASTNode getWhereTree() {
+    return qb.getParseInfo().getWhrForClause(getClause());
+  }
+
+  public ASTNode getGroupbyTree() {
+    return qb.getParseInfo().getGroupByForClause(getClause());
+  }
+
+  public ASTNode getHavingTree() {
+    return qb.getParseInfo().getHavingForClause(getClause());
+  }
+
+  public ASTNode getJoinTree() {
+    return qb.getParseInfo().getJoinExpr();
+  }
+
+  public ASTNode getOrderbyTree() {
+    return qb.getParseInfo().getOrderByForClause(getClause());
+  }
+
+  public String toHQL() throws SemanticException {
     return null;
   }
+
+  public Map<CubeFactTable, Map<UpdatePeriod, List<String>>> getFactPartitionMap() {
+    return factPartitionMap;
+  }
+
+  public void setFactPartitionMap(Map<CubeFactTable,
+      Map<UpdatePeriod, List<String>>> factPartitionMap) {
+    this.factPartitionMap = factPartitionMap;
+  }
 }
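
With this change the context extracts the referenced cube and dimension tables
and the time range as soon as it is constructed, exposes the per-clause AST
subtrees, and carries the map that PartitionResolver fills in: fact table ->
update period -> partition names. A hedged sketch of reading that map after
resolution (the cubeql variable is assumed):

    Map<CubeFactTable, Map<UpdatePeriod, List<String>>> partMap =
        cubeql.getFactPartitionMap();
    for (Map.Entry<CubeFactTable, Map<UpdatePeriod, List<String>>> fact
            : partMap.entrySet()) {
        for (Map.Entry<UpdatePeriod, List<String>> byPeriod
                : fact.getValue().entrySet()) {
            System.out.println(fact.getKey().getName() + " @ "
                + byPeriod.getKey() + " -> " + byPeriod.getValue());
        }
    }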

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java?rev=1463407&r1=1463406&r2=1463407&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContextWithStorage.java Tue Apr  2 07:00:22 2013
@@ -1,7 +1,10 @@
 package org.apache.hadoop.hive.ql.cube.parse;
 
 import java.util.List;
+import java.util.Map;
 
+import org.apache.hadoop.hive.ql.cube.metadata.CubeFactTable;
+import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.QB;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -9,21 +12,59 @@ import org.apache.hadoop.hive.ql.parse.S
 public class CubeQueryContextWithStorage extends CubeQueryContext {
 
   private final List<String> supportedStorages;
+  private final boolean allStoragesSupported;
+  private Map<CubeFactTable, Map<UpdatePeriod, List<String>>> storageTableMap;
 
   public CubeQueryContextWithStorage(ASTNode ast, QB qb,
       List<String> supportedStorages) throws SemanticException {
     super(ast, qb, null);
     this.supportedStorages = supportedStorages;
+    this.allStoragesSupported = (supportedStorages == null);
   }
 
   public CubeQueryContextWithStorage(CubeQueryContext cubeql,
       List<String> supportedStorages) {
     super(cubeql);
     this.supportedStorages = supportedStorages;
+    this.allStoragesSupported = (supportedStorages == null);
   }
 
   public List<String> getStorageNames() {
     return supportedStorages;
   }
 
+  public String getStorageFactTable() {
+    if (candidateFactTables.size() > 0) {
+      return candidateFactTables.iterator().next().getName();
+    }
+    return null;
+  }
+
+  String simpleQueryFormat = "SELECT %s FROM %s WHERE %s";
+  String joinQueryFormat = "SELECT %s FROM %s JOIN %s WHERE %s";
+
+  @Override
+  public String toHQL() throws SemanticException {
+    String fromString = getStorageFactTable();
+    if (fromString == null) {
+      throw new SemanticException("No valid fact table available");
+    }
+    String selectString = HQLParser.getString(getSelectTree());
+    String whereString = HQLParser.getString(getWhereTree());
+
+    String actualQuery = String.format(simpleQueryFormat, selectString,
+        fromString,
+        whereString);
+    return actualQuery;
+  }
+
+  public Map<CubeFactTable, Map<UpdatePeriod, List<String>>> getStorageTableMap() {
+    return storageTableMap;
+  }
+
+  public void setStorageTableMap(Map<CubeFactTable,
+      Map<UpdatePeriod, List<String>>> storageTableMap) {
+    this.storageTableMap = storageTableMap;
+  }
+
 }
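
toHQL here is a first cut: it takes the first candidate fact table as the FROM
clause and splices the unmodified SELECT and WHERE subtrees into the simple
format string (the join variant is declared but not yet used). Filling in the
format with values taken from the tests elsewhere in this commit:

    String simpleQueryFormat = "SELECT %s FROM %s WHERE %s";
    String hql = String.format(simpleQueryFormat,
        "SUM(msr2)",                            // from the SELECT subtree
        "testFact",                             // first candidate fact table
        "time_range_in('NOW - 2DAYS', 'NOW')"); // WHERE subtree, unmodified
    // => SELECT SUM(msr2) FROM testFact WHERE time_range_in('NOW - 2DAYS', 'NOW')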

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java?rev=1463407&r1=1463406&r2=1463407&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java Tue Apr  2 07:00:22 2013
@@ -5,6 +5,7 @@ import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
@@ -40,12 +41,12 @@ public class CubeQueryRewriter {
     phase2Rewriters.add(new LeastDimensionResolver(conf));
   }
 
-  public CubeQueryContext rewritePhase1(String cubeql)
+  public CubeQueryContext rewritePhase1(ASTNode astnode)
       throws SemanticException, ParseException {
     CubeQueryContext ctx;
       CubeSemanticAnalyzer analyzer =  new CubeSemanticAnalyzer(
           new HiveConf(conf, HiveConf.class));
-      analyzer.analyzeInternal(HQLParser.parseHQL(cubeql));
+      analyzer.analyzeInternal(astnode);
       ctx = analyzer.getQueryContext();
       rewrite(phase1Rewriters, ctx);
     return ctx;
@@ -68,7 +69,7 @@ public class CubeQueryRewriter {
   }
 
   public static void main(String[] args) throws SemanticException, ParseException {
-    CubeQueryRewriter writer = new CubeQueryRewriter(new Configuration());
-    writer.rewritePhase1("select * from cube");
+   // CubeQueryRewriter writer = new CubeQueryRewriter(new Configuration());
+   // writer.rewritePhase1("select * from cube");
   }
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtils.java?rev=1463407&r1=1463406&r2=1463407&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtils.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtils.java Tue Apr  2 07:00:22 2013
@@ -7,6 +7,7 @@ import java.util.Date;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.log4j.Logger;
 
@@ -46,22 +47,49 @@ public class DateUtils {
 
   public static final Pattern RELDATE_VALIDATOR = Pattern.compile(RELDATE_VALIDATOR_STR, Pattern.CASE_INSENSITIVE);
 
-  public static final String ABSDATE_FMT = "dd-MMM-yyyy HH:mm:ss,SSS Z";
+  public static String YEAR_FMT = "[0-9]{4}";
+  public static String MONTH_FMT = YEAR_FMT + "-[0-9]{2}";
+  public static String DAY_FMT = MONTH_FMT + "-[0-9]{2}";
+  public static String HOUR_FMT = DAY_FMT + " [0-9]{2}";
+  public static String MINUTE_FMT = HOUR_FMT + ":[0-9]{2}";
+  public static String SECOND_FMT = MINUTE_FMT + ":[0-9]{2}";
+  public static final String ABSDATE_FMT = "yyyy-MM-dd HH:mm:ss,SSS";
   public static final SimpleDateFormat ABSDATE_PARSER = new SimpleDateFormat(ABSDATE_FMT);
 
   public static String formatDate(Date dt) {
     return ABSDATE_PARSER.format(dt);
   }
 
+  public static String getAbsDateFormatString(String str) {
+    if (str.matches(YEAR_FMT)) {
+      return str + "-01-01 00:00:00,000";
+    } else if (str.matches(MONTH_FMT)) {
+      return str + "-01 00:00:00,000";
+    } else if (str.matches(DAY_FMT)) {
+      return str + " 00:00:00,000";
+    } else if (str.matches(HOUR_FMT)) {
+      return str + ":00:00,000";
+    } else if (str.matches(MINUTE_FMT)) {
+      return str + ":00,000";
+    } else if (str.matches(SECOND_FMT)) {
+      return str + ",000";
+    } else if (str.matches(ABSDATE_FMT)) {
+      return str;
+    }
+    throw new IllegalArgumentException("Unsupported formatting for date" + str);
+  }
+
   public static Date resolveDate(String str, Date now) throws HiveException {
     if (RELDATE_VALIDATOR.matcher(str).matches()) {
       return resolveRelativeDate(str, now);
     } else {
       try {
-        return ABSDATE_PARSER.parse(str);
+        return ABSDATE_PARSER.parse(getAbsDateFormatString(str));
       } catch (ParseException e) {
-        LOG.error("Invalid date format. expected only " + ABSDATE_FMT + " date provided:" + str, e);
-        throw new HiveException("Date parsing error. expected format " + ABSDATE_FMT
+        LOG.error("Invalid date format. expected only " + ABSDATE_FMT
+            + " date provided:" + str, e);
+        throw new HiveException("Date parsing error. expected format "
+            + ABSDATE_FMT
             + ", date provided: " + str
             + ", failed because: " + e.getMessage());
       }
@@ -120,4 +148,75 @@ public class DateUtils {
 
     return calendar.getTime();
   }
+
+  public static Date getCeilDate(Date fromDate, UpdatePeriod interval) {
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(fromDate);
+    boolean hasFraction = false;
+    switch (interval) {
+    case YEARLY :
+      if (cal.get(Calendar.MONTH) != 1) {
+        hasFraction = true;
+        break;
+      }
+    case MONTHLY :
+      if (cal.get(Calendar.DAY_OF_MONTH) != 1) {
+        hasFraction = true;
+        break;
+      }
+    case WEEKLY :
+      if (cal.get(Calendar.DAY_OF_WEEK) != 1) {
+        hasFraction = true;
+        break;
+      }
+    case DAILY :
+      if (cal.get(Calendar.HOUR_OF_DAY) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case HOURLY :
+      if (cal.get(Calendar.MINUTE) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case MINUTELY :
+      if (cal.get(Calendar.SECOND) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case SECONDLY :
+      if (cal.get(Calendar.MILLISECOND) != 0) {
+        hasFraction = true;
+        break;
+      }
+    }
+
+    if (hasFraction) {
+      cal.roll(interval.calendarField(), true);
+      return getFloorDate(cal.getTime(), interval);
+    } else {
+      return fromDate;
+    }
+  }
+
+  public static Date getFloorDate(Date toDate, UpdatePeriod interval) {
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(toDate);
+    switch (interval) {
+    case YEARLY :
+      cal.set(Calendar.MONTH, 1);
+    case MONTHLY :
+      cal.set(Calendar.DAY_OF_MONTH, 1);
+    case WEEKLY :
+      cal.set(Calendar.DAY_OF_WEEK, 1);
+    case DAILY :
+      cal.set(Calendar.HOUR_OF_DAY, 0);
+    case HOURLY :
+      cal.set(Calendar.MINUTE, 0);
+    case MINUTELY :
+      cal.set(Calendar.SECOND, 0);
+    case SECONDLY :
+    }
+    return cal.getTime();
+  }
 }
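
Two additions here are worth spelling out. getAbsDateFormatString pads a
partial date up to the full ABSDATE_FMT, so "2013-04" becomes
"2013-04-01 00:00:00,000" before parsing. getCeilDate and getFloorDate rely on
deliberate switch fall-through: entering at the requested granularity cascades
through every finer case. A worked floor example (note the SECONDLY case is
empty, so milliseconds are left untouched):

    Calendar cal = Calendar.getInstance();
    cal.set(2013, Calendar.APRIL, 2, 7, 31, 45);
    // DAILY enters the switch at its own case, then falls through HOURLY,
    // MINUTELY and SECONDLY, zeroing hour, minute and second:
    Date floored = DateUtils.getFloorDate(cal.getTime(), UpdatePeriod.DAILY);
    // floored == 2013-04-02 00:00:00

One caveat to watch: Calendar.MONTH is zero-based in Java, so the YEARLY
branches compare and set month 1, which is February rather than January.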

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java?rev=1463407&r1=1463406&r2=1463407&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java Tue Apr  2 07:00:22 2013
@@ -1,54 +1,77 @@
 package org.apache.hadoop.hive.ql.cube.parse;
 
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeFactTable;
+import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 public class PartitionResolver implements ContextRewriter {
 
   public PartitionResolver(Configuration conf) {
-    // TODO Auto-generated constructor stub
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) {
-    Map<String, List<String>> partitionColMap = new HashMap<String,
-        List<String>>();
-    /*Date fromDate = cubeql.getFromDate();
+  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+    Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartitionMap =
+        new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
+    Date fromDate = cubeql.getFromDate();
     Date toDate = cubeql.getToDate();
 
-    //resolve summary table names, applicable only if query is on fact table
-
     Calendar cal = Calendar.getInstance();
     cal.setTime(fromDate);
 
-    UpdatePeriod interval = null;
     for (CubeFactTable fact : cubeql.getFactTables()) {
-      while ((interval = CubeFactTable.maxIntervalInRange(fromDate, toDate,
-          fact.getUpdatePeriods())) != null) {
-        List<String> partitions = fact.getPartitions(fromDate, toDate,
-            interval);
-        if (partitions != null) {
-          partitionColMap.put(MetastoreUtil.getVirtualFactTableName(
-              fact.getName(), interval), partitions);
-          // Advance from date
-          cal.setTime(fromDate);
-          cal.roll(interval.calendarField(), partitions.size());
-          fromDate = cal.getTime();
-        }
-      }
+      Map<UpdatePeriod, List<String>> partitionColMap =
+          new HashMap<UpdatePeriod, List<String>>();
+      factPartitionMap.put(fact, partitionColMap);
+      getPartitions(fact, fromDate, toDate, partitionColMap);
     }
-    for (CubeDimensionTable dim : cubeql.getDimensionTables()) {
+
+    /*for (CubeDimensionTable dim : cubeql.getDimensionTables()) {
       partitionColMap.put(MetastoreUtil.getVirtualDimTableName(
           dim.getName()), dim.getPartitions());
-    }
+    }*/
 
     // set partition cols map in cubeql
-    //TODO
-     *
-     */
+    cubeql.setFactPartitionMap(factPartitionMap);
   }
 
+  void getPartitions(CubeFactTable fact, Date fromDate, Date toDate,
+      Map<UpdatePeriod, List<String>> partitionColMap)
+          throws SemanticException {
+    if (fromDate.equals(toDate) || fromDate.after(toDate)) {
+      return;
+    }
+
+    UpdatePeriod interval = fact.maxIntervalInRange(fromDate, toDate);
+    if (interval == null) {
+      throw new SemanticException("Could not find a partition for given range:"
+        + fromDate + "-" + toDate);
+    }
+
+    System.out.println("fact: " + fact.getName() + " max interval:" + interval);
+    Date ceilFromDate = DateUtils.getCeilDate(fromDate, interval);
+    Date floorToDate = DateUtils.getFloorDate(toDate, interval);
+    List<String> partitions = fact.getPartitions(ceilFromDate, floorToDate,
+          interval);
+    if (partitions != null) {
+      List<String> parts = partitionColMap.get(interval);
+      if (parts == null) {
+        parts = new ArrayList<String>();
+        partitionColMap.put(interval, parts);
+      }
+      parts.addAll(partitions);
+    }
+    System.out.println("ceilFromDate:" + ceilFromDate);
+    System.out.println("floorToDate:" + floorToDate);
+    getPartitions(fact, fromDate, ceilFromDate, partitionColMap);
+    getPartitions(fact, floorToDate, toDate, partitionColMap);
+  }
 }
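
The resolver's strategy: find the coarsest update period that covers the
middle of the range, take whole partitions between the ceiling of the start
and the floor of the end, then recurse on the two leftover edges with finer
periods. For example, 2013-03-30 22:00 to 2013-04-02 07:00 with DAILY and
HOURLY periods yields daily partitions 2013-03-31 and 2013-04-01, plus hourly
partitions for the two hours before and the seven hours after. A
self-contained sketch of the split, using plain longs for time and hour-sized
units (an assumption made for brevity, not the committed types):

    import java.util.ArrayList;
    import java.util.List;

    class PartitionSplitSketch {
        // units must be sorted coarsest-first, e.g. {24, 1} for day, hour.
        static void split(long from, long to, long[] units, int i,
                List<String> out) {
            if (from >= to || i >= units.length) {
                return;
            }
            long unit = units[i];
            long ceil = ((from + unit - 1) / unit) * unit; // first boundary >= from
            long floor = (to / unit) * unit;               // last boundary <= to
            if (ceil < floor) {
                for (long t = ceil; t < floor; t += unit) {
                    out.add("unit=" + unit + " start=" + t);
                }
                split(from, ceil, units, i + 1, out); // leftover head, finer unit
                split(floor, to, units, i + 1, out);  // leftover tail, finer unit
            } else {
                split(from, to, units, i + 1, out);   // range too small for unit
            }
        }

        public static void main(String[] args) {
            List<String> parts = new ArrayList<String>();
            // 22:00 on day 0 through 07:00 on day 3, measured in hours:
            split(22, 3 * 24 + 7, new long[]{24, 1}, 0, parts);
            for (String p : parts) {
                System.out.println(p);
            }
        }
    }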

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java?rev=1463407&r1=1463406&r2=1463407&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java Tue Apr  2 07:00:22 2013
@@ -1,6 +1,12 @@
 package org.apache.hadoop.hive.ql.cube.parse;
 
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeFactTable;
+import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 public class StorageTableResolver implements ContextRewriter {
@@ -12,8 +18,13 @@ public class StorageTableResolver implem
   @Override
   public void rewriteContext(CubeQueryContext cubeql)
       throws SemanticException {
-    // TODO
+    CubeQueryContextWithStorage cubeqlStorage =
+        (CubeQueryContextWithStorage) cubeql;
+    Map<CubeFactTable, Map<UpdatePeriod, List<String>>> storageTableMap =
+        new HashMap<CubeFactTable, Map<UpdatePeriod,List<String>>>();
     //Find candidate tables wrt supported storages
+
+    cubeqlStorage.setStorageTableMap(storageTableMap);
   }
 
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java?rev=1463407&r1=1463406&r2=1463407&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java Tue Apr  2 07:00:22 2013
@@ -1,12 +1,19 @@
 package org.apache.hadoop.hive.ql.cube.processors;
 
+import java.io.IOException;
 import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.cube.parse.CubeQueryContext;
 import org.apache.hadoop.hive.ql.cube.parse.CubeQueryRewriter;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.parse.ParseUtils;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 public class CubeDriver extends Driver {
 
@@ -18,16 +25,14 @@ public class CubeDriver extends Driver {
     super();
   }
 
+  public static String CUBE_QUERY_PFX = "CUBE ";
+  private Context ctx;
+
   @Override
   public int compile(String command) {
-    // compile the cube query and rewrite it to HQL query
-    CubeQueryRewriter rewriter = new CubeQueryRewriter(getConf());
-    CubeQueryContext finalQuery;
+    String query;
     try {
-      // 1. rewrite query to get summary tables and joins
-      CubeQueryContext phase1Query = rewriter.rewritePhase1(command);
-      finalQuery = rewriter.rewritePhase2(phase1Query,
-          getSupportedStorages(getConf()));
+      query = compileCubeQuery(command.substring(CUBE_QUERY_PFX.length()));
     } catch (Exception e) {
       ErrorMsg error = ErrorMsg.getErrorMsg(e.getMessage());
       errorMessage = "FAILED: " + e.getClass().getSimpleName();
@@ -41,7 +46,23 @@ public class CubeDriver extends Driver {
       return error.getErrorCode();
 
     }
-    return super.compile(finalQuery.toHQL());
+    return super.compile(query);
+  }
+
+  String compileCubeQuery(String query)
+      throws SemanticException, ParseException, IOException {
+    System.out.println("Query :" + query);
+    ctx = new Context(getConf());
+    ParseDriver pd = new ParseDriver();
+    ASTNode tree = pd.parse(query, ctx);
+    tree = ParseUtils.findRootNonNullToken(tree);
+    // compile the cube query and rewrite it to HQL query
+    CubeQueryRewriter rewriter = new CubeQueryRewriter(getConf());
+    // 1. rewrite query to get summary tables and joins
+    CubeQueryContext phase1Query = rewriter.rewritePhase1(tree);
+    CubeQueryContext finalQuery = rewriter.rewritePhase2(phase1Query,
+        getSupportedStorages(getConf()));
+    return finalQuery.toHQL();
   }
 
   private List<String> getSupportedStorages(HiveConf conf) {
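
Note that compile() now assumes every incoming command carries the "CUBE "
prefix; it strips CUBE_QUERY_PFX.length() characters unconditionally before
handing the rest to compileCubeQuery, which parses the query, runs the phase-1
and phase-2 rewriters, and compiles the resulting HQL through the regular
Driver. A hedged usage sketch (testCube, msr2 and time_range_in come from the
tests in this commit):

    CubeDriver driver = new CubeDriver(new HiveConf());
    int rc = driver.compile("CUBE select SUM(msr2) from testCube"
        + " where time_range_in('NOW - 2DAYS', 'NOW')");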

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/metadata/TestCubeMetastoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/metadata/TestCubeMetastoreClient.java?rev=1463407&r1=1463406&r2=1463407&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/metadata/TestCubeMetastoreClient.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/metadata/TestCubeMetastoreClient.java Tue Apr  2 07:00:22 2013
@@ -97,6 +97,7 @@ public class TestCubeMetastoreClient {
     client.createCube(cubeName, cubeMeasures, cubeDimensions);
     Assert.assertTrue(client.tableExists(cubeName));
     Table cubeTbl = client.getHiveTable(cubeName);
+    Assert.assertTrue(client.isCube(cubeTbl));
     Cube cube2 = new Cube(cubeTbl);
     Assert.assertTrue(cube.equals(cube2));
   }
@@ -134,6 +135,8 @@ public class TestCubeMetastoreClient {
         storageAggregatePeriods);
     Assert.assertTrue(client.tableExists(factName));
     Table cubeTbl = client.getHiveTable(factName);
+    Assert.assertTrue(client.isFactTable(cubeTbl));
+    Assert.assertTrue(client.isFactTableForCube(cubeTbl, cube));
     CubeFactTable cubeFact2 = new CubeFactTable(cubeTbl);
     Assert.assertTrue(cubeFact.equals(cubeFact2));
 
@@ -191,6 +194,8 @@ public class TestCubeMetastoreClient {
         storageAggregatePeriods);
     Assert.assertTrue(client.tableExists(factNameWithPart));
     Table cubeTbl = client.getHiveTable(factNameWithPart);
+    Assert.assertTrue(client.isFactTable(cubeTbl));
+    Assert.assertTrue(client.isFactTableForCube(cubeTbl, cube));
     CubeFactTable cubeFact2 = new CubeFactTable(cubeTbl);
     Assert.assertTrue(cubeFactWithParts.equals(cubeFact2));
 
@@ -258,6 +263,8 @@ public class TestCubeMetastoreClient {
         storageAggregatePeriods);
     Assert.assertTrue(client.tableExists(factName));
     Table cubeTbl = client.getHiveTable(factName);
+    Assert.assertTrue(client.isFactTable(cubeTbl));
+    Assert.assertTrue(client.isFactTableForCube(cubeTbl, cube));
     CubeFactTable cubeFact2 = new CubeFactTable(cubeTbl);
     Assert.assertTrue(cubeFactWithTwoStorages.equals(cubeFact2));
 
@@ -316,6 +323,7 @@ public class TestCubeMetastoreClient {
         snapshotDumpPeriods);
     Assert.assertTrue(client.tableExists(dimName));
     Table cubeTbl = client.getHiveTable(dimName);
+    Assert.assertTrue(client.isDimensionTable(cubeTbl));
     CubeDimensionTable cubeDim2 = new CubeDimensionTable(cubeTbl);
     Assert.assertTrue(cubeDim.equals(cubeDim2));
 
@@ -357,6 +365,7 @@ public class TestCubeMetastoreClient {
         storages);
     Assert.assertTrue(client.tableExists(dimName));
     Table cubeTbl = client.getHiveTable(dimName);
+    Assert.assertTrue(client.isDimensionTable(cubeTbl));
     CubeDimensionTable cubeDim2 = new CubeDimensionTable(cubeTbl);
     Assert.assertTrue(cubeDim.equals(cubeDim2));
 
@@ -401,6 +410,7 @@ public class TestCubeMetastoreClient {
         snapshotDumpPeriods);
     Assert.assertTrue(client.tableExists(dimName));
     Table cubeTbl = client.getHiveTable(dimName);
+    Assert.assertTrue(client.isDimensionTable(cubeTbl));
     CubeDimensionTable cubeDim2 = new CubeDimensionTable(cubeTbl);
     Assert.assertTrue(cubeDim.equals(cubeDim2));
 

Added: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java?rev=1463407&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java (added)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java Tue Apr  2 07:00:22 2013
@@ -0,0 +1,123 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.cube.metadata.BaseDimension;
+import org.apache.hadoop.hive.ql.cube.metadata.ColumnMeasure;
+import org.apache.hadoop.hive.ql.cube.metadata.Cube;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeDimension;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeFactTable;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeMeasure;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeMetastoreClient;
+import org.apache.hadoop.hive.ql.cube.metadata.ExprMeasure;
+import org.apache.hadoop.hive.ql.cube.metadata.HDFSStorage;
+import org.apache.hadoop.hive.ql.cube.metadata.HierarchicalDimension;
+import org.apache.hadoop.hive.ql.cube.metadata.InlineDimension;
+import org.apache.hadoop.hive.ql.cube.metadata.ReferencedDimension;
+import org.apache.hadoop.hive.ql.cube.metadata.Storage;
+import org.apache.hadoop.hive.ql.cube.metadata.TableReference;
+import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
+import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+public class CubeTestSetup {
+
+  private Cube cube;
+  private Set<CubeMeasure> cubeMeasures;
+  private Set<CubeDimension> cubeDimensions;
+  private final String cubeName = "testCube";
+
+  private void createCube(CubeMetastoreClient client) throws HiveException {
+    cubeMeasures = new HashSet<CubeMeasure>();
+    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr1", "int",
+        "first measure")));
+    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr2", "float",
+        "second measure"),
+        null, "SUM", "RS"));
+    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr3", "double",
+        "third measure"),
+        null, "MAX", null));
+    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr4", "bigint",
+        "fourth measure"),
+        null, "COUNT", null));
+    cubeMeasures.add(new ExprMeasure(new FieldSchema("msr5", "double",
+        "fifth measure"),
+        "avg(msr1 + msr2)"));
+    cubeMeasures.add(new ExprMeasure(new FieldSchema("msr6", "bigint",
+        "sixth measure"),
+        "(msr1 + msr2)/ msr4", "", "SUM", "RS"));
+
+    cubeDimensions = new HashSet<CubeDimension>();
+    List<CubeDimension> locationHierarchy = new ArrayList<CubeDimension>();
+    locationHierarchy.add(new ReferencedDimension(new FieldSchema("zipcode",
+        "int", "zip"), new TableReference("ziptable", "zipcode")));
+    locationHierarchy.add(new ReferencedDimension(new FieldSchema("cityid",
+        "int", "city"), new TableReference("citytable", "id")));
+    locationHierarchy.add(new ReferencedDimension(new FieldSchema("stateid",
+        "int", "state"), new TableReference("statetable", "id")));
+    locationHierarchy.add(new ReferencedDimension(new FieldSchema("countryid",
+        "int", "country"), new TableReference("countrytable", "id")));
+    List<String> regions = Arrays.asList("APAC", "EMEA", "USA");
+    locationHierarchy.add(new InlineDimension(new FieldSchema("regionname",
+        "string", "region"), regions));
+
+    cubeDimensions.add(new HierarchicalDimension("location", locationHierarchy));
+    cubeDimensions.add(new BaseDimension(new FieldSchema("dim1", "string",
+        "basedim")));
+    cubeDimensions.add(new ReferencedDimension(
+            new FieldSchema("dim2", "string", "ref dim"),
+            new TableReference("testdim2", "id")));
+    cubeDimensions.add(new InlineDimension(
+            new FieldSchema("region", "string", "region dim"), regions));
+    cube = new Cube(cubeName, cubeMeasures, cubeDimensions);
+    client.createCube(cubeName, cubeMeasures, cubeDimensions);
+  }
+
+  private void createCubeFact(CubeMetastoreClient client) throws HiveException {
+    String factName = "testFact";
+    List<FieldSchema> factColumns = new ArrayList<FieldSchema>(
+        cubeMeasures.size());
+    for (CubeMeasure measure : cubeMeasures) {
+      factColumns.add(measure.getColumn());
+    }
+
+    // add one dimension of the cube
+    factColumns.add(new FieldSchema("zipcode","int", "zip"));
+
+    Map<String, List<UpdatePeriod>> updatePeriods =
+        new HashMap<String, List<UpdatePeriod>>();
+    Map<Storage, List<UpdatePeriod>> storageAggregatePeriods =
+        new HashMap<Storage, List<UpdatePeriod>>();
+    List<UpdatePeriod> updates  = new ArrayList<UpdatePeriod>();
+    updates.add(UpdatePeriod.HOURLY);
+    updates.add(UpdatePeriod.DAILY);
+    Storage hdfsStorage = new HDFSStorage("C1",
+        TextInputFormat.class.getCanonicalName(),
+        HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    storageAggregatePeriods.put(hdfsStorage, updates);
+    updatePeriods.put(hdfsStorage.getName(), updates);
+
+    CubeFactTable cubeFact = new CubeFactTable(cubeName, factName, factColumns,
+        updatePeriods);
+    // create cube fact
+    client.createCubeFactTable(cubeName, factName, factColumns,
+        storageAggregatePeriods);
+  }
+
+  public void createSources() throws Exception {
+    CubeMetastoreClient client =  CubeMetastoreClient.getInstance(
+        new HiveConf(this.getClass()));
+    createCube(client);
+    createCubeFact(client);
+  }
+
+}

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java?rev=1463407&r1=1463406&r2=1463407&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java Tue Apr  2 07:00:22 2013
@@ -3,15 +3,21 @@ package org.apache.hadoop.hive.ql.cube.p
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.junit.Before;
 import org.junit.Test;
 
 public class TestCubeSemanticAnalyzer {
-  Configuration conf = new Configuration();
+  private final Configuration conf = new Configuration();
 
-  CubeSemanticAnalyzer analyzer;
+  private final CubeSemanticAnalyzer analyzer;
 
   ASTNode astRoot;
 
+  public TestCubeSemanticAnalyzer() throws Exception {
+    analyzer = new CubeSemanticAnalyzer(
+        new HiveConf(conf, HiveConf.class));
+  }
+
   String queries[] = { "SELECT t1.c1 rsalias0, f(t1.c2) rsalias1," +
   		" (t2.c3 + t2.c4) rsalias2, avg(fc5/fc6) * fc7 " +
       " FROM facttab t1" +
@@ -20,13 +26,28 @@ public class TestCubeSemanticAnalyzer {
       " GROUP BY t1.ca, t1.cb" +
       " HAVING t2.c3 > 100" +
       " ORDER BY t3.ca, t4.cb" +
-      " LIMIT 100"
+      " LIMIT 100",
+      "SELECT count(*) FROM TAB2 WHERE time_range_in('NOW - 1MONTH', 'NOW')"
   };
 
-  @Test
+  @Before
+  public void setup() throws Exception {
+    CubeTestSetup setup = new CubeTestSetup();
+    setup.createSources();
+  }
+
+  //@Test
   public void testSemnaticAnalyzer() throws Exception {
-    analyzer = new CubeSemanticAnalyzer(new HiveConf(conf, HiveConf.class));
     astRoot = HQLParser.parseHQL(queries[0]);
     analyzer.analyzeInternal(astRoot);
   }
+
+  @Test
+  public void testSimpleQuery() throws Exception {
+    astRoot = HQLParser.parseHQL("select SUM(msr2) from testCube where time_range_in('NOW - 2DAYS', 'NOW')");
+    analyzer.analyzeInternal(astRoot);
+    CubeQueryContext cubeql = analyzer.getQueryContext();
+    //System.out.println("cube hql:" + cubeql.toHQL());
+    //Assert.assertEquals(queries[1], cubeql.toHQL());
+  }
 }

Added: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java?rev=1463407&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java (added)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java Tue Apr  2 07:00:22 2013
@@ -0,0 +1,59 @@
+package org.apache.hadoop.hive.ql.cube.processors;
+
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.cube.parse.CubeTestSetup;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestCubeDriver {
+
+  private static CubeDriver driver;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    CubeTestSetup setup = new CubeTestSetup();
+    setup.createSources();
+    driver = new CubeDriver(new HiveConf());
+  }
+
+  public static String HOUR_FMT = "yyyy-MM-dd HH";
+  public static final SimpleDateFormat HOUR_PARSER = new SimpleDateFormat(HOUR_FMT);
+
+  public static String getDateUptoHours(Date dt) {
+    return HOUR_PARSER.format(dt);
+  }
+
+  @Test
+  public void testSimpleQuery1() throws Exception {
+    Throwable th = null;
+    try {
+      String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
+    		" where time_range_in('NOW - 2DAYS', 'NOW')");
+    } catch (SemanticException e) {
+      th = e;
+      e.printStackTrace();
+    }
+    Assert.assertNotNull(th);
+  }
+
+  @Test
+  public void testSimpleQuery2() throws Exception {
+    Calendar cal = Calendar.getInstance();
+    Date now = cal.getTime();
+    cal.add(Calendar.DAY_OF_MONTH, -2);
+    Date twodaysBack = cal.getTime();
+    System.out.println("Test from:" + getDateUptoHours(twodaysBack) + " to:" + getDateUptoHours(now));
+    String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
+        " where time_range_in('" + getDateUptoHours(twodaysBack)
+        + "','" + getDateUptoHours(now) + "')");
+    System.out.println("cube hql:" + hqlQuery);
+    //Assert.assertEquals(queries[1], cubeql.toHQL());
+  }
+
+}