Posted to commits@hive.apache.org by am...@apache.org on 2013/04/29 13:51:16 UTC

svn commit: r1476977 - in /hive/branches/HIVE-4115/ql/src: java/org/apache/hadoop/hive/ql/cube/metadata/ java/org/apache/hadoop/hive/ql/cube/parse/ test/org/apache/hadoop/hive/ql/cube/parse/ test/org/apache/hadoop/hive/ql/cube/processors/

Author: amareshwari
Date: Mon Apr 29 11:51:15 2013
New Revision: 1476977

URL: http://svn.apache.org/r1476977
Log:
Add support for QUARTERLY update period

Added:
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestDateUtil.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestMaxUpdateInterval.java
Removed:
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtils.java
Modified:
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java
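
For orientation: the change lets a fact table list QUARTERLY among its storage update periods, and teaches CubeFactTable.maxIntervalInRange to consider MONTHLY, QUARTERLY and YEARLY only when at least one full such period fits between the queried dates. A minimal sketch of the new constant in use, assuming the branch classes below are on the classpath (the class name QuarterlySketch is made up for illustration):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;

    public class QuarterlySketch {
      public static void main(String[] args) {
        // A storage can now advertise QUARTERLY alongside the existing periods,
        // as CubeTestSetup does further down in this commit.
        List<UpdatePeriod> updates = new ArrayList<UpdatePeriod>();
        updates.add(UpdatePeriod.MONTHLY);
        updates.add(UpdatePeriod.QUARTERLY);
        updates.add(UpdatePeriod.YEARLY);
        // QUARTERLY is weighted as three MONTHLY periods (see UpdatePeriod.java below).
        System.out.println(UpdatePeriod.QUARTERLY.weight() == 3 * UpdatePeriod.MONTHLY.weight());
      }
    }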

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java?rev=1476977&r1=1476976&r2=1476977&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java Mon Apr 29 11:51:15 2013
@@ -11,6 +11,8 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod.UpdatePeriodComparator;
+import org.apache.hadoop.hive.ql.cube.parse.DateUtil;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
 public final class CubeFactTable extends AbstractCubeTable {
@@ -134,27 +136,48 @@ public final class CubeFactTable extends
     }
   }
 
+
   public UpdatePeriod maxIntervalInRange(Date from, Date to) {
+    UpdatePeriod max = null;
+
     long diff = to.getTime() - from.getTime();
     if (diff < UpdatePeriod.MIN_INTERVAL) {
       return null;
     }
-    UpdatePeriod max = null;
-    long minratio = diff / UpdatePeriod.MIN_INTERVAL;
 
     Set<UpdatePeriod> updatePeriods = new HashSet<UpdatePeriod>();
+
     for (List<UpdatePeriod> value : storageUpdatePeriods.values()) {
       updatePeriods.addAll(value);
     }
+
+    // For periods below MONTHLY, comparing millisecond weights is sufficient.
+    // For MONTHLY and above, check that at least one full update period lies between the dates.
+    UpdatePeriodComparator cmp = new UpdatePeriodComparator();
     for (UpdatePeriod i : updatePeriods) {
-      long tmpratio = diff / i.weight();
-      if (tmpratio == 0) {
-        // Interval larger than date difference
-        continue;
-      }
-      if (minratio > tmpratio) {
-        minratio = tmpratio;
-        max = i;
+      if (UpdatePeriod.YEARLY == i || UpdatePeriod.QUARTERLY == i || UpdatePeriod.MONTHLY == i) {
+        int intervals = 0;
+        switch (i) {
+        case YEARLY:  intervals = DateUtil.getYearsBetween(from, to); break;
+        case QUARTERLY: intervals = DateUtil.getQuartersBetween(from, to); break;
+        case MONTHLY: intervals = DateUtil.getMonthsBetween(from, to); break;
+        }
+
+        if (intervals > 0) {
+          if (cmp.compare(i, max) > 0) {
+            max = i;
+          }
+        }
+      } else {
+        // Below MONTHLY, we can use weight to find out the correct period
+        if (diff < i.weight()) {
+          // interval larger than time diff
+          continue;
+        }
+
+        if (cmp.compare(i, max) > 0) {
+          max = i;
+        }
       }
     }
     return max;
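
A condensed restatement of the selection rule above, for readers skimming the hunk (illustration only, not part of the patch; the class and method names are hypothetical, and the branch's UpdatePeriod and DateUtil classes are assumed). For MONTHLY, QUARTERLY and YEARLY a period is eligible only if at least one full interval lies between the dates; below MONTHLY the millisecond weight is compared directly; the eligible period with the largest weight wins, per UpdatePeriodComparator.

    import java.util.Date;

    import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
    import org.apache.hadoop.hive.ql.cube.parse.DateUtil;

    // Hypothetical helper mirroring the eligibility test inside maxIntervalInRange.
    public class MaxIntervalSketch {
      static boolean eligible(UpdatePeriod p, Date from, Date to) {
        switch (p) {
        case YEARLY:    return DateUtil.getYearsBetween(from, to) > 0;
        case QUARTERLY: return DateUtil.getQuartersBetween(from, to) > 0;
        case MONTHLY:   return DateUtil.getMonthsBetween(from, to) > 0;
        default:        return (to.getTime() - from.getTime()) >= p.weight();
        }
      }
    }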

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java?rev=1476977&r1=1476976&r2=1476977&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java Mon Apr 29 11:51:15 2013
@@ -1,19 +1,20 @@
 package org.apache.hadoop.hive.ql.cube.metadata;
 
 import java.util.Calendar;
+import java.util.Comparator;
 import java.util.Date;
 
-import org.apache.hadoop.hive.ql.cube.parse.DateUtils;
+import org.apache.hadoop.hive.ql.cube.parse.DateUtil;
 
 
-public enum UpdatePeriod implements Named {
+public enum UpdatePeriod implements Named{
   SECONDLY (Calendar.SECOND, 1000, "yyyy-MM-dd-HH-mm-ss"),
   MINUTELY (Calendar.MINUTE, 60 * SECONDLY.weight(), "yyyy-MM-dd-HH-mm"),
   HOURLY (Calendar.HOUR_OF_DAY, 60 * MINUTELY.weight(), "yyyy-MM-dd-HH"),
   DAILY (Calendar.DAY_OF_MONTH, 24 * HOURLY.weight(), "yyyy-MM-dd"),
   WEEKLY (Calendar.WEEK_OF_YEAR, 7 * DAILY.weight(), "yyyy-'W'ww-u"),
   MONTHLY (Calendar.MONTH, 30 * DAILY.weight(), "yyyy-MM"),
-  //QUARTERLY (Calendar.MONTH, 3 * MONTHLY.weight(), "YYYY-MM"),
+  QUARTERLY (Calendar.MONTH, 3 * MONTHLY.weight(), "yyyy-M"),
   YEARLY (Calendar.YEAR, 12 * MONTHLY.weight(), "yyyy");
 
   public static final long MIN_INTERVAL = SECONDLY.weight();
@@ -36,7 +37,7 @@ public enum UpdatePeriod implements Name
   }
 
   public long monthWeight(Date date) {
-    return DateUtils.getNumberofDaysInMonth(date) * DAILY.weight();
+    return DateUtil.getNumberofDaysInMonth(date) * DAILY.weight();
   }
 
   public String format() {
@@ -47,4 +48,25 @@ public enum UpdatePeriod implements Name
   public String getName() {
     return name();
   }
+
+  public static class UpdatePeriodComparator implements Comparator<UpdatePeriod> {
+    @Override
+    public int compare(UpdatePeriod o1, UpdatePeriod o2) {
+      if (o1 == null && o2 != null) {
+        return -1;
+      } else if (o1 != null && o2 == null) {
+        return 1;
+      } else if (o1 == null && o2 == null) {
+        return 0;
+      } else {
+        if (o1.weight > o2.weight) {
+          return 1;
+        } else if (o1.weight < o2.weight) {
+          return -1;
+        } else {
+          return 0;
+        }
+      }
+    }
+  }
 }
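
A small usage example of the new comparator (not part of the patch; ComparatorSketch is a made-up name). It orders periods by weight and treats null as smaller than any period, which is what lets maxIntervalInRange seed its running maximum with null:

    import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
    import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod.UpdatePeriodComparator;

    public class ComparatorSketch {
      public static void main(String[] args) {
        UpdatePeriodComparator cmp = new UpdatePeriodComparator();
        // QUARTERLY weighs 3 * MONTHLY, so it compares greater than MONTHLY.
        System.out.println(cmp.compare(UpdatePeriod.QUARTERLY, UpdatePeriod.MONTHLY)); // 1
        // Any non-null period compares greater than null.
        System.out.println(cmp.compare(UpdatePeriod.MONTHLY, null));                   // 1
      }
    }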

Added: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java?rev=1476977&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java (added)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java Mon Apr 29 11:51:15 2013
@@ -0,0 +1,292 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.lang.time.DateUtils;
+import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.log4j.Logger;
+
+
+
+public class DateUtil {
+  public static final Logger LOG = Logger.getLogger(DateUtil.class);
+
+  /*
+   * NOW -> new java.util.Date()
+   * NOW-7DAY -> a date one week earlier
+   * NOW (+-) <NUM>UNIT
+   * or hardcoded absolute dates in yyyy-MM-dd HH:mm:ss,SSS (see ABSDATE_FMT)
+   */
+  public static final String RELATIVE = "(now){1}";
+  public static final Pattern P_RELATIVE = Pattern.compile(RELATIVE, Pattern.CASE_INSENSITIVE);
+
+  public static final String WSPACE = "\\s+";
+  public static final Pattern P_WSPACE = Pattern.compile(WSPACE);
+
+  public static final String SIGNAGE = "\\+|\\-";
+  public static final Pattern P_SIGNAGE = Pattern.compile(SIGNAGE);
+
+  public static final String QUANTITY = "\\d+";
+  public static final Pattern P_QUANTITY = Pattern.compile(QUANTITY);
+
+  public static final String UNIT = "year|month|week|day|hour|minute|second";
+  public static final Pattern P_UNIT = Pattern.compile(UNIT, Pattern.CASE_INSENSITIVE);
+
+
+  public static final String RELDATE_VALIDATOR_STR = RELATIVE
+      + "(" + WSPACE + ")?"
+      + "((" + SIGNAGE +")"
+      + "(" + WSPACE + ")?"
+      + "(" + QUANTITY + ")(" + UNIT + ")){0,1}"
+      +"(s?)";
+
+  public static final Pattern RELDATE_VALIDATOR = Pattern.compile(RELDATE_VALIDATOR_STR, Pattern.CASE_INSENSITIVE);
+
+  public static String YEAR_FMT = "[0-9]{4}";
+  public static String MONTH_FMT = YEAR_FMT + "-[0-9]{2}";
+  public static String DAY_FMT = MONTH_FMT + "-[0-9]{2}";
+  public static String HOUR_FMT = DAY_FMT + " [0-9]{2}";
+  public static String MINUTE_FMT = HOUR_FMT + ":[0-9]{2}";
+  public static String SECOND_FMT = MINUTE_FMT + ":[0-9]{2}";
+  public static final String ABSDATE_FMT = "yyyy-MM-dd HH:mm:ss,SSS";
+  public static final SimpleDateFormat ABSDATE_PARSER = new SimpleDateFormat(ABSDATE_FMT);
+
+  public static String formatDate(Date dt) {
+    return ABSDATE_PARSER.format(dt);
+  }
+
+  public static String getAbsDateFormatString(String str) {
+    if (str.matches(YEAR_FMT)) {
+      return str + "-01-01 00:00:00,000";
+    } else if (str.matches(MONTH_FMT)) {
+      return str + "-01 00:00:00,000";
+    } else if (str.matches(DAY_FMT)) {
+      return str + " 00:00:00,000";
+    } else if (str.matches(HOUR_FMT)) {
+      return str + ":00:00,000";
+    } else if (str.matches(MINUTE_FMT)) {
+      return str + ":00,000";
+    } else if (str.matches(SECOND_FMT)) {
+      return str + ",000";
+    } else if (str.matches(ABSDATE_FMT)) {
+      return str;
+    }
+    throw new IllegalArgumentException("Unsupported formatting for date: " + str);
+  }
+
+  public static Date resolveDate(String str, Date now) throws HiveException {
+    if (RELDATE_VALIDATOR.matcher(str).matches()) {
+      return resolveRelativeDate(str, now);
+    } else {
+      try {
+        return ABSDATE_PARSER.parse(getAbsDateFormatString(str));
+      } catch (ParseException e) {
+        LOG.error("Invalid date format. expected only " + ABSDATE_FMT
+            + " date provided:" + str, e);
+        throw new HiveException("Date parsing error. expected format "
+            + ABSDATE_FMT
+            + ", date provided: " + str
+            + ", failed because: " + e.getMessage());
+      }
+    }
+  }
+
+  private static Date resolveRelativeDate(String str, Date now) throws HiveException {
+    if (str == null || str.isEmpty()) {
+      throw new HiveException("date value cannot be null or empty: " + str);
+    }
+    // Get rid of whitespace
+    String raw = str.replaceAll(WSPACE, "").replaceAll(RELATIVE, "");
+
+    if (raw.isEmpty()) { // String is just "now"
+      return now;
+    }
+
+    Matcher qtyMatcher = P_QUANTITY.matcher(raw);
+    int qty = 1;
+    if (qtyMatcher.find()) {
+      qty =  Integer.parseInt(qtyMatcher.group());
+    }
+
+    Matcher signageMatcher = P_SIGNAGE.matcher(raw);
+    if (signageMatcher.find()) {
+      String sign = signageMatcher.group();
+      if ("-".equals(sign)) {
+        qty = -qty;
+      }
+    }
+
+    Matcher unitMatcher = P_UNIT.matcher(raw);
+    Calendar calendar = Calendar.getInstance();
+    calendar.setTime(now);
+
+    if (unitMatcher.find()) {
+      String unit = unitMatcher.group().toLowerCase();
+      if ("year".equals(unit)) {
+        calendar.add(Calendar.YEAR, qty);
+      } else if ("month".equals(unit)) {
+        calendar.add(Calendar.MONTH, qty);
+      } else if ("week".equals(unit)) {
+        calendar.add(Calendar.DAY_OF_MONTH, 7 * qty);
+      } else if ("day".equals(unit)) {
+        calendar.add(Calendar.DAY_OF_MONTH, qty);
+      } else if ("hour".equals(unit)) {
+        calendar.add(Calendar.HOUR_OF_DAY, qty);
+      } else if ("minute".equals(unit)) {
+        calendar.add(Calendar.MINUTE, qty);
+      } else if ("second".equals(unit)) {
+        calendar.add(Calendar.SECOND, qty);
+      } else {
+        throw new HiveException("invalid time unit: "+ unit);
+      }
+    }
+
+    return calendar.getTime();
+  }
+
+  public static Date getCeilDate(Date fromDate, UpdatePeriod interval) {
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(fromDate);
+    boolean hasFraction = false;
+    switch (interval) {
+    case YEARLY :
+      if (cal.get(Calendar.MONTH) != Calendar.JANUARY) {
+        hasFraction = true;
+        break;
+      }
+    case MONTHLY :
+      if (cal.get(Calendar.DAY_OF_MONTH) != 1) {
+        hasFraction = true;
+        break;
+      }
+    case WEEKLY :
+      if (cal.get(Calendar.DAY_OF_WEEK) != 1) {
+        hasFraction = true;
+        break;
+      }
+    case DAILY :
+      if (cal.get(Calendar.HOUR_OF_DAY) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case HOURLY :
+      if (cal.get(Calendar.MINUTE) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case MINUTELY :
+      if (cal.get(Calendar.SECOND) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case SECONDLY :
+      if (cal.get(Calendar.MILLISECOND) != 0) {
+        hasFraction = true;
+        break;
+      }
+    }
+
+    if (hasFraction) {
+      cal.add(interval.calendarField(), 1);
+      return getFloorDate(cal.getTime(), interval);
+    } else {
+      return fromDate;
+    }
+  }
+
+  public static Date getFloorDate(Date toDate, UpdatePeriod interval) {
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(toDate);
+    switch (interval) {
+    case YEARLY :
+      cal.set(Calendar.MONTH, Calendar.JANUARY);
+    case MONTHLY :
+      cal.set(Calendar.DAY_OF_MONTH, 1);
+    case DAILY :
+      cal.set(Calendar.HOUR_OF_DAY, 0);
+    case HOURLY :
+      cal.set(Calendar.MINUTE, 0);
+    case MINUTELY :
+      cal.set(Calendar.SECOND, 0);
+    case SECONDLY :
+      break;
+    case WEEKLY :
+      cal.set(Calendar.DAY_OF_WEEK, 1);
+      cal.set(Calendar.HOUR_OF_DAY, 0);
+      cal.set(Calendar.MINUTE, 0);
+      cal.set(Calendar.SECOND, 0);
+      break;
+    }
+    System.out.println("Date:" + toDate + " Floordate for interval:" + interval + " is " + cal.getTime());
+    return cal.getTime();
+  }
+
+  public static int getNumberofDaysInMonth(Date date) {
+    Calendar calendar = Calendar.getInstance();
+    calendar.setTime(date);
+    return calendar.getActualMaximum(Calendar.DAY_OF_MONTH);
+  }
+
+  public static int getMonthsBetween(Date from, Date to) {
+    from = DateUtils.round(from, Calendar.MONTH);
+    to = DateUtils.truncate(to, Calendar.MONTH);
+
+    int months = 0;
+    from = DateUtils.addMonths(from, 1);
+
+    while (to.after(from)) {
+      from = DateUtils.addMonths(from, 1);
+      months++;
+    }
+    return months;
+  }
+
+  public static int getQuartersBetween(Date from, Date to) {
+
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(from);
+    int fromQtr = cal.get(Calendar.MONTH) / 3 + 1;
+    int fromYear = cal.get(Calendar.YEAR);
+    cal.setTime(to);
+    int toQtr = cal.get(Calendar.MONTH) / 3 + 1;
+    int toYear = cal.get(Calendar.YEAR);
+
+
+    if (fromYear == toYear) {
+      if (fromQtr == toQtr) {
+        return 0;
+      } else {
+        return toQtr - fromQtr - 1;
+      }
+    } else {
+      from = DateUtils.round(from, Calendar.YEAR);
+      to = DateUtils.truncate(to, Calendar.YEAR);
+      int quarters = 0;
+      from = DateUtils.addYears(from, 1);
+      while (to.after(from)) {
+        from = DateUtils.addYears(from, 1);
+        quarters += 4;
+      }
+      return quarters + (4 - fromQtr) + (toQtr - 1);
+    }
+  }
+
+  public static int getYearsBetween(Date from, Date to) {
+    from = DateUtils.round(from, Calendar.YEAR);
+    to = DateUtils.truncate(to, Calendar.YEAR);
+    int years = 0;
+    from = DateUtils.addYears(from, 1);
+
+    while (to.after(from)) {
+      from = DateUtils.addYears(from, 1);
+      years++;
+    }
+    return years;
+  }
+}
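
A quick usage sketch of the new counting and resolution helpers (illustration only; DateUtilSketch and the dates are arbitrary). Note that getMonthsBetween and getQuartersBetween count only the complete periods that fit between the two dates, which is why the first call prints 10 rather than 11; the expected values match TestDateUtil further down.

    import java.text.SimpleDateFormat;
    import java.util.Date;

    import org.apache.hadoop.hive.ql.cube.parse.DateUtil;

    public class DateUtilSketch {
      public static void main(String[] args) throws Exception {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MMM-dd");
        Date jan1 = fmt.parse("2013-Jan-01");
        Date dec31 = fmt.parse("2013-Dec-31");
        // Only full months that fit inside the range are counted: prints 10.
        System.out.println(DateUtil.getMonthsBetween(jan1, dec31));
        // Likewise for quarters: prints 2 for the same range.
        System.out.println(DateUtil.getQuartersBetween(jan1, dec31));
        // Relative dates are resolved against a supplied anchor: one week before dec31.
        System.out.println(DateUtil.resolveDate("now-7day", dec31));
      }
    }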

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java?rev=1476977&r1=1476976&r2=1476977&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java Mon Apr 29 11:51:15 2013
@@ -53,8 +53,8 @@ public class PartitionResolver implement
     }
 
     System.out.println("fact: " + fact.getName() + " max interval:" + interval);
-    Date ceilFromDate = DateUtils.getCeilDate(fromDate, interval);
-    Date floorToDate = DateUtils.getFloorDate(toDate, interval);
+    Date ceilFromDate = DateUtil.getCeilDate(fromDate, interval);
+    Date floorToDate = DateUtil.getFloorDate(toDate, interval);
     List<String> partitions = fact.getPartitions(ceilFromDate, floorToDate,
         interval);
     if (partitions != null) {
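
The resolver snaps the requested time range to whole update periods: the start is ceiled up to the next period boundary and the end is floored down to the previous one, and partitions are fetched for that narrowed range. A hedged illustration of the two helpers (FloorCeilSketch and the timestamp are made up):

    import java.text.SimpleDateFormat;
    import java.util.Date;

    import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
    import org.apache.hadoop.hive.ql.cube.parse.DateUtil;

    public class FloorCeilSketch {
      public static void main(String[] args) throws Exception {
        Date d = new SimpleDateFormat("yyyy-MM-dd HH:mm").parse("2013-04-17 09:30");
        // Ceil rolls a partial month forward to the next boundary: 2013-05-01 00:00.
        System.out.println(DateUtil.getCeilDate(d, UpdatePeriod.MONTHLY));
        // Floor truncates back to the start of the current month: 2013-04-01 00:00.
        System.out.println(DateUtil.getFloorDate(d, UpdatePeriod.MONTHLY));
      }
    }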

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java?rev=1476977&r1=1476976&r2=1476977&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java Mon Apr 29 11:51:15 2013
@@ -99,6 +99,8 @@ public class CubeTestSetup {
     updates.add(UpdatePeriod.HOURLY);
     updates.add(UpdatePeriod.DAILY);
     updates.add(UpdatePeriod.MONTHLY);
+    updates.add(UpdatePeriod.QUARTERLY);
+    updates.add(UpdatePeriod.YEARLY);
     Storage hdfsStorage = new HDFSStorage("C1",
         TextInputFormat.class.getCanonicalName(),
         HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
@@ -156,7 +158,7 @@ public class CubeTestSetup {
     Map<Storage, UpdatePeriod> snapshotDumpPeriods =
         new HashMap<Storage, UpdatePeriod>();
     snapshotDumpPeriods.put(hdfsStorage1, UpdatePeriod.HOURLY);
-    snapshotDumpPeriods.put(hdfsStorage2, null);
+    snapshotDumpPeriods.put(hdfsStorage2, UpdatePeriod.MINUTELY);
     client.createCubeDimensionTable(dimName, dimColumns, dimensionReferences,
         snapshotDumpPeriods);
   }

Added: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestDateUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestDateUtil.java?rev=1476977&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestDateUtil.java (added)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestDateUtil.java Mon Apr 29 11:51:15 2013
@@ -0,0 +1,105 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import junit.framework.Assert;
+
+import org.junit.Test;
+
+/**
+ * Unit tests for cube DateUtil class
+ * TestDateUtil.
+ *
+ */
+public class TestDateUtil {
+  public static final String[] testpairs = {
+    "2013-Jan-01", "2013-Jan-31",
+    "2013-Jan-01", "2013-May-31",
+    "2013-Jan-01", "2013-Dec-31",
+    "2013-Feb-01", "2013-Apr-25",
+    "2012-Feb-01", "2013-Feb-01",
+    "2011-Feb-01", "2013-Feb-01"
+  };
+
+  public static final SimpleDateFormat DATE_FMT = new SimpleDateFormat("yyyy-MMM-dd");
+
+  private final Date pairs[];
+
+  public TestDateUtil() {
+    pairs = new Date[testpairs.length];
+    for (int i = 0; i < testpairs.length; i++) {
+      try {
+        pairs[i] = DATE_FMT.parse(testpairs[i]);
+      } catch (ParseException e) {
+        e.printStackTrace();
+      }
+    }
+  }
+
+  @Test
+  public void testMonthsBetween() throws Exception {
+    int i = 0;
+    Assert.assertEquals(0, DateUtil.getMonthsBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(3, DateUtil.getMonthsBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(10, DateUtil.getMonthsBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(1, DateUtil.getMonthsBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(11, DateUtil.getMonthsBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(23, DateUtil.getMonthsBetween(pairs[i], pairs[i+1]));
+  }
+
+  @Test
+  public void testQuartersBetween() throws Exception {
+    int i = 0;
+    Assert.assertEquals(0, DateUtil.getQuartersBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(0, DateUtil.getQuartersBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(2, DateUtil.getQuartersBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(0, DateUtil.getQuartersBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(3, DateUtil.getQuartersBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(7, DateUtil.getQuartersBetween(pairs[i], pairs[i+1]));
+  }
+
+
+  @Test
+  public void testYearsBetween() throws Exception {
+    int i = 0;
+    Assert.assertEquals(0, DateUtil.getYearsBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(0, DateUtil.getYearsBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(0, DateUtil.getYearsBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(0, DateUtil.getYearsBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(0, DateUtil.getYearsBetween(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(1, DateUtil.getYearsBetween(pairs[i], pairs[i+1]));
+  }
+
+}

Added: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestMaxUpdateInterval.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestMaxUpdateInterval.java?rev=1476977&view=auto
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestMaxUpdateInterval.java (added)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestMaxUpdateInterval.java Mon Apr 29 11:51:15 2013
@@ -0,0 +1,78 @@
+package org.apache.hadoop.hive.ql.cube.parse;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeFactTable;
+import org.apache.hadoop.hive.ql.cube.metadata.CubeMetastoreClient;
+import org.apache.hadoop.hive.ql.cube.metadata.UpdatePeriod;
+import org.junit.Assert;
+import org.junit.Test;
+
+/*
+ * Unit test for maxUpdateIntervalIn method in CubeFactTable
+ */
+public class TestMaxUpdateInterval {
+  public static final String[] testpairs = {
+    "2013-Jan-01", "2013-Jan-31",
+    "2013-Jan-01", "2013-May-31",
+    "2013-Jan-01", "2013-Dec-31",
+    "2013-Feb-01", "2013-Apr-25",
+    "2012-Feb-01", "2013-Feb-01",
+    "2011-Feb-01", "2013-Feb-01"
+  };
+
+  public static final SimpleDateFormat DATE_FMT = new SimpleDateFormat("yyyy-MMM-dd");
+
+  private final Date pairs[];
+
+  public TestMaxUpdateInterval() {
+    pairs = new Date[testpairs.length];
+    for (int i = 0; i < testpairs.length; i++) {
+      try {
+        pairs[i] = DATE_FMT.parse(testpairs[i]);
+        System.out.println(pairs[i].toString());
+      } catch (ParseException e) {
+        e.printStackTrace();
+      }
+    }
+  }
+
+  @Test
+  public void testMaxUpdatePeriodInInterval() throws Exception {
+    CubeTestSetup setup = new CubeTestSetup();
+    setup.createSources();
+
+    CubeMetastoreClient client =  CubeMetastoreClient.getInstance(
+        new HiveConf(this.getClass()));
+
+    CubeFactTable fact = client.getFactTable("testFact");
+    List<UpdatePeriod> allPeriods = new ArrayList<UpdatePeriod>();
+    for (List<UpdatePeriod> periods : fact.getUpdatePeriods().values()) {
+      allPeriods.addAll(periods);
+    }
+
+    int i = 0;
+    Assert.assertEquals(UpdatePeriod.DAILY, fact.maxIntervalInRange(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(UpdatePeriod.MONTHLY, fact.maxIntervalInRange(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(UpdatePeriod.QUARTERLY, fact.maxIntervalInRange(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(UpdatePeriod.MONTHLY, fact.maxIntervalInRange(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(UpdatePeriod.QUARTERLY, fact.maxIntervalInRange(pairs[i], pairs[i+1]));
+
+    i+=2;
+    Assert.assertEquals(UpdatePeriod.YEARLY, fact.maxIntervalInRange(pairs[i], pairs[i+1]));
+  }
+
+}

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java?rev=1476977&r1=1476976&r2=1476977&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java Mon Apr 29 11:51:15 2013
@@ -219,9 +219,9 @@ public class TestCubeDriver {
 
     // TODO this should consider only two month partitions. Month weight needs
     // to be fixed.
-   // hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-   //     " where time_range_in('" + getDateUptoMonth(twoMonthsBack)
-   //     + "','" + getDateUptoMonth(now) + "')");
+    hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
+        " where time_range_in('" + getDateUptoMonth(twoMonthsBack)
+        + "','" + getDateUptoMonth(now) + "')");
 
   }