You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lens.apache.org by am...@apache.org on 2015/05/21 11:43:11 UTC
incubator-lens git commit: LENS-566 : Add start time for fact as a
fact property (Rajat Khandelwal via amareshwari)
Repository: incubator-lens
Updated Branches:
refs/heads/master bc892f5ba -> 76638a1fb
LENS-566 : Add start time for fact as a fact property (Rajat Khandelwal via amareshwari)
Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/76638a1f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/76638a1f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/76638a1f
Branch: refs/heads/master
Commit: 76638a1fb54e97f7a5fa095b4f2632f11c6bb972
Parents: bc892f5
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Thu May 21 15:11:58 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Thu May 21 15:11:58 2015 +0530
----------------------------------------------------------------------
lens-api/src/main/resources/cube-0.1.xsd | 9 +++
.../lens/cube/metadata/CubeFactTable.java | 34 ++++++++
.../lens/cube/metadata/MetastoreConstants.java | 3 +
.../lens/cube/parse/AggregateResolver.java | 4 +-
.../apache/lens/cube/parse/CandidateFact.java | 8 +-
.../cube/parse/CandidateTablePruneCause.java | 28 +++++--
.../lens/cube/parse/CandidateTableResolver.java | 26 +++---
.../lens/cube/parse/CubeQueryContext.java | 4 -
.../org/apache/lens/cube/parse/DateUtil.java | 39 ++++++---
.../cube/parse/DenormalizationResolver.java | 6 +-
.../apache/lens/cube/parse/JoinResolver.java | 4 +-
.../lens/cube/parse/StorageTableResolver.java | 10 +--
.../org/apache/lens/cube/parse/TimeRange.java | 34 +++-----
.../lens/cube/parse/TimerangeResolver.java | 46 ++++++++---
.../apache/lens/cube/parse/CubeTestSetup.java | 46 ++++++++---
.../lens/cube/parse/TestAggregateResolver.java | 56 ++++++-------
.../lens/cube/parse/TestBaseCubeQueries.java | 2 +-
.../apache/lens/cube/parse/TestDateUtil.java | 56 +++++++------
.../lens/cube/parse/TestTimeRangeResolver.java | 85 ++++++++++++++++++++
19 files changed, 350 insertions(+), 150 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-api/src/main/resources/cube-0.1.xsd
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/cube-0.1.xsd b/lens-api/src/main/resources/cube-0.1.xsd
index 06f85ee..1918e5c 100644
--- a/lens-api/src/main/resources/cube-0.1.xsd
+++ b/lens-api/src/main/resources/cube-0.1.xsd
@@ -1125,6 +1125,15 @@
2. cube.fact.is.aggregated : Defaults to true. If the fact is a raw fact, this should be set to false,
otherwise true.
3. cube.timedim.relation.{time_dim1}: See the same property in cube. Fact tables can override the property.
+ 4. cube.fact.absolute.start.time: start time of the fact. For queries that ask for time before this,
+ this fact is not a candidate. Time format can be as you would specify in the time_range_in clause.
+ i.e. yyyy[-mm[-dd[-hh[:MM[:ss[,SSS]]]]]]
+ 5. cube.fact.relative.start.time: Here you can specify the fact's start time relative to the current time.
+ Useful if you want to specify e.g. this fact is valid for today - 90 days. Can be specified as just
+ a time difference e.g. "-90 days". Or can be specified in relative syntax.
+ e.g. now.year or now.day - 6 hour etc.
+ 6. cube.fact.absolute.end.time: If you're deprecating this fact, put the final date until which the data of
+ the fact will be valid here. Format same as absolute start time.
</xs:documentation>
</xs:annotation>
</xs:element>
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
index 9daccec..5ea715d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
@@ -28,6 +28,8 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
+import com.google.common.collect.Lists;
+
public final class CubeFactTable extends AbstractCubeTable {
private String cubeName;
private final Map<String, Set<UpdatePeriod>> storageUpdatePeriods;
@@ -318,4 +320,36 @@ public final class CubeFactTable extends AbstractCubeTable {
public void setAggregated(boolean isAggregated) {
getProperties().put(MetastoreConstants.FACT_AGGREGATED_PROPERTY, Boolean.toString(isAggregated));
}
+
+ public Date getAbsoluteStartTime() {
+ try {
+ return DateUtil.resolveAbsoluteDate(getProperties().get(MetastoreConstants.FACT_ABSOLUTE_START_TIME));
+ } catch (Exception e) {
+ return new Date(Long.MIN_VALUE);
+ }
+ }
+
+ public Date getRelativeStartTime() {
+ try {
+ return DateUtil.resolveRelativeDate(getProperties().get(MetastoreConstants.FACT_RELATIVE_START_TIME), new Date());
+ } catch (Exception e) {
+ return new Date(Long.MIN_VALUE);
+ }
+ }
+
+ public Date getStartTime() {
+ return Collections.max(Lists.newArrayList(getRelativeStartTime(), getAbsoluteStartTime()));
+ }
+
+ public Date getAbsoluteEndTime() {
+ try {
+ return DateUtil.resolveAbsoluteDate(getProperties().get(MetastoreConstants.FACT_ABSOLUTE_END_TIME));
+ } catch (Exception e) {
+ return new Date(Long.MAX_VALUE);
+ }
+ }
+
+ public Date getEndTime() {
+ return getAbsoluteEndTime();
+ }
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
index 1fe55d9..da47fa5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
@@ -51,6 +51,9 @@ public final class MetastoreConstants {
public static final String CUBE_NAME_SFX = ".cubename";
public static final String VALID_COLUMNS_SFX = ".valid.columns";
public static final String FACT_AGGREGATED_PROPERTY = "cube.fact.is.aggregated";
+ public static final String FACT_ABSOLUTE_START_TIME = "cube.fact.absolute.start.time";
+ public static final String FACT_RELATIVE_START_TIME = "cube.fact.relative.start.time";
+ public static final String FACT_ABSOLUTE_END_TIME = "cube.fact.absolute.end.time";
// dim table constants
// TODO: remove this and move to "dimtable."
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
index a25fae6..76b5729 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
@@ -72,12 +72,12 @@ class AggregateResolver implements ContextRewriter {
|| hasMeasuresNotInDefaultAggregates(cubeql, cubeql.getHavingAST(), null, aggregateResolverDisabled)
|| hasMeasures(cubeql, cubeql.getWhereAST()) || hasMeasures(cubeql, cubeql.getGroupByAST())
|| hasMeasures(cubeql, cubeql.getOrderByAST())) {
- Iterator<CandidateFact> factItr = cubeql.getCandidateFactTables().iterator();
+ Iterator<CandidateFact> factItr = cubeql.getCandidateFacts().iterator();
while (factItr.hasNext()) {
CandidateFact candidate = factItr.next();
if (candidate.fact.isAggregated()) {
cubeql.addFactPruningMsgs(candidate.fact,
- CandidateTablePruneCause.missingDefaultAggregate(candidate.fact.getName()));
+ CandidateTablePruneCause.missingDefaultAggregate());
factItr.remove();
}
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 84e5341..dab01b3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -80,10 +80,10 @@ public class CandidateFact implements CandidateTable {
private final Map<String, String> storgeWhereClauseMap = new HashMap<String, String>();
@Getter
private final Map<TimeRange, Map<String, LinkedHashSet<FactPartition>>> rangeToStoragePartMap =
- new HashMap<TimeRange, Map<String, LinkedHashSet<FactPartition>>>();
+ new HashMap<TimeRange, Map<String, LinkedHashSet<FactPartition>>>();
@Getter
private final Map<TimeRange, Map<String, String>> rangeToStorageWhereMap =
- new HashMap<TimeRange, Map<String, String>>();
+ new HashMap<TimeRange, Map<String, String>>();
CandidateFact(CubeFactTable fact, CubeInterface cube) {
this.fact = fact;
@@ -105,6 +105,10 @@ public class CandidateFact implements CandidateTable {
return columns;
}
+ public boolean isValidForTimeRange(TimeRange timeRange) {
+ return (!timeRange.getFromDate().before(fact.getStartTime())) && (!timeRange.getToDate().after(fact.getEndTime()));
+ }
+
static class TimeRangeNode {
ASTNode timenode;
ASTNode parent;
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
index a1fea16..8c009b2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
@@ -22,6 +22,7 @@ import java.util.*;
import org.codehaus.jackson.annotate.JsonWriteNullProperties;
+import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import lombok.Data;
import lombok.NoArgsConstructor;
@@ -36,9 +37,20 @@ import lombok.NoArgsConstructor;
public class CandidateTablePruneCause {
-
-
public enum CandidateTablePruneCode {
+ FACT_NOT_AVAILABLE_IN_RANGE("No facts available for all of these time ranges: %s") {
+ @Override
+ Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
+ Set<TimeRange> allRanges = Sets.newHashSet();
+ for (CandidateTablePruneCause cause : causes) {
+ allRanges.addAll(cause.getInvalidRanges());
+ }
+ return new Object[]{
+ allRanges.toString(),
+ };
+ }
+ },
+ // least weight not satisfied
MORE_WEIGHT("Picked table had more weight than minimum."),
// partial data is enabled, another fact has more data.
LESS_DATA("Picked table has less data than the maximum"),
@@ -201,12 +213,19 @@ public class CandidateTablePruneCause {
// time covered
private MaxCoveringFactResolver.TimeCovered maxTimeCovered;
+ // ranges in which fact is invalid
+ private List<TimeRange> invalidRanges;
public CandidateTablePruneCause(CandidateTablePruneCode cause) {
this.cause = cause;
}
// Different static constructors for different causes.
+ public static CandidateTablePruneCause factNotAvailableInRange(List<TimeRange> ranges) {
+ CandidateTablePruneCause cause = new CandidateTablePruneCause(CandidateTablePruneCode.FACT_NOT_AVAILABLE_IN_RANGE);
+ cause.invalidRanges = ranges;
+ return cause;
+ }
public static CandidateTablePruneCause columnNotFound(Collection<String> missingColumns) {
List<String> colList = new ArrayList<String>();
@@ -261,10 +280,7 @@ public class CandidateTablePruneCause {
public static CandidateTablePruneCause missingDefaultAggregate(String... names) {
CandidateTablePruneCause cause = new CandidateTablePruneCause(CandidateTablePruneCode.MISSING_DEFAULT_AGGREGATE);
- cause.setColumnsMissingDefaultAggregate(new ArrayList<String>());
- for (String name : names) {
- cause.getColumnsMissingDefaultAggregate().add(name);
- }
+ cause.setColumnsMissingDefaultAggregate(Lists.newArrayList(names));
return cause;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
index 6b6a09b..f79e7e7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
@@ -73,7 +73,7 @@ class CandidateTableResolver implements ContextRewriter {
if (cubeql.getAutoJoinCtx() != null) {
// Before checking for candidate table columns, prune join paths containing non existing columns
// in populated candidate tables
- cubeql.getAutoJoinCtx().pruneAllPaths(cubeql.getCube(), cubeql.getCandidateFactTables(), null);
+ cubeql.getAutoJoinCtx().pruneAllPaths(cubeql.getCube(), cubeql.getCandidateFacts(), null);
cubeql.getAutoJoinCtx().pruneAllPathsForCandidateDims(cubeql.getCandidateDimTables());
cubeql.getAutoJoinCtx().refreshJoinPathColumns();
}
@@ -97,9 +97,9 @@ class CandidateTableResolver implements ContextRewriter {
for (CubeFactTable fact : factTables) {
CandidateFact cfact = new CandidateFact(fact, cubeql.getCube());
cfact.setEnabledMultiTableSelect(qlEnabledMultiTableSelect);
- cubeql.getCandidateFactTables().add(cfact);
+ cubeql.getCandidateFacts().add(cfact);
}
- LOG.info("Populated candidate facts:" + cubeql.getCandidateFactTables());
+ LOG.info("Populated candidate facts:" + cubeql.getCandidateFacts());
}
if (cubeql.getDimensions().size() != 0) {
@@ -142,7 +142,7 @@ class CandidateTableResolver implements ContextRewriter {
private void pruneOptionalDims(CubeQueryContext cubeql) {
Set<Dimension> tobeRemoved = new HashSet<Dimension>();
Set<CandidateTable> allCandidates = new HashSet<CandidateTable>();
- allCandidates.addAll(cubeql.getCandidateFactTables());
+ allCandidates.addAll(cubeql.getCandidateFacts());
for (Set<CandidateDim> cdims : cubeql.getCandidateDimTables().values()) {
allCandidates.addAll(cdims);
}
@@ -182,7 +182,7 @@ class CandidateTableResolver implements ContextRewriter {
for (CandidateTable candidate : optdim.requiredForCandidates) {
if (candidate instanceof CandidateFact) {
LOG.info("Not considering fact:" + candidate + " as refered table does not have any valid dimtables");
- cubeql.getCandidateFactTables().remove(candidate);
+ cubeql.getCandidateFacts().remove(candidate);
cubeql.addFactPruningMsgs(((CandidateFact) candidate).fact, new CandidateTablePruneCause(
CandidateTablePruneCode.INVALID_DENORM_TABLE));
} else {
@@ -207,7 +207,7 @@ class CandidateTableResolver implements ContextRewriter {
Set<String> queriedMsrs = cubeql.getQueriedMsrs();
// Remove fact tables based on columns in the query
- for (Iterator<CandidateFact> i = cubeql.getCandidateFactTables().iterator(); i.hasNext();) {
+ for (Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator(); i.hasNext();) {
CandidateFact cfact = i.next();
if (validFactTables != null) {
@@ -265,7 +265,7 @@ class CandidateTableResolver implements ContextRewriter {
}
// Find out candidate fact table sets which contain all the measures
// queried
- List<CandidateFact> cfacts = new ArrayList<CandidateFact>(cubeql.getCandidateFactTables());
+ List<CandidateFact> cfacts = new ArrayList<CandidateFact>(cubeql.getCandidateFacts());
Set<Set<CandidateFact>> cfactset = findCoveringSets(cfacts, queriedMsrs);
LOG.info("Measure covering fact sets :" + cfactset);
if (cfactset.isEmpty()) {
@@ -274,7 +274,7 @@ class CandidateTableResolver implements ContextRewriter {
cubeql.getCandidateFactSets().addAll(cfactset);
cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCode.COLUMN_NOT_FOUND);
- if (cubeql.getCandidateFactTables().size() == 0) {
+ if (cubeql.getCandidateFacts().size() == 0) {
throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN, queriedDimAttrs.toString());
}
}
@@ -402,8 +402,8 @@ class CandidateTableResolver implements ContextRewriter {
return;
}
Collection<String> colSet = null;
- if (cubeql.getCube() != null && !cubeql.getCandidateFactTables().isEmpty()) {
- for (Iterator<CandidateFact> i = cubeql.getCandidateFactTables().iterator(); i.hasNext();) {
+ if (cubeql.getCube() != null && !cubeql.getCandidateFacts().isEmpty()) {
+ for (Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator(); i.hasNext();) {
CandidateFact cfact = i.next();
CubeFactTable fact = cfact.fact;
@@ -426,7 +426,7 @@ class CandidateTableResolver implements ContextRewriter {
}
}
}
- if (cubeql.getCandidateFactTables().size() == 0) {
+ if (cubeql.getCandidateFacts().size() == 0) {
throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN, colSet == null ? "NULL" : colSet.toString());
}
}
@@ -482,9 +482,9 @@ class CandidateTableResolver implements ContextRewriter {
for (CandidateTable candidate : removedCandidates.keySet()) {
if (!candidatesReachableThroughRefs.contains(candidate)) {
if (candidate instanceof CandidateFact) {
- if (cubeql.getCandidateFactTables().contains(candidate)) {
+ if (cubeql.getCandidateFacts().contains(candidate)) {
LOG.info("Not considering fact:" + candidate + " as is not reachable through any optional dim");
- cubeql.getCandidateFactTables().remove(candidate);
+ cubeql.getCandidateFacts().remove(candidate);
cubeql.addFactPruningMsgs(((CandidateFact) candidate).fact,
CandidateTablePruneCause.columnNotFound(removedCandidates.get(candidate)));
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 38b6429..3964c1a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -379,10 +379,6 @@ public class CubeQueryContext {
return clauseName;
}
- public Set<CandidateFact> getCandidateFactTables() {
- return candidateFacts;
- }
-
public Map<Dimension, Set<CandidateDim>> getCandidateDimTables() {
return candidateDims;
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
index b11de10..9931e7c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
@@ -57,6 +57,7 @@ public final class DateUtil {
public static final Pattern P_RELATIVE = Pattern.compile(RELATIVE, Pattern.CASE_INSENSITIVE);
public static final String WSPACE = "\\s+";
+ public static final String OPTIONAL_WSPACE = "\\s*";
public static final Pattern P_WSPACE = Pattern.compile(WSPACE);
public static final String SIGNAGE = "\\+|\\-";
@@ -67,8 +68,8 @@ public final class DateUtil {
public static final Pattern P_UNIT = Pattern.compile(UNIT, Pattern.CASE_INSENSITIVE);
- public static final String RELDATE_VALIDATOR_STR = RELATIVE + "(" + WSPACE + ")?" + "((" + SIGNAGE + ")" + "("
- + WSPACE + ")?" + "(" + QUANTITY + ")(" + UNIT + ")){0,1}" + "(s?)";
+ public static final String RELDATE_VALIDATOR_STR = RELATIVE + OPTIONAL_WSPACE + "((" + SIGNAGE + ")" + "("
+ + WSPACE + ")?" + "(" + QUANTITY + ")" + OPTIONAL_WSPACE + "(" + UNIT + ")){0,1}" + "(s?)";
public static final Pattern RELDATE_VALIDATOR = Pattern.compile(RELDATE_VALIDATOR_STR, Pattern.CASE_INSENSITIVE);
@@ -78,6 +79,7 @@ public final class DateUtil {
public static final String HOUR_FMT = DAY_FMT + "-[0-9]{2}";
public static final String MINUTE_FMT = HOUR_FMT + ":[0-9]{2}";
public static final String SECOND_FMT = MINUTE_FMT + ":[0-9]{2}";
+ public static final String MILLISECOND_FMT = SECOND_FMT + ",[0-9]{3}";
public static final String ABSDATE_FMT = "yyyy-MM-dd-HH:mm:ss,SSS";
public static final String HIVE_QUERY_DATE_FMT = "yyyy-MM-dd HH:mm:ss";
@@ -113,7 +115,7 @@ public final class DateUtil {
return str + ":00,000";
} else if (str.matches(SECOND_FMT)) {
return str + ",000";
- } else if (str.matches(ABSDATE_FMT)) {
+ } else if (str.matches(MILLISECOND_FMT)) {
return str;
}
throw new IllegalArgumentException("Unsupported formatting for date" + str);
@@ -123,12 +125,26 @@ public final class DateUtil {
if (RELDATE_VALIDATOR.matcher(str).matches()) {
return resolveRelativeDate(str, now);
} else {
- try {
- return ABSDATE_PARSER.get().parse(getAbsDateFormatString(str));
- } catch (ParseException e) {
- LOG.error("Invalid date format. expected only " + ABSDATE_FMT + " date provided:" + str, e);
- throw new SemanticException(e, ErrorMsg.WRONG_TIME_RANGE_FORMAT, ABSDATE_FMT, str);
- }
+ return resolveAbsoluteDate(str);
+ }
+ }
+ public static String relativeToAbsolute(String relative) throws SemanticException {
+ return relativeToAbsolute(relative, new Date());
+ }
+ public static String relativeToAbsolute(String relative, Date now) throws SemanticException {
+ if (RELDATE_VALIDATOR.matcher(relative).matches()) {
+ return ABSDATE_PARSER.get().format(resolveRelativeDate(relative, now));
+ } else {
+ return relative;
+ }
+ }
+
+ public static Date resolveAbsoluteDate(String str) throws SemanticException {
+ try {
+ return ABSDATE_PARSER.get().parse(getAbsDateFormatString(str));
+ } catch (ParseException e) {
+ LOG.error("Invalid date format. expected only " + ABSDATE_FMT + " date provided:" + str, e);
+ throw new SemanticException(e, ErrorMsg.WRONG_TIME_RANGE_FORMAT, ABSDATE_FMT, str);
}
}
@@ -414,8 +430,9 @@ public final class DateUtil {
this.coverable = coverable;
}
}
+
@EqualsAndHashCode
- static class TimeDiff{
+ static class TimeDiff {
int quantity;
int calendarField;
@@ -467,8 +484,10 @@ public final class DateUtil {
public Date offsetFrom(Date time) {
return DateUtils.add(time, calendarField, quantity);
}
+
public Date negativeOffsetFrom(Date time) {
return DateUtils.add(time, calendarField, -quantity);
}
}
+
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index 7857868..e0f7bea 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -356,8 +356,8 @@ public class DenormalizationResolver implements ContextRewriter {
// In the second iteration of denorm resolver
// candidate tables which require denorm fields and the refernces are no
// more valid will be pruned
- if (cubeql.getCube() != null && !cubeql.getCandidateFactTables().isEmpty()) {
- for (Iterator<CandidateFact> i = cubeql.getCandidateFactTables().iterator(); i.hasNext();) {
+ if (cubeql.getCube() != null && !cubeql.getCandidateFacts().isEmpty()) {
+ for (Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator(); i.hasNext();) {
CandidateFact cfact = i.next();
if (denormCtx.tableToRefCols.containsKey(cfact.getName())) {
for (ReferencedQueriedColumn refcol : denormCtx.tableToRefCols.get(cfact.getName())) {
@@ -369,7 +369,7 @@ public class DenormalizationResolver implements ContextRewriter {
}
}
}
- if (cubeql.getCandidateFactTables().size() == 0) {
+ if (cubeql.getCandidateFacts().size() == 0) {
throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN, cubeql.getColumnsQueried(cubeql.getCube().getName())
.toString());
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index 3e3534c..17d2eed 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -1056,9 +1056,9 @@ class JoinResolver implements ContextRewriter {
Set<CandidateTable> candidates = cubeql.getOptionalDimensionMap().get(joinee).requiredForCandidates;
for (CandidateTable candidate : candidates) {
if (candidate instanceof CandidateFact) {
- if (cubeql.getCandidateFactTables().contains(candidate)) {
+ if (cubeql.getCandidateFacts().contains(candidate)) {
LOG.info("Not considering fact:" + candidate + " as there is no join path to " + joinee);
- cubeql.getCandidateFactTables().remove(candidate);
+ cubeql.getCandidateFacts().remove(candidate);
cubeql.addFactPruningMsgs(((CandidateFact) candidate).fact, new CandidateTablePruneCause(
CandidateTablePruneCode.COLUMN_NOT_FOUND));
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index 5d0e15b..2a60310 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -127,14 +127,14 @@ class StorageTableResolver implements ContextRewriter {
switch (phase) {
case FACT_TABLES:
- if (!cubeql.getCandidateFactTables().isEmpty()) {
+ if (!cubeql.getCandidateFacts().isEmpty()) {
// resolve storage table names
resolveFactStorageTableNames(cubeql);
}
cubeql.pruneCandidateFactSet(CandidateTablePruneCode.NO_CANDIDATE_STORAGES);
break;
case FACT_PARTITIONS:
- if (!cubeql.getCandidateFactTables().isEmpty()) {
+ if (!cubeql.getCandidateFacts().isEmpty()) {
// resolve storage partitions
resolveFactStoragePartitions(cubeql);
}
@@ -144,7 +144,7 @@ class StorageTableResolver implements ContextRewriter {
resolveDimStorageTablesAndPartitions(cubeql);
if (cubeql.getAutoJoinCtx() != null) {
// After all candidates are pruned after storage resolver, prune join paths.
- cubeql.getAutoJoinCtx().pruneAllPaths(cubeql.getCube(), cubeql.getCandidateFactTables(), null);
+ cubeql.getAutoJoinCtx().pruneAllPaths(cubeql.getCube(), cubeql.getCandidateFacts(), null);
cubeql.getAutoJoinCtx().pruneAllPathsForCandidateDims(cubeql.getCandidateDimTables());
cubeql.getAutoJoinCtx().refreshJoinPathColumns();
}
@@ -231,7 +231,7 @@ class StorageTableResolver implements ContextRewriter {
// Resolves all the storage table names, which are valid for each updatePeriod
private void resolveFactStorageTableNames(CubeQueryContext cubeql) throws SemanticException {
- Iterator<CandidateFact> i = cubeql.getCandidateFactTables().iterator();
+ Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator();
while (i.hasNext()) {
CubeFactTable fact = i.next().fact;
if (fact.getUpdatePeriods().isEmpty()) {
@@ -351,7 +351,7 @@ class StorageTableResolver implements ContextRewriter {
private void resolveFactStoragePartitions(CubeQueryContext cubeql) throws SemanticException {
// Find candidate tables wrt supported storages
- Iterator<CandidateFact> i = cubeql.getCandidateFactTables().iterator();
+ Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator();
Map<TimeRange, String> whereClasueForFallback = new LinkedHashMap<TimeRange, String>();
while (i.hasNext()) {
CandidateFact cfact = i.next();
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
index c9c9cc6..03732cb 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
@@ -18,6 +18,8 @@
*/
package org.apache.lens.cube.parse;
+import static org.apache.lens.cube.parse.DateUtil.ABSDATE_PARSER;
+
import java.util.Calendar;
import java.util.Date;
import java.util.TreeSet;
@@ -29,11 +31,16 @@ import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+
+import lombok.Data;
import lombok.Getter;
/**
* Timerange data structure
*/
+@JsonIgnoreProperties({"astNode", "parent"})
+@Data
public class TimeRange {
private String partitionColumn;
private Date toDate;
@@ -97,30 +104,6 @@ public class TimeRange {
}
- public String getPartitionColumn() {
- return partitionColumn;
- }
-
- public Date getFromDate() {
- return fromDate;
- }
-
- public Date getToDate() {
- return toDate;
- }
-
- public ASTNode getASTNode() {
- return astNode;
- }
-
- public ASTNode getParent() {
- return parent;
- }
-
- public int getChildIndex() {
- return childIndex;
- }
-
public void validate() throws SemanticException {
if (partitionColumn == null || fromDate == null || toDate == null || fromDate.equals(toDate)) {
throw new SemanticException(ErrorMsg.INVALID_TIME_RANGE);
@@ -148,7 +131,8 @@ public class TimeRange {
@Override
public String toString() {
- return partitionColumn + " [" + fromDate + ":" + toDate + "]";
+ return partitionColumn + " [" + ABSDATE_PARSER.get().format(fromDate) + " to "
+ + ABSDATE_PARSER.get().format(toDate) + ")";
}
/** iterable from fromDate(including) to toDate(excluding) incrementing increment units of updatePeriod */
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
index e5e7c56..936faa1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
@@ -18,6 +18,8 @@
*/
package org.apache.lens.cube.parse;
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.FACT_NOT_AVAILABLE_IN_RANGE;
+
import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
import java.util.*;
@@ -32,20 +34,20 @@ import org.apache.lens.cube.parse.DenormalizationResolver.ReferencedQueriedColum
import org.apache.lens.server.api.error.LensException;
import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
+import com.google.common.collect.Lists;
+import lombok.extern.slf4j.Slf4j;
+
/**
* Finds all timeranges in the query and does validation wrt the queried field's life and the range queried
*/
+@Slf4j
class TimerangeResolver implements ContextRewriter {
- public static final Log LOG = LogFactory.getLog(TimerangeResolver.class.getName());
-
public TimerangeResolver(Configuration conf) {
}
@@ -56,8 +58,10 @@ class TimerangeResolver implements ContextRewriter {
}
extractTimeRange(cubeql);
doColLifeValidation(cubeql);
+ doFactRangeValidation(cubeql);
}
+
private void extractTimeRange(CubeQueryContext cubeql) throws SemanticException {
// get time range -
// Time range should be direct child of where condition
@@ -140,7 +144,7 @@ class TimerangeResolver implements ContextRewriter {
}
private void doColLifeValidation(CubeQueryContext cubeql) throws SemanticException,
- ColUnAvailableInTimeRangeException {
+ ColUnAvailableInTimeRangeException {
Set<String> cubeColumns = cubeql.getColumnsQueried(cubeql.getCube().getName());
if (cubeColumns == null || cubeColumns.isEmpty()) {
// Query doesn't have any columns from cube
@@ -171,7 +175,7 @@ class TimerangeResolver implements ContextRewriter {
ReferencedQueriedColumn refCol = refColIter.next();
for (TimeRange range : cubeql.getTimeRanges()) {
if (!refCol.col.isColumnAvailableInTimeRange(range)) {
- LOG.debug("The refernced column:" + refCol.col.getName() + " is not in the range queried");
+ log.debug("The refernced column:" + refCol.col.getName() + " is not in the range queried");
refColIter.remove();
break;
}
@@ -195,7 +199,7 @@ class TimerangeResolver implements ContextRewriter {
CubeColumn column = cubeql.getCube().getColumnByName(col);
for (TimeRange range : cubeql.getTimeRanges()) {
if (!column.isColumnAvailableInTimeRange(range)) {
- LOG.info("Timerange queried is not in column life for " + column
+ log.info("Timerange queried is not in column life for " + column
+ ", Removing join paths containing the column");
// Remove join paths containing this column
Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths = joinContext.getAllPaths();
@@ -207,7 +211,7 @@ class TimerangeResolver implements ContextRewriter {
while (joinPathIterator.hasNext()) {
SchemaGraph.JoinPath path = joinPathIterator.next();
if (path.containsColumnOfTable(col, (AbstractCubeTable) cubeql.getCube())) {
- LOG.info("Removing join path:" + path + " as columns :" + col + " is not available in the range");
+ log.info("Removing join path:" + path + " as columns :" + col + " is not available in the range");
joinPathIterator.remove();
if (joinPaths.isEmpty()) {
// This dimension doesn't have any paths left
@@ -224,17 +228,37 @@ class TimerangeResolver implements ContextRewriter {
}
+
private void throwException(CubeColumn column) throws ColUnAvailableInTimeRangeException {
final Long availabilityStartTime = (column.getStartTimeMillisSinceEpoch().isPresent())
- ? column.getStartTimeMillisSinceEpoch().get() : null;
+ ? column.getStartTimeMillisSinceEpoch().get() : null;
final Long availabilityEndTime = column.getEndTimeMillisSinceEpoch().isPresent()
- ? column.getEndTimeMillisSinceEpoch().get() : null;
+ ? column.getEndTimeMillisSinceEpoch().get() : null;
ColUnAvailableInTimeRange col = new ColUnAvailableInTimeRange(column.getName(), availabilityStartTime,
- availabilityEndTime);
+ availabilityEndTime);
throw new ColUnAvailableInTimeRangeException(col);
}
+
+ private void doFactRangeValidation(CubeQueryContext cubeql) {
+ Iterator<CandidateFact> iter = cubeql.getCandidateFacts().iterator();
+ while (iter.hasNext()) {
+ CandidateFact cfact = iter.next();
+ List<TimeRange> invalidTimeRanges = Lists.newArrayList();
+ for (TimeRange timeRange : cubeql.getTimeRanges()) {
+ if (!cfact.isValidForTimeRange(timeRange)) {
+ invalidTimeRanges.add(timeRange);
+ }
+ }
+ if (!invalidTimeRanges.isEmpty()){
+ cubeql.addFactPruningMsgs(cfact.fact, CandidateTablePruneCause.factNotAvailableInRange(invalidTimeRanges));
+ log.info("Not considering " + cfact + " as it's not available for time ranges: " + invalidTimeRanges);
+ iter.remove();
+ }
+ }
+ cubeql.pruneCandidateFactSet(FACT_NOT_AVAILABLE_IN_RANGE);
+ }
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 5737057..69bd57e 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -105,12 +105,15 @@ public class CubeTestSetup {
public static final Date BEFORE_4_DAYS_END;
public static final Date THIS_YEAR_START;
public static final Date THIS_YEAR_END;
+ public static final Date LAST_YEAR_START;
+ public static final Date LAST_YEAR_END;
// Time Ranges
public static final String LAST_HOUR_TIME_RANGE;
public static final String TWO_DAYS_RANGE;
public static final String TWO_DAYS_RANGE_TTD;
public static final String THIS_YEAR_RANGE;
+ public static final String LAST_YEAR_RANGE;
public static final String TWO_MONTHS_RANGE_UPTO_MONTH;
public static final String TWO_MONTHS_RANGE_UPTO_HOURS;
public static final String TWO_DAYS_RANGE_BEFORE_4_DAYS;
@@ -121,6 +124,7 @@ public class CubeTestSetup {
private static String c3 = "C3";
private static String c4 = "C4";
private static String c99 = "C99";
+ private static Map<String, String> factValidityProperties = Maps.newHashMap();
@Getter
private static Map<String, String> storageToUpdatePeriodMap = new LinkedHashMap<String, String>();
@@ -160,6 +164,8 @@ public class CubeTestSetup {
THIS_YEAR_START = DateUtils.truncate(NOW, UpdatePeriod.YEARLY.calendarField());
THIS_YEAR_END = DateUtils.addYears(THIS_YEAR_START, 1);
+ LAST_YEAR_START = DateUtils.addYears(THIS_YEAR_START, -1);
+ LAST_YEAR_END = THIS_YEAR_START;
TWO_DAYS_RANGE_BEFORE_4_DAYS =
"time_range_in(d_time, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
+ CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')";
@@ -170,6 +176,8 @@ public class CubeTestSetup {
+ getDateUptoHours(NOW) + "')";
THIS_YEAR_RANGE =
"time_range_in(d_time, '" + getDateUptoHours(THIS_YEAR_START) + "','" + getDateUptoHours(THIS_YEAR_END) + "')";
+ LAST_YEAR_RANGE =
+ "time_range_in(d_time, '" + getDateUptoHours(LAST_YEAR_START) + "','" + getDateUptoHours(LAST_YEAR_END) + "')";
TWO_MONTHS_RANGE_UPTO_MONTH =
"time_range_in(d_time, '" + getDateUptoMonth(TWO_MONTHS_BACK) + "','" + getDateUptoMonth(NOW) + "')";
TWO_MONTHS_RANGE_UPTO_HOURS =
@@ -177,6 +185,7 @@ public class CubeTestSetup {
// calculate LAST_HOUR_TIME_RANGE
LAST_HOUR_TIME_RANGE = getTimeRangeString(getDateUptoHours(LAST_HOUR), getDateUptoHours(NOW));
+ factValidityProperties.put(MetastoreConstants.FACT_RELATIVE_START_TIME, "now.year - 90 days");
}
public static boolean isZerothHour() {
@@ -880,7 +889,8 @@ public class CubeTestSetup {
factColumns.add(new FieldSchema("dim11", "string", "base dim"));
// create cube fact
- client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L, null, storageTables);
+ client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L,
+ factValidityProperties, storageTables);
// create fact only with extra measures
factName = "testFact2_BASE";
@@ -896,7 +906,13 @@ public class CubeTestSetup {
factColumns.add(new FieldSchema("dim2", "int", "dim2 id"));
// create cube fact
- client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L, null, storageTables);
+ client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L,
+ factValidityProperties, storageTables);
+ Map<String, String> properties = Maps.newHashMap(factValidityProperties);
+ properties.put(MetastoreConstants.FACT_ABSOLUTE_END_TIME, DateUtil.relativeToAbsolute("now.day - 2 days"));
+ properties.put(MetastoreConstants.FACT_ABSOLUTE_START_TIME, DateUtil.relativeToAbsolute("now.day - 3 days"));
+ client.createCubeFactTable(BASE_CUBE_NAME, "testfact_deprecated", factColumns, storageAggregatePeriods, 5L,
+ properties, storageTables);
// create fact only with extra measures
factName = "testFact3_BASE";
@@ -911,7 +927,8 @@ public class CubeTestSetup {
factColumns.add(new FieldSchema("dim11", "string", "base dim"));
// create cube fact
- client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L, null, storageTables);
+ client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L,
+ factValidityProperties, storageTables);
// create raw fact only with extra measures
factName = "testFact2_RAW_BASE";
@@ -935,7 +952,8 @@ public class CubeTestSetup {
storageTables.put(c1, s1);
// create cube fact
- Map<String, String> properties = new HashMap<String, String>();
+ properties.clear();
+ properties.putAll(factValidityProperties);
properties.put(MetastoreConstants.FACT_AGGREGATED_PROPERTY, "false");
client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 100L, properties,
@@ -1022,7 +1040,8 @@ public class CubeTestSetup {
storageTables.put(c2, s1);
storageTables.put(c3, s1);
// create cube fact
- client.createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L, null, storageTables);
+ client.createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L,
+ factValidityProperties, storageTables);
CubeFactTable fact = client.getFactTable(factName);
Table table = client.getTable(MetastoreUtil.getStorageTableName(fact.getName(),
@@ -1157,7 +1176,8 @@ public class CubeTestSetup {
Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
storageTables.put(c99, s2);
// create cube fact
- client.createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 0L, null, storageTables);
+ client.createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 0L,
+ factValidityProperties, storageTables);
CubeFactTable fact = client.getFactTable(factName);
// Add all hourly partitions for two days
@@ -1216,7 +1236,8 @@ public class CubeTestSetup {
Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
storageTables.put(c1, s1);
// create cube fact
- client.createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L, null, storageTables);
+ client.createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L,
+ factValidityProperties, storageTables);
}
private void createCubeFactOnlyHourly(CubeMetastoreClient client) throws HiveException, LensException {
@@ -1252,7 +1273,8 @@ public class CubeTestSetup {
// create cube fact
client
- .createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 10L, null, storageTables);
+ .createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 10L,
+ factValidityProperties, storageTables);
CubeFactTable fact2 = client.getFactTable(factName);
// Add all hourly partitions for two days
Calendar cal = Calendar.getInstance();
@@ -1323,6 +1345,7 @@ public class CubeTestSetup {
// create cube fact
Map<String, String> properties = new HashMap<String, String>();
+ properties.putAll(factValidityProperties);
properties.put(MetastoreConstants.FACT_AGGREGATED_PROPERTY, "false");
client.createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 100L, properties,
@@ -1373,7 +1396,8 @@ public class CubeTestSetup {
storageTables.put(c2, s1);
// create cube fact
- client.createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L, null, storageTables);
+ client.createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L,
+ factValidityProperties, storageTables);
}
// DimWithTwoStorages
@@ -2041,6 +2065,7 @@ public class CubeTestSetup {
// create cube fact summary1
Map<String, String> properties = new HashMap<String, String>();
+ properties.putAll(factValidityProperties);
String validColumns = commonCols.toString() + ",dim1,testdim3id";
properties.put(MetastoreUtil.getValidColumnsKey(factName), validColumns);
CubeFactTable fact1 =
@@ -2050,7 +2075,6 @@ public class CubeTestSetup {
// create summary2 - same schema, different valid columns
factName = "summary2";
- properties = new HashMap<String, String>();
validColumns = commonCols.toString() + ",dim1,dim2";
properties.put(MetastoreUtil.getValidColumnsKey(factName), validColumns);
CubeFactTable fact2 =
@@ -2059,7 +2083,6 @@ public class CubeTestSetup {
createPIEParts(client, fact2, c2);
factName = "summary3";
- properties = new HashMap<String, String>();
validColumns = commonCols.toString() + ",dim1,dim2,cityid,stateid";
properties.put(MetastoreUtil.getValidColumnsKey(factName), validColumns);
CubeFactTable fact3 =
@@ -2074,7 +2097,6 @@ public class CubeTestSetup {
storageTables = new HashMap<String, StorageTableDesc>();
storageTables.put(c2, s2);
factName = "summary4";
- properties = new HashMap<String, String>();
validColumns = commonCols.toString() + ",dim1,dim2big1,dim2big2,cityid";
properties.put(MetastoreUtil.getValidColumnsKey(factName), validColumns);
CubeFactTable fact4 =
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
index 4f61671..13058e2 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
@@ -205,8 +205,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
String query = "SELECT cityid, testCube.msr2 FROM testCube WHERE " + TWO_DAYS_RANGE;
CubeQueryContext cubeql = rewriteCtx(query, conf2);
String hQL = cubeql.toHQL();
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- CandidateFact candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ CandidateFact candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
String expectedQL =
getExpectedQuery(cubeName, "SELECT testcube.cityid," + " testCube.msr2 from ", null, null,
@@ -267,8 +267,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
String query = "SELECT cityid, avg(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE;
CubeQueryContext cubeql = rewriteCtx(query, conf);
String hQL = cubeql.toHQL();
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- CandidateFact candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ CandidateFact candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
String expectedQL =
getExpectedQuery(cubeName, "SELECT testcube.cityid," + " avg(testCube.msr2)) from ", null,
@@ -278,8 +278,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
// query with measure in a where clause
query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE testCube.msr1 < 100 and " + TWO_DAYS_RANGE;
cubeql = rewriteCtx(query, conf);
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
hQL = cubeql.toHQL();
expectedQL =
@@ -289,8 +289,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
query = "SELECT cityid, testCube.msr2 FROM testCube WHERE testCube.msr2 < 100 and " + TWO_DAYS_RANGE;
cubeql = rewriteCtx(query, conf);
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
hQL = cubeql.toHQL();
expectedQL =
@@ -300,8 +300,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " group by testCube.msr1";
cubeql = rewriteCtx(query, conf);
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
hQL = cubeql.toHQL();
expectedQL =
@@ -311,8 +311,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " group by testCube.msr3";
cubeql = rewriteCtx(query, conf);
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
hQL = cubeql.toHQL();
expectedQL =
@@ -322,8 +322,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " order by testCube.msr1";
cubeql = rewriteCtx(query, conf);
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
hQL = cubeql.toHQL();
expectedQL =
@@ -333,8 +333,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " order by testCube.msr3";
cubeql = rewriteCtx(query, conf);
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
hQL = cubeql.toHQL();
expectedQL =
@@ -344,8 +344,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
query = "SELECT distinct cityid, round(testCube.msr2) from testCube where " + TWO_DAYS_RANGE;
cubeql = rewriteCtx(query, conf);
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
hQL = cubeql.toHQL();
expectedQL =
@@ -355,8 +355,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
query = "SELECT cityid, count(distinct(testCube.msr2)) from testCube where " + TWO_DAYS_RANGE;
cubeql = rewriteCtx(query, conf);
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
hQL = cubeql.toHQL();
expectedQL =
@@ -367,8 +367,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
// query with no default aggregate measure
query = "SELECT cityid, round(testCube.msr1) from testCube where " + TWO_DAYS_RANGE;
cubeql = rewriteCtx(query, conf);
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
hQL = cubeql.toHQL();
expectedQL =
@@ -378,8 +378,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
query = "SELECT distinct cityid, round(testCube.msr1) from testCube where " + TWO_DAYS_RANGE;
cubeql = rewriteCtx(query, conf);
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
hQL = cubeql.toHQL();
expectedQL =
@@ -389,8 +389,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
query = "SELECT cityid, count(distinct(testCube.msr1)) from testCube where " + TWO_DAYS_RANGE;
cubeql = rewriteCtx(query, conf);
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
hQL = cubeql.toHQL();
expectedQL =
@@ -400,8 +400,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
query = "SELECT cityid, sum(testCube.msr1) from testCube where " + TWO_DAYS_RANGE;
cubeql = rewriteCtx(query, conf);
- Assert.assertEquals(1, cubeql.getCandidateFactTables().size());
- candidateFact = cubeql.getCandidateFactTables().iterator().next();
+ Assert.assertEquals(1, cubeql.getCandidateFacts().size());
+ candidateFact = cubeql.getCandidateFacts().iterator().next();
Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
hQL = cubeql.toHQL();
expectedQL =
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 2a8f082..2fd0a46 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -87,7 +87,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
new HashMap<String, List<CandidateTablePruneCause>>() {
{
put("testfact3_base,testfact3_raw_base", Arrays.asList(CandidateTablePruneCause.columnNotFound("stateid")));
- put("testfact2_raw_base,testfact2_base",
+ put("testfact_deprecated,testfact2_raw_base,testfact2_base",
Arrays.asList(CandidateTablePruneCause.columnNotFound("msr3", "msr13")));
}
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
index 14e56fb..a916dbf 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
@@ -19,6 +19,7 @@
package org.apache.lens.cube.parse;
import static java.util.Calendar.DAY_OF_MONTH;
+import static java.util.Calendar.MONTH;
import static org.apache.lens.cube.metadata.UpdatePeriod.*;
import static org.apache.lens.cube.parse.DateUtil.*;
@@ -27,11 +28,9 @@ import static org.apache.commons.lang.time.DateUtils.addMilliseconds;
import static org.testng.Assert.assertEquals;
-import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
-import java.util.Calendar;
import java.util.Date;
import java.util.Set;
@@ -58,15 +57,6 @@ public class TestDateUtil {
};
public static final SimpleDateFormat DATE_FMT = new SimpleDateFormat("yyyy-MMM-dd");
- public static final String ABS_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss:SSS";
-
- public static final ThreadLocal<DateFormat> ABS_DATE_PARSER =
- new ThreadLocal<DateFormat>() {
- @Override
- protected SimpleDateFormat initialValue() {
- return new SimpleDateFormat(ABS_DATE_FORMAT);
- }
- };
private Date[] pairs;
@@ -85,17 +75,17 @@ public class TestDateUtil {
@Test
public void testMonthsBetween() throws Exception {
int i = 0;
- assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+ assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
new CoveringInfo(1, true),
"2013-Jan-01 to 2013-Jan-31");
i += 2;
- assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+ assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
new CoveringInfo(5, true),
"2013-Jan-01 to 2013-May-31");
i += 2;
- assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+ assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
new CoveringInfo(12, true),
"2013-Jan-01 to 2013-Dec-31");
@@ -131,7 +121,7 @@ public class TestDateUtil {
"2013-Jan-01 to 2013-May-31");
i += 2;
- assertEquals(getQuarterlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+ assertEquals(getQuarterlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
new CoveringInfo(4, true),
"2013-Jan-01 to 2013-Dec-31");
@@ -159,7 +149,7 @@ public class TestDateUtil {
"" + pairs[i] + "->" + pairs[i + 1]);
i += 2;
- assertEquals(getYearlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], Calendar.MONTH)),
+ assertEquals(getYearlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
new CoveringInfo(1, true), ""
+ pairs[i] + "->" + pairs[i + 1]);
@@ -233,26 +223,26 @@ public class TestDateUtil {
@Test
public void testFloorDate() throws ParseException {
- Date date = ABS_DATE_PARSER.get().parse("2015-01-01T00:00:00:000Z");
+ Date date = ABSDATE_PARSER.get().parse("2015-01-01-00:00:00,000");
Date curDate = date;
for (int i = 0; i < 284; i++) {
assertEquals(getFloorDate(curDate, YEARLY), date);
curDate = addMilliseconds(curDate, 111111111);
}
assertEquals(getFloorDate(curDate, YEARLY), DateUtils.addYears(date, 1));
- assertEquals(getFloorDate(date, WEEKLY), ABS_DATE_PARSER.get().parse("2014-12-28T00:00:00:000Z"));
+ assertEquals(getFloorDate(date, WEEKLY), ABSDATE_PARSER.get().parse("2014-12-28-00:00:00,000"));
}
@Test
public void testCeilDate() throws ParseException {
- Date date = ABS_DATE_PARSER.get().parse("2015-12-26T06:30:15:040Z");
- assertEquals(getCeilDate(date, YEARLY), ABS_DATE_PARSER.get().parse("2016-01-01T00:00:00:000Z"));
- assertEquals(getCeilDate(date, MONTHLY), ABS_DATE_PARSER.get().parse("2016-01-01T00:00:00:000Z"));
- assertEquals(getCeilDate(date, DAILY), ABS_DATE_PARSER.get().parse("2015-12-27T00:00:00:000Z"));
- assertEquals(getCeilDate(date, HOURLY), ABS_DATE_PARSER.get().parse("2015-12-26T07:00:00:000Z"));
- assertEquals(getCeilDate(date, MINUTELY), ABS_DATE_PARSER.get().parse("2015-12-26T06:31:00:000Z"));
- assertEquals(getCeilDate(date, SECONDLY), ABS_DATE_PARSER.get().parse("2015-12-26T06:30:16:000Z"));
- assertEquals(getCeilDate(date, WEEKLY), ABS_DATE_PARSER.get().parse("2015-12-27T00:00:00:000Z"));
+ Date date = ABSDATE_PARSER.get().parse("2015-12-26-06:30:15,040");
+ assertEquals(getCeilDate(date, YEARLY), ABSDATE_PARSER.get().parse("2016-01-01-00:00:00,000"));
+ assertEquals(getCeilDate(date, MONTHLY), ABSDATE_PARSER.get().parse("2016-01-01-00:00:00,000"));
+ assertEquals(getCeilDate(date, DAILY), ABSDATE_PARSER.get().parse("2015-12-27-00:00:00,000"));
+ assertEquals(getCeilDate(date, HOURLY), ABSDATE_PARSER.get().parse("2015-12-26-07:00:00,000"));
+ assertEquals(getCeilDate(date, MINUTELY), ABSDATE_PARSER.get().parse("2015-12-26-06:31:00,000"));
+ assertEquals(getCeilDate(date, SECONDLY), ABSDATE_PARSER.get().parse("2015-12-26-06:30:16,000"));
+ assertEquals(getCeilDate(date, WEEKLY), ABSDATE_PARSER.get().parse("2015-12-27-00:00:00,000"));
}
@Test
@@ -284,4 +274,18 @@ public class TestDateUtil {
assertEquals(minusFourDaysDiff.negativeOffsetFrom(now), plusFourDaysDiff.offsetFrom(now));
assertEquals(minusFourDaysDiff.offsetFrom(now), plusFourDaysDiff.negativeOffsetFrom(now));
}
+
+ @Test
+ public void testRelativeToAbsolute() throws SemanticException {
+ Date now = new Date();
+ Date nowDay = DateUtils.truncate(now, DAY_OF_MONTH);
+ Date nowDayMinus2Days = DateUtils.add(nowDay, DAY_OF_MONTH, -2);
+ assertEquals(relativeToAbsolute("now", now), DateUtil.ABSDATE_PARSER.get().format(now));
+ assertEquals(relativeToAbsolute("now.day", now), DateUtil.ABSDATE_PARSER.get().format(nowDay));
+ assertEquals(relativeToAbsolute("now.day - 2 days", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+ assertEquals(relativeToAbsolute("now.day - 2 day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+ assertEquals(relativeToAbsolute("now.day - 2day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+ assertEquals(relativeToAbsolute("now.day -2 day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+ assertEquals(relativeToAbsolute("now.day -2 days", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+ }
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/76638a1f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
new file mode 100644
index 0000000..f2d7990
--- /dev/null
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lens.cube.parse;
+
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.FACT_NOT_AVAILABLE_IN_RANGE;
+import static org.apache.lens.cube.parse.CubeTestSetup.*;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+public class TestTimeRangeResolver extends TestQueryRewrite {
+
+ private final String cubeName = CubeTestSetup.TEST_CUBE_NAME;
+
+ private Configuration conf;
+
+ @BeforeTest
+ public void setupDriver() throws Exception {
+ conf = new Configuration();
+ conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2");
+ conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, true);
+ conf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, true);
+ conf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, true);
+ conf.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, false);
+ }
+
+ @Override
+ public Configuration getConf() {
+ return new Configuration(conf);
+ }
+
+ @Test
+ public void testFactValidity() throws ParseException, SemanticException, LensException {
+ SemanticException e =
+ getSemanticExceptionInRewrite("cube select msr2 from " + cubeName + " where " + LAST_YEAR_RANGE,
+ getConf());
+ PruneCauses.BriefAndDetailedError causes = extractPruneCause(e);
+ assertTrue(causes.getBrief().contains("No facts available for all of these time ranges:"));
+ assertEquals(causes.getDetails().size(), 1);
+ assertEquals(causes.getDetails().values().iterator().next().size(), 1);
+ assertEquals(causes.getDetails().values().iterator().next().iterator().next().getCause(),
+ FACT_NOT_AVAILABLE_IN_RANGE);
+ }
+
+ @Test
+ public void testAbsoluteValidity() throws ParseException, HiveException, LensException {
+ CubeQueryContext ctx =
+ rewriteCtx("cube select msr12 from basecube where " + TWO_DAYS_RANGE + " or " + TWO_DAYS_RANGE_BEFORE_4_DAYS,
+ getConf());
+ assertEquals(ctx.getFactPruningMsgs().get(ctx.getMetastoreClient().getCubeFact("testfact_deprecated")).size(), 1);
+ CandidateTablePruneCause pruningMsg =
+ ctx.getFactPruningMsgs().get(ctx.getMetastoreClient().getCubeFact("testfact_deprecated")).get(0);
+ // testfact_deprecated's validity should lie between both ranges. So both ranges should be in the invalid list
+ // That would prove that parsing of properties has gone through successfully
+ assertEquals(pruningMsg.getCause(), FACT_NOT_AVAILABLE_IN_RANGE);
+ assertTrue(pruningMsg.getInvalidRanges().containsAll(ctx.getTimeRanges()));
+ }
+}