You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lens.apache.org by pu...@apache.org on 2017/02/17 10:58:41 UTC
[4/4] lens git commit: Deleted deprecated classes, Fixed Checkstyles,
Fixed test cases, Fixed duplicate projections
Deleted deprecated classes, Fixed Checkstyles, Fixed test cases, Fixed duplicate projections
Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/975fa2c2
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/975fa2c2
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/975fa2c2
Branch: refs/heads/lens-1381
Commit: 975fa2c2b110ebf8652bd0ce67cd86bb9ac35c03
Parents: 4af769e
Author: Sushil Mohanty,Puneet Gupta and Lavkesh Lahngir <su...@apache.org>
Authored: Fri Feb 17 16:28:05 2017 +0530
Committer: Puneet <pu...@inmobi.com>
Committed: Fri Feb 17 16:28:05 2017 +0530
----------------------------------------------------------------------
.../NoCandidateFactAvailableException.java | 7 +-
.../lens/cube/metadata/CubeMetastoreClient.java | 4 +-
.../lens/cube/metadata/FactPartition.java | 3 -
.../lens/cube/parse/AggregateResolver.java | 2 -
.../org/apache/lens/cube/parse/Candidate.java | 23 +-
.../parse/CandidateCoveringSetsResolver.java | 74 +++-
.../apache/lens/cube/parse/CandidateFact.java | 381 ----------------
.../cube/parse/CandidateTablePruneCause.java | 84 ++--
.../lens/cube/parse/CandidateTableResolver.java | 22 +-
.../apache/lens/cube/parse/CandidateUtil.java | 78 ++--
.../lens/cube/parse/CubeQueryContext.java | 100 +----
.../lens/cube/parse/CubeQueryRewriter.java | 4 +-
.../lens/cube/parse/DefaultAliasDecider.java | 4 +-
.../cube/parse/DenormalizationResolver.java | 28 +-
.../lens/cube/parse/ExpressionResolver.java | 9 +-
.../apache/lens/cube/parse/GroupbyResolver.java | 8 +-
.../apache/lens/cube/parse/JoinCandidate.java | 33 +-
.../lens/cube/parse/LeastPartitionResolver.java | 2 +-
.../cube/parse/MaxCoveringFactResolver.java | 3 +-
.../org/apache/lens/cube/parse/PruneCauses.java | 5 +-
.../lens/cube/parse/QueriedPhraseContext.java | 33 +-
.../lens/cube/parse/StorageCandidate.java | 192 +++++---
.../lens/cube/parse/StorageTableResolver.java | 43 +-
.../lens/cube/parse/TimeRangeChecker.java | 1 -
.../apache/lens/cube/parse/UnionCandidate.java | 24 +-
.../lens/cube/parse/UnionQueryWriter.java | 275 +++++++++---
.../lens/cube/parse/join/AutoJoinContext.java | 2 -
.../apache/lens/driver/cube/RewriterPlan.java | 1 -
.../apache/lens/cube/parse/CubeTestSetup.java | 7 +-
.../lens/cube/parse/TestAggregateResolver.java | 188 ++++----
.../lens/cube/parse/TestBaseCubeQueries.java | 433 ++++++++-----------
.../lens/cube/parse/TestBridgeTableQueries.java | 115 ++---
.../lens/cube/parse/TestCubeRewriter.java | 204 +++++----
.../cube/parse/TestDenormalizationResolver.java | 50 ++-
.../lens/cube/parse/TestExpressionResolver.java | 12 +-
.../lens/cube/parse/TestJoinResolver.java | 22 +-
.../lens/cube/parse/TestQueryMetrics.java | 26 +-
.../lens/cube/parse/TestRewriterPlan.java | 2 -
.../lens/cube/parse/TestTimeRangeResolver.java | 58 ++-
.../parse/TestTimeRangeWriterWithQuery.java | 15 +-
.../cube/parse/TestUnionAndJoinCandidates.java | 23 +-
.../lens/cube/parse/TestUnionQueries.java | 1 -
42 files changed, 1224 insertions(+), 1377 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
index 7d12762..301458f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
@@ -18,7 +18,6 @@
*/
package org.apache.lens.cube.error;
-import org.apache.lens.cube.metadata.CubeFactTable;
import org.apache.lens.cube.parse.PruneCauses;
import org.apache.lens.cube.parse.StorageCandidate;
import org.apache.lens.server.api.error.LensException;
@@ -29,7 +28,11 @@ public class NoCandidateFactAvailableException extends LensException {
private final PruneCauses<StorageCandidate> briefAndDetailedError;
public NoCandidateFactAvailableException(PruneCauses<StorageCandidate> briefAndDetailedError) {
- super(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo(), briefAndDetailedError.getBriefCause());
+ this(briefAndDetailedError.getBriefCause(), briefAndDetailedError);
+ }
+
+ public NoCandidateFactAvailableException(String errMsg, PruneCauses<StorageCandidate> briefAndDetailedError) {
+ super(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo(), errMsg);
this.briefAndDetailedError = briefAndDetailedError;
}
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 6c9cde2..aa2e9d1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -938,14 +938,14 @@ public class CubeMetastoreClient {
}
}
- private Date getStorageTableStartDate(String storageTable, String factTableName)
+ public Date getStorageTableStartDate(String storageTable, String factTableName)
throws LensException {
List<Date> startDates = getStorageTimes(storageTable, MetastoreUtil.getStoragetableStartTimesKey());
startDates.add(getFactTable(factTableName).getStartTime());
return Collections.max(startDates);
}
- private Date getStorageTableEndDate(String storageTable, String factTableName)
+ public Date getStorageTableEndDate(String storageTable, String factTableName)
throws LensException {
List<Date> endDates = getStorageTimes(storageTable, MetastoreUtil.getStoragetableEndTimesKey());
endDates.add(getFactTable(factTableName).getEndTime());
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
index 86d6056..1694b80 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
@@ -30,7 +30,6 @@ import lombok.Getter;
import lombok.Setter;
@EqualsAndHashCode
-// TODO union : Change the class name To StoragePartition
public class FactPartition implements Comparable<FactPartition> {
@Getter
private final String partCol;
@@ -40,8 +39,6 @@ public class FactPartition implements Comparable<FactPartition> {
private final Set<String> storageTables = new LinkedHashSet<String>();
@Getter
private final UpdatePeriod period;
-
- //TODO union : this is never set . Do we need this ?s
@Getter
@Setter
private FactPartition containingPart;
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
index 79f38da..30b1a90 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
@@ -27,7 +27,6 @@ import java.util.Iterator;
import org.apache.lens.cube.error.LensCubeErrorCode;
import org.apache.lens.cube.metadata.CubeMeasure;
import org.apache.lens.cube.metadata.ExprColumn;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
import org.apache.lens.server.api.error.LensException;
@@ -71,7 +70,6 @@ class AggregateResolver implements ContextRewriter {
|| hasMeasuresNotInDefaultAggregates(cubeql, cubeql.getHavingAST(), null, aggregateResolverDisabled)
|| hasMeasures(cubeql, cubeql.getWhereAST()) || hasMeasures(cubeql, cubeql.getGroupByAST())
|| hasMeasures(cubeql, cubeql.getOrderByAST())) {
- //TODO union : Note : Pending : cube segmentation design may change the above assumption and Set<Candidate> can contain and mix of StorageCandidate and UnionSegmentCandidate. This step can then ignore UnionSegmentCandidate
Iterator<Candidate> candItr = cubeql.getCandidates().iterator();
while (candItr.hasNext()) {
Candidate candidate = candItr.next();
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java
index 1987939..095a297 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java
@@ -1,6 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
package org.apache.lens.cube.parse;
-import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.Set;
@@ -93,7 +110,7 @@ public interface Candidate {
* 1. For a JoinCandidate, atleast one of the child candidates should be able to answer the expression
* 2. For a UnionCandidate, all child candidates should answer the expression
*
- * @param expr
+ * @param expr :Expression need to be evaluated for Candidate
* @return
*/
boolean isExpressionEvaluable(ExpressionResolver.ExpressionContext expr);
@@ -104,4 +121,4 @@ public interface Candidate {
*/
Set<Integer> getAnswerableMeasurePhraseIndices();
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
index 6d85edf..a3a42ab 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
@@ -1,8 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
package org.apache.lens.cube.parse;
import java.util.*;
-import org.apache.lens.cube.error.LensCubeErrorCode;
import org.apache.lens.cube.metadata.TimeRange;
import org.apache.lens.server.api.error.LensException;
@@ -31,9 +48,22 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
finalCandidates.addAll(cubeql.getCandidates());
}
List<Candidate> timeRangeCoveringSet = resolveTimeRangeCoveringFactSet(cubeql, queriedMsrs, qpcList);
+// if (timeRangeCoveringSet.isEmpty()) {
+// throw new NoCandidateFactAvailableException(cubeql.getCube().getName()
+// + " does not have any facts that can cover the requested time range " + cubeql.getTimeRanges().toString()
+// + " and queried measure set " + getColumns(queriedMsrs).toString(),
+// cubeql.getStoragePruningMsgs());
+// }
+ log.info("Time covering candidates :{}", timeRangeCoveringSet);
List<List<Candidate>> measureCoveringSets = resolveJoinCandidates(timeRangeCoveringSet, queriedMsrs, cubeql);
+// if (measureCoveringSets.isEmpty()) {
+// throw new NoCandidateFactAvailableException(cubeql.getCube().getName()
+// + " does not have any facts that can cover the queried measure set "
+// + getColumns(queriedMsrs).toString(),
+// cubeql.getStoragePruningMsgs());
+// }
updateFinalCandidates(measureCoveringSets, cubeql);
- log.info("Covering candidate sets :{}", finalCandidates);
+ log.info("Final Time and Measure covering candidates :{}", finalCandidates);
cubeql.getCandidates().clear();
cubeql.getCandidates().addAll(finalCandidates);
}
@@ -50,7 +80,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
}
private void updateFinalCandidates(List<List<Candidate>> joinCandidates, CubeQueryContext cubeql) {
- for (Iterator<List<Candidate>> itr = joinCandidates.iterator(); itr.hasNext(); ) {
+ for (Iterator<List<Candidate>> itr = joinCandidates.iterator(); itr.hasNext();) {
List<Candidate> joinCandidate = itr.next();
if (joinCandidate.size() == 1) {
finalCandidates.add(joinCandidate.iterator().next());
@@ -61,7 +91,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
}
private boolean isCandidateCoveringTimeRanges(UnionCandidate uc, List<TimeRange> ranges) {
- for (Iterator<TimeRange> itr = ranges.iterator(); itr.hasNext(); ) {
+ for (Iterator<TimeRange> itr = ranges.iterator(); itr.hasNext();) {
TimeRange range = itr.next();
if (!CandidateUtil.isTimeRangeCovered(uc.getChildren(), range.getFromDate(), range.getToDate())) {
return false;
@@ -70,11 +100,12 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
return true;
}
- private void pruneUnionCandidatesNotCoveringAllRanges(List<UnionCandidate> ucs, List<TimeRange> ranges) {
- for (Iterator<UnionCandidate> itr = ucs.iterator(); itr.hasNext(); ) {
+ private void pruneUnionCandidatesNotCoveringAllRanges(List<UnionCandidate> ucs, CubeQueryContext cubeql) {
+ for (Iterator<UnionCandidate> itr = ucs.iterator(); itr.hasNext();) {
UnionCandidate uc = itr.next();
- if (!isCandidateCoveringTimeRanges(uc, ranges)) {
+ if (!isCandidateCoveringTimeRanges(uc, cubeql.getTimeRanges())) {
itr.remove();
+ cubeql.addCandidatePruningMsg(uc, CandidateTablePruneCause.storageNotAvailableInRange(cubeql.getTimeRanges()));
}
}
}
@@ -96,7 +127,8 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
} else if (CandidateUtil.isPartiallyValidForTimeRanges(sc, cubeql.getTimeRanges())) {
allCandidatesPartiallyValid.add(CandidateUtil.cloneStorageCandidate(sc));
} else {
- //TODO union : Add cause
+ cubeql.addCandidatePruningMsg(sc, CandidateTablePruneCause.storageNotAvailableInRange(
+ cubeql.getTimeRanges()));
}
} else {
throw new LensException("Not a StorageCandidate!!");
@@ -108,7 +140,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
// Sort the Collection based on no of elements
Collections.sort(unionCoveringSet, new CandidateUtil.ChildrenSizeBasedCandidateComparator<UnionCandidate>());
// prune non covering sets
- pruneUnionCandidatesNotCoveringAllRanges(unionCoveringSet, cubeql.getTimeRanges());
+ pruneUnionCandidatesNotCoveringAllRanges(unionCoveringSet, cubeql);
// prune candidate set which doesn't contain any common measure i
pruneUnionCoveringSetWithoutAnyCommonMeasure(unionCoveringSet, queriedMsrs, cubeql);
// prune redundant covering sets
@@ -116,7 +148,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
// pruing done in the previous steps, now create union candidates
candidateSet.addAll(unionCoveringSet);
updateQueriableMeasures(candidateSet, qpcList, cubeql);
- return candidateSet ;
+ return candidateSet;
}
private boolean isMeasureAnswerablebyUnionCandidate(QueriedPhraseContext msr, Candidate uc,
@@ -137,7 +169,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
private void pruneUnionCoveringSetWithoutAnyCommonMeasure(List<UnionCandidate> ucs,
Set<QueriedPhraseContext> queriedMsrs,
CubeQueryContext cubeql) throws LensException {
- for (ListIterator<UnionCandidate> itr = ucs.listIterator(); itr.hasNext(); ) {
+ for (ListIterator<UnionCandidate> itr = ucs.listIterator(); itr.hasNext();) {
boolean toRemove = true;
UnionCandidate uc = itr.next();
for (QueriedPhraseContext msr : queriedMsrs) {
@@ -156,7 +188,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
for (int i = 0; i < candidates.size(); i++) {
UnionCandidate current = candidates.get(i);
int j = i + 1;
- for (ListIterator<UnionCandidate> itr = candidates.listIterator(j); itr.hasNext(); ) {
+ for (ListIterator<UnionCandidate> itr = candidates.listIterator(j); itr.hasNext();) {
UnionCandidate next = itr.next();
if (next.getChildren().containsAll(current.getChildren())) {
itr.remove();
@@ -182,7 +214,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
clonedI = clonedI >>> 1;
--count;
}
- combinations.add(new UnionCandidate(individualCombinationList, cubeql ));
+ combinations.add(new UnionCandidate(individualCombinationList, cubeql));
}
return combinations;
}
@@ -192,7 +224,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
List<List<Candidate>> msrCoveringSets = new ArrayList<>();
List<Candidate> ucSet = new ArrayList<>(unionCandidates);
// Check if a single set can answer all the measures and exprsWithMeasures
- for (Iterator<Candidate> i = ucSet.iterator(); i.hasNext(); ) {
+ for (Iterator<Candidate> i = ucSet.iterator(); i.hasNext();) {
boolean evaluable = false;
Candidate uc = i.next();
for (QueriedPhraseContext msr : msrs) {
@@ -211,7 +243,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
}
// Sets that contain all measures or no measures are removed from iteration.
// find other facts
- for (Iterator<Candidate> i = ucSet.iterator(); i.hasNext(); ) {
+ for (Iterator<Candidate> i = ucSet.iterator(); i.hasNext();) {
Candidate uc = i.next();
i.remove();
// find the remaining measures in other facts
@@ -238,7 +270,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
private void updateQueriableMeasures(List<Candidate> cands,
List<QueriedPhraseContext> qpcList, CubeQueryContext cubeql) throws LensException {
- for (Candidate cand : cands ) {
+ for (Candidate cand : cands) {
updateStorageCandidateQueriableMeasures(cand, qpcList, cubeql);
}
}
@@ -276,4 +308,12 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
}
}
}
-}
\ No newline at end of file
+
+ private static Set<String> getColumns(Collection<QueriedPhraseContext> queriedPhraseContexts) {
+ Set<String> cols = new HashSet<>();
+ for (QueriedPhraseContext qur : queriedPhraseContexts) {
+ cols.addAll(qur.getColumns());
+ }
+ return cols;
+ }
+}
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
deleted file mode 100644
index ef7b9bc..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ /dev/null
@@ -1,381 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
-
-import java.util.*;
-
-import org.apache.lens.cube.metadata.*;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.session.SessionState;
-
-import org.antlr.runtime.CommonToken;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import lombok.Getter;
-import lombok.Setter;
-
-//TODO union : delete this class and use Candidate and StorageCandidtae
-/**
- * Holds context of a candidate fact table.
- */
-@Deprecated
-public class CandidateFact implements CandidateTable, QueryAST {
- final CubeFactTable fact;
- @Getter
- private Set<String> storageTables;
- @Getter
- private int numQueriedParts = 0;
- @Getter
- private final Set<FactPartition> partsQueried = Sets.newHashSet();
-
- private CubeInterface baseTable;
- @Getter
- @Setter
- private ASTNode selectAST;
- @Getter
- @Setter
- private ASTNode whereAST;
- @Getter
- @Setter
- private ASTNode groupByAST;
- @Getter
- @Setter
- private ASTNode havingAST;
- @Getter
- @Setter
- private ASTNode joinAST;
- @Getter
- @Setter
- private ASTNode orderByAST;
- @Getter
- @Setter
- private Integer limitValue;
- @Getter
- private String fromString;
- private final List<Integer> selectIndices = Lists.newArrayList();
- private final List<Integer> dimFieldIndices = Lists.newArrayList();
- private Collection<String> columns;
- @Getter
- private final Map<String, ASTNode> storgeWhereClauseMap = new HashMap<>();
- @Getter
- private final Map<String, String> storgeWhereStringMap = new HashMap<>();
- @Getter
- private final Map<TimeRange, Map<String, LinkedHashSet<FactPartition>>> rangeToStoragePartMap = new HashMap<>();
- @Getter
- private final Map<TimeRange, Map<String, String>> rangeToStorageWhereMap = new HashMap<>();
- @Getter
- @Setter
- private Map<String, Map<String, Float>> dataCompletenessMap;
-
- CandidateFact(CubeFactTable fact, CubeInterface cube) {
- this.fact = fact;
- this.baseTable = cube;
- }
-
- @Override
- public String toString() {
- return fact.toString();
- }
-
- public Collection<String> getColumns() {
- if (columns == null) {
- columns = fact.getValidColumns();
- if (columns == null) {
- columns = fact.getAllFieldNames();
- }
- }
- return columns;
- }
-
- @Override
- public Set<?> getParticipatingPartitions() {
- return null;
- }
-
- public boolean isValidForTimeRange(TimeRange timeRange) {
- return (!timeRange.getFromDate().before(fact.getStartTime())) && (!timeRange.getToDate().after(fact.getEndTime()));
- }
-
- public void addToHaving(ASTNode ast) {
- if (getHavingAST() == null) {
- setHavingAST(new ASTNode(new CommonToken(TOK_HAVING, "TOK_HAVING")));
- getHavingAST().addChild(ast);
- return;
- }
- ASTNode existingHavingAST = (ASTNode) getHavingAST().getChild(0);
- ASTNode newHavingAST = new ASTNode(new CommonToken(KW_AND, "AND"));
- newHavingAST.addChild(existingHavingAST);
- newHavingAST.addChild(ast);
- getHavingAST().setChild(0, newHavingAST);
- }
-
- public String addAndGetAliasFromSelect(ASTNode ast, AliasDecider aliasDecider) {
- for (Node n : getSelectAST().getChildren()) {
- ASTNode astNode = (ASTNode) n;
- if (HQLParser.equalsAST(ast, (ASTNode) astNode.getChild(0))) {
- if (astNode.getChildCount() > 1) {
- return astNode.getChild(1).getText();
- }
- String alias = aliasDecider.decideAlias(astNode);
- astNode.addChild(new ASTNode(new CommonToken(Identifier, alias)));
- return alias;
- }
- }
- // Not found, have to add to select
- String alias = aliasDecider.decideAlias(ast);
- ASTNode selectExprNode = new ASTNode(new CommonToken(TOK_SELEXPR));
- selectExprNode.addChild(ast);
- selectExprNode.addChild(new ASTNode(new CommonToken(Identifier, alias)));
- getSelectAST().addChild(selectExprNode);
- return alias;
- }
-
- void incrementPartsQueried(int incr) {
- numQueriedParts += incr;
- }
-
- // copy ASTs from CubeQueryContext
- public void copyASTs(CubeQueryContext cubeql) throws LensException {
- setSelectAST(MetastoreUtil.copyAST(cubeql.getSelectAST()));
- setWhereAST(MetastoreUtil.copyAST(cubeql.getWhereAST()));
- if (cubeql.getJoinAST() != null) {
- setJoinAST(MetastoreUtil.copyAST(cubeql.getJoinAST()));
- }
- if (cubeql.getGroupByAST() != null) {
- setGroupByAST(MetastoreUtil.copyAST(cubeql.getGroupByAST()));
- }
- }
-
-
- public ASTNode getStorageWhereClause(String storageTable) {
- return storgeWhereClauseMap.get(storageTable);
- }
- public String getStorageWhereString(String storageTable) {
- return storgeWhereStringMap.get(storageTable);
- }
-
- public boolean isExpressionAnswerable(ASTNode node, CubeQueryContext context) throws LensException {
- return getColumns().containsAll(HQLParser.getColsInExpr(context.getAliasForTableName(context.getCube()), node));
- }
-
- /**
- * Update the ASTs to include only the fields queried from this fact, in all the expressions
- *
- * @param cubeql
- * @throws LensException
- */
- public void updateASTs(CubeQueryContext cubeql) throws LensException {
- // update select AST with selected fields
- int currentChild = 0;
- for (int i = 0; i < cubeql.getSelectAST().getChildCount(); i++) {
- ASTNode selectExpr = (ASTNode) this.selectAST.getChild(currentChild);
- Set<String> exprCols = HQLParser.getColsInExpr(cubeql.getAliasForTableName(cubeql.getCube()), selectExpr);
- if (getColumns().containsAll(exprCols)) {
- selectIndices.add(i);
- if (exprCols.isEmpty() // no direct fact columns
- // does not have measure names
- || (!containsAny(cubeql.getCube().getMeasureNames(), exprCols))) {
- dimFieldIndices.add(i);
- }
- ASTNode aliasNode = HQLParser.findNodeByPath(selectExpr, Identifier);
- String alias = cubeql.getSelectPhrases().get(i).getSelectAlias();
- if (aliasNode != null) {
- String queryAlias = aliasNode.getText();
- if (!queryAlias.equals(alias)) {
- // replace the alias node
- ASTNode newAliasNode = new ASTNode(new CommonToken(HiveParser.Identifier, alias));
- this.selectAST.getChild(currentChild).replaceChildren(selectExpr.getChildCount() - 1,
- selectExpr.getChildCount() - 1, newAliasNode);
- }
- } else {
- // add column alias
- ASTNode newAliasNode = new ASTNode(new CommonToken(HiveParser.Identifier, alias));
- this.selectAST.getChild(currentChild).addChild(newAliasNode);
- }
- } else {
- this.selectAST.deleteChild(currentChild);
- currentChild--;
- }
- currentChild++;
- }
-
- // don't need to update where ast, since where is only on dim attributes and dim attributes
- // are assumed to be common in multi fact queries.
-
- // push down of having clauses happens just after this call in cubequerycontext
- }
-
- // The source set contains atleast one column in the colSet
- static boolean containsAny(Collection<String> srcSet, Collection<String> colSet) {
- if (colSet == null || colSet.isEmpty()) {
- return true;
- }
- for (String column : colSet) {
- if (srcSet.contains(column)) {
- return true;
- }
- }
- return false;
- }
-
- @Override
- public String getStorageString(String alias) {
- return StringUtils.join(storageTables, ",") + " " + alias;
- }
-
- @Override
- public String getStorageName() {
- return null;
- }
-
- public void setStorageTables(Set<String> storageTables) {
- String database = SessionState.get().getCurrentDatabase();
- // Add database name prefix for non default database
- if (StringUtils.isNotBlank(database) && !"default".equalsIgnoreCase(database)) {
- Set<String> storageTbls = new TreeSet<>();
- Iterator<String> names = storageTables.iterator();
- while (names.hasNext()) {
- storageTbls.add(database + "." + names.next());
- }
- this.storageTables = storageTbls;
- } else {
- this.storageTables = storageTables;
- }
- }
-
- @Override
- public AbstractCubeTable getBaseTable() {
- return (AbstractCubeTable) baseTable;
- }
-
- @Override
- public CubeFactTable getTable() {
- return fact;
- }
-
- @Override
- public String getName() {
- return fact.getName();
- }
-
- @Override
- public boolean equals(Object obj) {
- if (!super.equals(obj)) {
- return false;
- }
- CandidateFact other = (CandidateFact) obj;
-
- if (this.getTable() == null) {
- if (other.getTable() != null) {
- return false;
- }
- }
- return true;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = super.hashCode();
- result = prime * result + ((getTable() == null) ? 0 : getTable().getName().toLowerCase().hashCode());
- return result;
- }
-
- public String getSelectString() {
- return HQLParser.getString(selectAST);
- }
-
- public String getWhereString() {
- if (whereAST != null) {
- return HQLParser.getString(whereAST);
- }
- return null;
- }
-
- public String getHavingString() {
- if (havingAST != null) {
- return HQLParser.getString(havingAST);
- }
- return null;
- }
-
- @Override
- public String getOrderByString() {
- if (orderByAST != null) {
- return HQLParser.getString(orderByAST);
- }
- return null;
- }
-
- /**
- * @return the selectIndices
- */
- public List<Integer> getSelectIndices() {
- return selectIndices;
- }
-
- /**
- * @return the groupbyIndices
- */
- public List<Integer> getDimFieldIndices() {
- return dimFieldIndices;
- }
-
- public String getGroupByString() {
- if (groupByAST != null) {
- return HQLParser.getString(groupByAST);
- }
- return null;
- }
-
- public Set<String> getTimePartCols(CubeQueryContext query) throws LensException {
- Set<String> cubeTimeDimensions = baseTable.getTimedDimensions();
- Set<String> timePartDimensions = new HashSet<String>();
- String singleStorageTable = storageTables.iterator().next();
- List<FieldSchema> partitionKeys = null;
- partitionKeys = query.getMetastoreClient().getTable(singleStorageTable).getPartitionKeys();
- for (FieldSchema fs : partitionKeys) {
- if (cubeTimeDimensions.contains(CubeQueryContext.getTimeDimOfPartitionColumn(baseTable, fs.getName()))) {
- timePartDimensions.add(fs.getName());
- }
- }
- return timePartDimensions;
- }
-
- /*
- public void updateFromString(CubeQueryContext query, Set<Dimension> queryDims,
- Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
- fromString = "%s"; // to update the storage alias later
- if (query.isAutoJoinResolved()) {
- fromString =
- query.getAutoJoinCtx().getFromString(fromString, this, queryDims, dimsToQuery,
- query, this);
- }
- }
- */
-}
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
index cef8f37..c7f2047 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
@@ -38,25 +38,34 @@ import lombok.NoArgsConstructor;
@JsonWriteNullProperties(false)
@Data
@NoArgsConstructor
-//TODO union: Since we are working on StoargeCandidates now, we might need some chnages here
public class CandidateTablePruneCause {
public enum CandidateTablePruneCode {
// other fact set element is removed
ELEMENT_IN_SET_PRUNED("Other candidate from measure covering set is pruned"),
- FACT_NOT_AVAILABLE_IN_RANGE("No facts available for all of these time ranges: %s") {
- @Override
+
+ COLUMN_NOT_FOUND("%s are not %s") {
Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
- Set<TimeRange> allRanges = Sets.newHashSet();
- for (CandidateTablePruneCause cause : causes) {
- allRanges.addAll(cause.getInvalidRanges());
+ if (causes.size() == 1) {
+ return new String[]{
+ "Columns " + causes.iterator().next().getMissingColumns(),
+ "present in any table",
+ };
+ } else {
+ List<List<String>> columnSets = new ArrayList<List<String>>();
+ for (CandidateTablePruneCause cause : causes) {
+ columnSets.add(cause.getMissingColumns());
+ }
+ return new String[]{
+ "Column Sets: " + columnSets,
+ "queriable together",
+ };
}
- return new Object[]{
- allRanges.toString(),
- };
}
},
-
+ // candidate table tries to get denormalized field from dimension and the
+ // referred dimension is invalid.
+ INVALID_DENORM_TABLE("Referred dimension is invalid in one of the candidate tables"),
// Moved from Storage causes.
//The storage is removed as its not set in property "lens.cube.query.valid.fact.<fact_name>.storagetables"
@@ -65,9 +74,7 @@ public class CandidateTablePruneCause {
// STOARGE_TABLE_DOES_NOT_EXIST("Storage table does not exist"),
// storage has no update periods queried. Commented as its not being used anywhere in master.
// MISSING_UPDATE_PERIODS("Storage has no update periods"),
- // no candidate update periods, update period cause will have why each
- // update period is not a candidate
- NO_CANDIDATE_UPDATE_PERIODS("Storage update periods are not candidate"),
+
// storage table has no partitions queried
NO_PARTITIONS("Storage table has no partitions"),
// partition column does not exist
@@ -76,7 +83,20 @@ public class CandidateTablePruneCause {
TIME_RANGE_NOT_ANSWERABLE("Range not answerable"),
// storage is not supported by execution engine/driver
UNSUPPORTED_STORAGE("Unsupported Storage"),
-
+
+ STORAGE_NOT_AVAILABLE_IN_RANGE("No storages available for all of these time ranges: %s") {
+ @Override
+ Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
+ Set<TimeRange> allRanges = Sets.newHashSet();
+ for (CandidateTablePruneCause cause : causes) {
+ allRanges.addAll(cause.getInvalidRanges());
+ }
+ return new Object[]{
+ allRanges.toString(),
+ };
+ }
+ },
+
// least weight not satisfied
MORE_WEIGHT("Picked table had more weight than minimum."),
// partial data is enabled, another fact has more data.
@@ -95,13 +115,10 @@ public class CandidateTablePruneCause {
return new String[]{columns.toString()};
}
},
- // candidate table tries to get denormalized field from dimension and the
- // referred dimension is invalid.
- INVALID_DENORM_TABLE("Referred dimension is invalid in one of the candidate tables"),
// column not valid in cube table. Commented the below line as it's not being used in master.
//COLUMN_NOT_VALID("Column not valid in cube table"),
// column not found in cube table
- COLUMN_NOT_FOUND("%s are not %s") {
+ DENORM_COLUMN_NOT_FOUND("%s are not %s") {
Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
if (causes.size() == 1) {
return new String[]{
@@ -138,7 +155,13 @@ public class CandidateTablePruneCause {
};
}
},
- NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE("No fact update periods for given range"),
+ //Commented as it's not used anymore.
+ //NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE("No fact update periods for given range"),
+
+ // no candidate update periods, update period cause will have why each
+ // update period is not a candidate
+ NO_CANDIDATE_UPDATE_PERIODS("Storage update periods are not valid for given time range"),
+
NO_COLUMN_PART_OF_A_JOIN_PATH("No column part of a join path. Join columns: [%s]") {
Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
List<String> columns = new ArrayList<String>();
@@ -232,8 +255,6 @@ public class CandidateTablePruneCause {
// the fact is not partitioned by part col of the time dim and time dim is not a dim attribute
private Set<String> unsupportedTimeDims;
// time covered
- // TODO union : Fix this after MaxCoveringFactResolver chnaged wrt. Candidate
- //private MaxCoveringFactResolver.TimeCovered maxTimeCovered;
// ranges in which fact is invalid
private List<TimeRange> invalidRanges;
@@ -247,8 +268,8 @@ public class CandidateTablePruneCause {
}
// Different static constructors for different causes.
- public static CandidateTablePruneCause factNotAvailableInRange(List<TimeRange> ranges) {
- CandidateTablePruneCause cause = new CandidateTablePruneCause(FACT_NOT_AVAILABLE_IN_RANGE);
+ public static CandidateTablePruneCause storageNotAvailableInRange(List<TimeRange> ranges) {
+ CandidateTablePruneCause cause = new CandidateTablePruneCause(STORAGE_NOT_AVAILABLE_IN_RANGE);
cause.invalidRanges = ranges;
return cause;
}
@@ -258,22 +279,23 @@ public class CandidateTablePruneCause {
return cause;
}
- public static CandidateTablePruneCause columnNotFound(Collection<String>... missingColumns) {
+ public static CandidateTablePruneCause columnNotFound(CandidateTablePruneCode pruneCode,
+ Collection<String>... missingColumns) {
List<String> colList = new ArrayList<String>();
for (Collection<String> missing : missingColumns) {
colList.addAll(missing);
}
- CandidateTablePruneCause cause = new CandidateTablePruneCause(COLUMN_NOT_FOUND);
+ CandidateTablePruneCause cause = new CandidateTablePruneCause(pruneCode);
cause.setMissingColumns(colList);
return cause;
}
- public static CandidateTablePruneCause columnNotFound(String... columns) {
+ public static CandidateTablePruneCause columnNotFound(CandidateTablePruneCode pruneCode, String... columns) {
List<String> colList = new ArrayList<String>();
for (String column : columns) {
colList.add(column);
}
- return columnNotFound(colList);
+ return columnNotFound(pruneCode, colList);
}
public static CandidateTablePruneCause expressionNotEvaluable(String... exprs) {
@@ -300,14 +322,6 @@ public class CandidateTablePruneCause {
return cause;
}
- // TODO union : uncomment the below method after MaxCoveringFactResolver is fixed wrt. Candidate
- /*
- public static CandidateTablePruneCause lessData(MaxCoveringFactResolver.TimeCovered timeCovered) {
- CandidateTablePruneCause cause = new CandidateTablePruneCause(LESS_DATA);
- cause.setMaxTimeCovered(timeCovered);
- return cause;
- }
-*/
public static CandidateTablePruneCause noColumnPartOfAJoinPath(final Collection<String> colSet) {
CandidateTablePruneCause cause =
new CandidateTablePruneCause(NO_COLUMN_PART_OF_A_JOIN_PATH);
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
index 7a885a2..2ab7f4b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
@@ -58,7 +58,6 @@ class CandidateTableResolver implements ContextRewriter {
public void rewriteContext(CubeQueryContext cubeql) throws LensException {
if (checkForQueriedColumns) {
log.debug("Dump queried columns:{}", cubeql.getTblAliasToColumns());
- //TODO union : create StoargeCandidate s now in populateCandidateTables
populateCandidateTables(cubeql);
resolveCandidateFactTables(cubeql);
resolveCandidateDimTables(cubeql);
@@ -74,7 +73,6 @@ class CandidateTableResolver implements ContextRewriter {
if (cubeql.getAutoJoinCtx() != null) {
// Before checking for candidate table columns, prune join paths containing non existing columns
// in populated candidate tables
- //TODO rewrite : commented below line to compile
cubeql.getAutoJoinCtx().pruneAllPaths(cubeql.getCube(),
CandidateUtil.getStorageCandidates(cubeql.getCandidates()), null);
cubeql.getAutoJoinCtx().pruneAllPathsForCandidateDims(cubeql.getCandidateDimTables());
@@ -84,8 +82,6 @@ class CandidateTableResolver implements ContextRewriter {
// check for joined columns and denorm columns on refered tables
resolveCandidateFactTablesForJoins(cubeql);
resolveCandidateDimTablesForJoinsAndDenorms(cubeql);
- // TODO union : below method can be deleted from CubeQueryContext
- //cubeql.pruneCandidateFactSet(CandidateTablePruneCode.INVALID_DENORM_TABLE);
checkForQueriedColumns = true;
}
}
@@ -260,7 +256,7 @@ class CandidateTableResolver implements ContextRewriter {
}
}
// Remove storage candidates based on whether they are valid or not.
- for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); i.hasNext(); ) {
+ for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); i.hasNext();) {
Candidate cand = i.next();
if (cand instanceof StorageCandidate) {
StorageCandidate sc = (StorageCandidate) cand;
@@ -287,7 +283,8 @@ class CandidateTableResolver implements ContextRewriter {
for (QueriedPhraseContext qur : dimExprs) {
if (!qur.isEvaluable(cubeql, sc)) {
log.info("Not considering storage candidate:{} as columns {} are not available", sc, qur.getColumns());
- cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(qur.getColumns()));
+ cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(
+ CandidateTablePruneCode.COLUMN_NOT_FOUND, qur.getColumns()));
toRemove = true;
break;
}
@@ -299,7 +296,8 @@ class CandidateTableResolver implements ContextRewriter {
if (!checkForFactColumnExistsAndValidForRange(sc, queriedMsrs, cubeql)) {
Set<String> columns = getColumns(queriedMsrs);
log.info("Not considering storage candidate:{} as columns {} is not available", sc, columns);
- cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(columns));
+ cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(
+ CandidateTablePruneCode.COLUMN_NOT_FOUND, columns));
toRemove = true;
}
@@ -312,7 +310,8 @@ class CandidateTableResolver implements ContextRewriter {
if (optdim == null) {
log.info("Not considering storage candidate:{} as columns {} are not available", sc,
chain.getSourceColumns());
- cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(chain.getSourceColumns()));
+ cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(
+ CandidateTablePruneCode.COLUMN_NOT_FOUND, chain.getSourceColumns()));
toRemove = true;
break;
}
@@ -540,7 +539,7 @@ class CandidateTableResolver implements ContextRewriter {
log.info("Not considering Storage:{} as its required optional dims are not reachable", candidate);
cubeql.getCandidates().remove(candidate);
cubeql.addStoragePruningMsg((StorageCandidate) candidate,
- CandidateTablePruneCause.columnNotFound(col));
+ CandidateTablePruneCause.columnNotFound(CandidateTablePruneCode.COLUMN_NOT_FOUND, col));
Collection<Candidate> prunedCandidates = CandidateUtil.
filterCandidates(cubeql.getCandidates(), (StorageCandidate) candidate);
cubeql.addCandidatePruningMsg(prunedCandidates,
@@ -551,7 +550,7 @@ class CandidateTableResolver implements ContextRewriter {
cubeql.getCandidateDimTables().get(((CandidateDim) candidate).getBaseTable()).remove(candidate);
cubeql.addDimPruningMsgs((Dimension) candidate.getBaseTable(),
(CubeDimensionTable) candidate.getTable(),
- CandidateTablePruneCause.columnNotFound(col));
+ CandidateTablePruneCause.columnNotFound(CandidateTablePruneCode.COLUMN_NOT_FOUND, col));
}
}
}
@@ -650,7 +649,8 @@ class CandidateTableResolver implements ContextRewriter {
// check if it available as reference, if not remove the
// candidate
log.info("Not considering dimtable: {} as column {} is not available", cdim, col);
- cubeql.addDimPruningMsgs(dim, cdim.getTable(), CandidateTablePruneCause.columnNotFound(col));
+ cubeql.addDimPruningMsgs(dim, cdim.getTable(), CandidateTablePruneCause.columnNotFound(
+ CandidateTablePruneCode.COLUMN_NOT_FOUND, col));
i.remove();
break;
}
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
index 6cb7e3f..025a6ba 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
@@ -1,26 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
package org.apache.lens.cube.parse;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
+
import java.util.*;
-import org.antlr.runtime.CommonToken;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.lens.cube.metadata.CubeMetastoreClient;
-import org.apache.lens.cube.metadata.FactPartition;
-import org.apache.lens.cube.metadata.MetastoreUtil;
-import org.apache.lens.cube.metadata.TimeRange;
+import org.apache.lens.cube.metadata.*;
import org.apache.lens.server.api.error.LensException;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+
+import org.antlr.runtime.CommonToken;
import com.google.common.collect.BoundType;
import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
-
/**
* Placeholder for Util methods that will be required for {@link Candidate}
*/
@@ -113,21 +129,12 @@ public class CandidateUtil {
}
public static Set<StorageCandidate> getStorageCandidates(final Candidate candidate) {
- return getStorageCandidates(new HashSet<Candidate>(1) {{
- add(candidate);
- }});
+ return getStorageCandidates(new HashSet<Candidate>(1) {{ add(candidate); }});
}
- /**
- *
- * @param candSet
- * @param msrs
- * @param cubeql
- * @return
- * @throws LensException
- */
+
public static Set<QueriedPhraseContext> coveredMeasures(Candidate candSet, Collection<QueriedPhraseContext> msrs,
- CubeQueryContext cubeql) throws LensException {
+ CubeQueryContext cubeql) throws LensException {
Set<QueriedPhraseContext> coveringSet = new HashSet<>();
for (QueriedPhraseContext msr : msrs) {
if (candSet.getChildren() == null) {
@@ -136,12 +143,12 @@ public class CandidateUtil {
}
} else {
// TODO union : all candidates should answer
- for (Candidate cand : candSet.getChildren()) {
- if (msr.isEvaluable(cubeql, (StorageCandidate) cand)) {
- coveringSet.add(msr);
- }
+ for (Candidate cand : candSet.getChildren()) {
+ if (msr.isEvaluable(cubeql, (StorageCandidate) cand)) {
+ coveringSet.add(msr);
}
}
+ }
}
return coveringSet;
}
@@ -190,6 +197,7 @@ public class CandidateUtil {
/**
* Gets all the Storage Candidates that participate in the collection of passed candidates
+ *
* @param candidates
* @return
*/
@@ -211,7 +219,7 @@ public class CandidateUtil {
}
}
- public static StorageCandidate cloneStorageCandidate(StorageCandidate sc) {
+ public static StorageCandidate cloneStorageCandidate(StorageCandidate sc) throws LensException{
return new StorageCandidate(sc);
}
@@ -222,11 +230,10 @@ public class CandidateUtil {
}
}
- private static final String baseQueryFormat = "SELECT %s FROM %s";
-
- public static String buildHQLString(String select, String from, String where, String groupby, String orderby, String having,
- Integer limit) {
+ private static final String BASE_QUERY_FORMAT = "SELECT %s FROM %s";
+ public static String buildHQLString(String select, String from, String where,
+ String groupby, String orderby, String having, Integer limit) {
List<String> qstrs = new ArrayList<String>();
qstrs.add(select);
qstrs.add(from);
@@ -247,7 +254,7 @@ public class CandidateUtil {
}
StringBuilder queryFormat = new StringBuilder();
- queryFormat.append(baseQueryFormat);
+ queryFormat.append(BASE_QUERY_FORMAT);
if (!StringUtils.isBlank(where)) {
queryFormat.append(" WHERE %s");
}
@@ -307,15 +314,4 @@ public class CandidateUtil {
}
return false;
}
-
-
- public static Set<String> getMissingPartitions(StorageCandidate sc) {
- Set<String> missingParts = new HashSet<>();
- for (FactPartition part : sc.getParticipatingPartitions()) {
- if (!part.isFound()) {
- missingParts.add(part.toString()); //TODOD union . add approprite partition String
- }
- }
- return missingParts;
- }
}
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 470d6e7..f602c5f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -36,7 +36,6 @@ import org.apache.lens.cube.error.NoCandidateDimAvailableException;
import org.apache.lens.cube.error.NoCandidateFactAvailableException;
import org.apache.lens.cube.metadata.*;
import org.apache.lens.cube.metadata.join.TableRelationship;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
import org.apache.lens.cube.parse.join.AutoJoinContext;
import org.apache.lens.cube.parse.join.JoinClause;
import org.apache.lens.cube.parse.join.JoinTree;
@@ -180,10 +179,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
@Getter
@Setter
private DenormalizationResolver.DenormalizationContext deNormCtx;
- //TODO union : deprecate factPruningMsgs
- @Getter
- @Deprecated
- private PruneCauses<CubeFactTable> factPruningMsgs = new PruneCauses<>();
@Getter
private PruneCauses<StorageCandidate> storagePruningMsgs = new PruneCauses<>();
@Getter
@@ -346,7 +341,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
return false;
}
} catch (LensException e) {
- //TODO: check if catch can be removed
return false;
}
return true;
@@ -486,26 +480,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
return candidateDims;
}
- /**
- * TODO union : deprecate this method and use
- * {@link # addFactPruningMsg(CubeInterface, CubeFactTable, CandidateTablePruneCause)}
- * or
- * {@link #addStoragePruningMsg(StorageCandidate, CandidateTablePruneCause)}
- * */
- @Deprecated
- public void addFactPruningMsgs(CubeFactTable fact, CandidateTablePruneCause factPruningMsg) {
- throw new IllegalStateException("This method is deprecate");
- }
-
- //TODO union : not required as all the pruning happening at StorageCandidate
- /*
- public void addFactPruningMsg(CubeInterface cube, CubeFactTable fact, CandidateTablePruneCause factPruningMsg) {
- log.info("Pruning fact {} with cause: {}", fact, factPruningMsg);
- for (String storageName : fact.getStorages()) {
- addStoragePruningMsg(new StorageCandidate(cube, fact, storageName), factPruningMsg);
- }
- }
-*/
public void addCandidatePruningMsg(Collection<Candidate> candidateCollection, CandidateTablePruneCause pruneCause) {
for (Candidate c : candidateCollection){
addCandidatePruningMsg(c, pruneCause);
@@ -735,7 +709,8 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
qb.getParseInfo().setDestLimit(getClause(), 0, value);
}
- private String getStorageStringWithAlias(StorageCandidate candidate, Map<Dimension, CandidateDim> dimsToQuery, String alias) {
+ private String getStorageStringWithAlias(StorageCandidate candidate, Map<Dimension,
+ CandidateDim> dimsToQuery, String alias) {
if (cubeTbls.get(alias) instanceof CubeInterface) {
return candidate.getAliasForTable(alias);
} else {
@@ -815,7 +790,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
}
}
- // TODO union : Reevaluate this method.
void setNonexistingParts(Map<String, Set<String>> nonExistingParts) throws LensException {
if (!nonExistingParts.isEmpty()) {
ByteArrayOutputStream out = null;
@@ -912,7 +886,8 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
}
}
}
- log.error("Query rewrite failed due to NO_CANDIDATE_FACT_AVAILABLE, Cause {}", storagePruningMsgs.toJsonObject());
+ log.error("Query rewrite failed due to NO_CANDIDATE_FACT_AVAILABLE, Cause {}",
+ storagePruningMsgs.toJsonObject());
throw new NoCandidateFactAvailableException(storagePruningMsgs);
}
}
@@ -922,7 +897,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
private HQLContextInterface hqlContext;
@Getter
- //TODO union : This will be the final Candidate . private Candidate pickedCandidate
private Candidate pickedCandidate;
@Getter
private Collection<CandidateDim> pickedDimTables;
@@ -956,10 +930,9 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
Map<StorageCandidate, Set<Dimension>> factDimMap = new HashMap<>();
if (cand != null) {
- // copy ASTs for each storage candidate
+ // Set the default queryAST for StorageCandidate and copy child ASTs from cubeql.
+ // Later in the rewrite flow each Storage candidate will modify them accordingly.
for (StorageCandidate sc : scSet) {
- // Set the default queryAST for StorageCandidate and copy child ASTs from cubeql.
- // Later in the rewrite flow each Storage candidate will modify them accordingly.
sc.setQueryAst(DefaultQueryAST.fromStorageCandidate(sc, this));
CandidateUtil.copyASTs(this, sc.getQueryAst());
factDimMap.put(sc, new HashSet<>(dimsToQuery.keySet()));
@@ -1046,10 +1019,10 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
} else if (cand instanceof StorageCandidate) {
StorageCandidate sc = (StorageCandidate) cand;
sc.updateAnswerableSelectColumns(this);
- return getInsertClause() + sc.toHQL();
+ return getInsertClause() + sc.toHQL(factDimMap.get(sc));
} else {
UnionQueryWriter uqc = new UnionQueryWriter(cand, this);
- return getInsertClause() + uqc.toHQL();
+ return getInsertClause() + uqc.toHQL(factDimMap);
}
}
@@ -1232,63 +1205,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
queriedExprsWithMeasures.addAll(exprs);
}
- /**
- * Prune candidate fact sets with respect to available candidate facts.
- * <p></p>
- * Prune a candidate set, if any of the fact is missing.
- *
- */
- //TODO union : deprecated
- @Deprecated
- /*
- public void pruneCandidateFactSet(CandidateTablePruneCode pruneCause) {
- // remove candidate fact sets that have missing facts
- for (Iterator<Set<CandidateFact>> i = candidateFactSets.iterator(); i.hasNext();) {
- Set<CandidateFact> cfacts = i.next();
- if (!candidateFacts.containsAll(cfacts)) {
- log.info("Not considering fact table set:{} as they have non candidate tables and facts missing because of {}",
- cfacts, pruneCause);
- i.remove();
- }
- }
- // prune candidate facts
- pruneCandidateFactWithCandidateSet(CandidateTablePruneCode.ELEMENT_IN_SET_PRUNED);
- }
-*/
- /**
- * Prune candidate fact with respect to available candidate fact sets.
- * <p></p>
- * If candidate fact is not present in any of the candidate fact sets, remove it.
- *
- * @param pruneCause
- */
-/*
- public void pruneCandidateFactWithCandidateSet(CandidateTablePruneCode pruneCause) {
- // remove candidate facts that are not part of any covering set
- pruneCandidateFactWithCandidateSet(new CandidateTablePruneCause(pruneCause));
- }
-*/
- //TODO union : deprecated
- /*
- @Deprecated
-
- public void pruneCandidateFactWithCandidateSet(CandidateTablePruneCause pruneCause) {
- // remove candidate facts that are not part of any covering set
- Set<CandidateFact> allCoveringFacts = new HashSet<CandidateFact>();
- for (Set<CandidateFact> set : candidateFactSets) {
- allCoveringFacts.addAll(set);
- }
- for (Iterator<CandidateFact> i = candidateFacts.iterator(); i.hasNext();) {
- CandidateFact cfact = i.next();
- if (!allCoveringFacts.contains(cfact)) {
- log.info("Not considering fact table:{} as {}", cfact, pruneCause);
- addFactPruningMsgs(cfact.fact, pruneCause);
- i.remove();
- }
- }
- }
-*/
-
public void addQueriedTimeDimensionCols(final String timeDimColName) {
checkArgument(StringUtils.isNotBlank(timeDimColName));
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index abd909f..4dd3d00 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@ -142,6 +142,7 @@ public class CubeQueryRewriter {
DenormalizationResolver denormResolver = new DenormalizationResolver(conf);
CandidateTableResolver candidateTblResolver = new CandidateTableResolver(conf);
StorageTableResolver storageTableResolver = new StorageTableResolver(conf);
+ // Resolve expressions
rewriters.add(exprResolver);
// De-normalized columns resolved
rewriters.add(denormResolver);
@@ -154,7 +155,8 @@ public class CubeQueryRewriter {
rewriters.add(new GroupbyResolver(conf));
rewriters.add(new FieldValidator());
rewriters.add(storageTableResolver);
- //TODO union: Add CoveringSetResolver which creates UnionCandidates and JoinCandidates. Some code form candidateTblResolver(phase 2) to be moved to CoveringSetResolver
+ //TODO union: Add CoveringSetResolver which creates UnionCandidates and JoinCandidates.
+ //TODO union: Some code from candidateTblResolver(phase 2) to be moved to CoveringSetResolver
//TODO union: AggregateResolver,GroupbyResolver,FieldValidator before CoveringSetResolver
// Resolve joins and generate base join tree
rewriters.add(new JoinResolver(conf));
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
index cd44235..c8bf787 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
@@ -18,10 +18,10 @@
*/
package org.apache.lens.cube.parse;
-import lombok.Getter;
-import lombok.Setter;
import org.apache.hadoop.hive.ql.parse.ASTNode;
+import lombok.Getter;
+import lombok.Setter;
public class DefaultAliasDecider implements AliasDecider {
@Getter
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index 646dbd6..bb29034 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -240,13 +240,13 @@ public class DenormalizationResolver implements ContextRewriter {
private void replaceReferencedColumns(StorageCandidate sc, boolean replaceFact) throws LensException {
QueryAST ast = cubeql;
boolean factRefExists = sc != null && tableToRefCols.get(sc.getName()) != null && !tableToRefCols.get(sc
- .getName()).isEmpty();
+ .getName()).isEmpty();
if (replaceFact && factRefExists) {
ast = sc.getQueryAst();
}
resolveClause(cubeql, ast.getSelectAST());
if (factRefExists) {
- resolveClause(cubeql, sc.getQueryAst().getWhereAST());
+ resolveClause(cubeql, sc.getQueryAst().getWhereAST());
} else {
resolveClause(cubeql, ast.getWhereAST());
}
@@ -347,18 +347,17 @@ public class DenormalizationResolver implements ContextRewriter {
for (Iterator<StorageCandidate> i =
CandidateUtil.getStorageCandidates(cubeql.getCandidates()).iterator(); i.hasNext();) {
StorageCandidate sc = i.next();
- //TODO union : is this happening in pahse 1 or 2 ?
- //TODO union : If phase 2, the below code will not work. Move to phase1 in that case
- if (denormCtx.tableToRefCols.containsKey(sc.getFact().getName())) {
- for (ReferencedQueriedColumn refcol : denormCtx.tableToRefCols.get(sc.getFact().getName())) {
- if (denormCtx.getReferencedCols().get(refcol.col.getName()).isEmpty()) {
- log.info("Not considering storage candidate :{} as column {} is not available", sc, refcol.col);
- cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(refcol.col.getName()));
- Collection<Candidate> prunedCandidates = CandidateUtil.filterCandidates(cubeql.getCandidates(), sc);
- cubeql.addCandidatePruningMsg(prunedCandidates,
- new CandidateTablePruneCause(CandidateTablePruneCode.ELEMENT_IN_SET_PRUNED));
- }
+ if (denormCtx.tableToRefCols.containsKey(sc.getFact().getName())) {
+ for (ReferencedQueriedColumn refcol : denormCtx.tableToRefCols.get(sc.getFact().getName())) {
+ if (denormCtx.getReferencedCols().get(refcol.col.getName()).isEmpty()) {
+ log.info("Not considering storage candidate :{} as column {} is not available", sc, refcol.col);
+ cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(
+ CandidateTablePruneCode.DENORM_COLUMN_NOT_FOUND, refcol.col.getName()));
+ Collection<Candidate> prunedCandidates = CandidateUtil.filterCandidates(cubeql.getCandidates(), sc);
+ cubeql.addCandidatePruningMsg(prunedCandidates,
+ new CandidateTablePruneCause(CandidateTablePruneCode.ELEMENT_IN_SET_PRUNED));
}
+ }
}
}
if (cubeql.getCandidates().size() == 0) {
@@ -376,7 +375,8 @@ public class DenormalizationResolver implements ContextRewriter {
if (denormCtx.getReferencedCols().get(refcol.col.getName()).isEmpty()) {
log.info("Not considering dim table:{} as column {} is not available", cdim, refcol.col);
cubeql.addDimPruningMsgs(dim, cdim.dimtable,
- CandidateTablePruneCause.columnNotFound(refcol.col.getName()));
+ CandidateTablePruneCause.columnNotFound(CandidateTablePruneCode.DENORM_COLUMN_NOT_FOUND,
+ refcol.col.getName()));
i.remove();
}
}
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 0cf4b1c..82113af 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -25,7 +25,6 @@ import java.util.*;
import org.apache.lens.cube.metadata.*;
import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
import org.apache.lens.cube.parse.HQLParser.TreeNode;
import org.apache.lens.server.api.error.LensException;
@@ -450,7 +449,7 @@ class ExpressionResolver implements ContextRewriter {
throws LensException {
replaceAST(cubeql, queryAST.getSelectAST());
if (sc != null) {
- replaceAST(cubeql, sc.getQueryAst().getWhereAST());
+ replaceAST(cubeql, sc.getQueryAst().getWhereAST());
} else {
replaceAST(cubeql, queryAST.getWhereAST());
}
@@ -652,11 +651,13 @@ class ExpressionResolver implements ContextRewriter {
for (ExpressionContext ec : ecSet) {
if (ec.getSrcTable().getName().equals(cubeql.getCube().getName())) {
if (cubeql.getQueriedExprsWithMeasures().contains(expr)) {
- for (Iterator<Candidate> sItr = cubeql.getCandidates().iterator(); sItr.hasNext(); ) {
+ for (Iterator<Candidate> sItr = cubeql.getCandidates().iterator(); sItr.hasNext();) {
Candidate cand = sItr.next();
if (!cand.isExpressionEvaluable(ec)) {
log.info("Not considering Candidate :{} as {} is not evaluable", cand, ec.exprCol.getName());
sItr.remove();
+ cubeql.addCandidatePruningMsg(cand,
+ CandidateTablePruneCause.expressionNotEvaluable(ec.exprCol.getName()));
}
}
} else {
@@ -672,11 +673,11 @@ class ExpressionResolver implements ContextRewriter {
CandidateTablePruneCause.expressionNotEvaluable(ec.exprCol.getName()));
}
}
+ }
}
}
}
}
- }
// prune candidate dims without any valid expressions
if (cubeql.getDimensions() != null && !cubeql.getDimensions().isEmpty()) {
for (Dimension dim : cubeql.getDimensions()) {
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
index 6ccf3d8..c9dc7b2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
@@ -18,13 +18,11 @@
*/
package org.apache.lens.cube.parse;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
import static org.apache.lens.cube.parse.ColumnResolver.addColumnsForSelectExpr;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
+
+import java.util.*;
import org.apache.lens.cube.metadata.AbstractBaseTable;
import org.apache.lens.server.api.error.LensException;
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
index d89e7b4..fa3ba8f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
package org.apache.lens.cube.parse;
import java.util.*;
@@ -55,16 +73,17 @@ public class JoinCandidate implements Candidate {
public boolean contains(Candidate candidate) {
if (this.equals(candidate)) {
return true;
- } else
+ } else {
return childCandidate1.contains(candidate) || childCandidate2.contains(candidate);
+ }
}
@Override
public Collection<Candidate> getChildren() {
- return new ArrayList() {{
- add(childCandidate1);
- add(childCandidate2);
- }};
+ ArrayList<Candidate> joinCandidates = new ArrayList<>();
+ joinCandidates.add(childCandidate1);
+ joinCandidates.add(childCandidate2);
+ return joinCandidates;
}
/**
@@ -73,7 +92,7 @@ public class JoinCandidate implements Candidate {
*/
@Override
public boolean evaluateCompleteness(TimeRange timeRange, TimeRange parentTimeRange, boolean failOnPartialData)
- throws LensException {
+ throws LensException {
return this.childCandidate1.evaluateCompleteness(timeRange, parentTimeRange, failOnPartialData)
&& this.childCandidate2.evaluateCompleteness(timeRange, parentTimeRange, failOnPartialData);
}
@@ -114,4 +133,4 @@ public class JoinCandidate implements Candidate {
private String getToString() {
return this.toStr = "JOIN[" + childCandidate1.toString() + ", " + childCandidate2.toString() + "]";
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
index cb1cd65..153df24 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
@@ -52,7 +52,7 @@ class LeastPartitionResolver implements ContextRewriter {
double minPartitions = Collections.min(factPartCount.values());
- for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); i.hasNext(); ) {
+ for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); i.hasNext();) {
Candidate candidate = i.next();
if (factPartCount.get(candidate) > minPartitions) {
log.info("Not considering Candidate:{} as it requires more partitions to be" + " queried:{} minimum:{}",
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
index 2522d92..4664cde 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
@@ -95,8 +95,7 @@ class MaxCoveringFactResolver implements ContextRewriter {
}
}
}
- // cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.lessData(null));
-
+ //cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.lessData(null));
}
private void resolveByDataCompleteness(CubeQueryContext cubeql) {
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
index c17e5bf..0c6465a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
@@ -23,7 +23,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.apache.lens.cube.metadata.AbstractCubeTable;
import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
import org.apache.commons.lang.StringUtils;
@@ -101,9 +100,9 @@ public class PruneCauses<T> extends HashMap<T, List<CandidateTablePruneCause>> {
}
}
Map<CandidateTablePruneCause, String> maxCauseMap = Maps.newHashMap();
- for (Map.Entry<CandidateTablePruneCause, List<T>> entry: getReversed().entrySet()) {
+ for (Map.Entry<CandidateTablePruneCause, List<T>> entry : getReversed().entrySet()) {
if (entry.getKey().getCause().equals(maxCause)) {
- maxCauseMap.put(entry.getKey(), StringUtils.join(entry.getValue(), ","));
+ maxCauseMap.put(entry.getKey(), StringUtils.join(entry.getValue(), ","));
}
}
return maxCause.getBriefError(maxCauseMap.keySet());
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
index b011e47..832b7a4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
@@ -101,7 +101,6 @@ class QueriedPhraseContext extends TracksQueriedColumns implements TrackQueriedC
}
/**
- * TODO union: change CandidateFact to StorageCandidate. Let the callers typecast and send for now.
* @param cubeQl
* @param sc
* @return
@@ -139,7 +138,7 @@ class QueriedPhraseContext extends TracksQueriedColumns implements TrackQueriedC
private static boolean isColumnAvailableInRange(final TimeRange range, Date startTime, Date endTime) {
return (isColumnAvailableFrom(range.getFromDate(), startTime)
- && isColumnAvailableTill(range.getToDate(), endTime));
+ && isColumnAvailableTill(range.getToDate(), endTime));
}
private static boolean isColumnAvailableFrom(@NonNull final Date date, Date startTime) {
@@ -151,7 +150,7 @@ class QueriedPhraseContext extends TracksQueriedColumns implements TrackQueriedC
}
public static boolean isFactColumnValidForRange(CubeQueryContext cubeql, StorageCandidate sc, String col) {
- for(TimeRange range : cubeql.getTimeRanges()) {
+ for (TimeRange range : cubeql.getTimeRanges()) {
if (!isColumnAvailableInRange(range, getFactColumnStartTime(sc, col), getFactColumnEndTime(sc, col))) {
return false;
}
@@ -161,32 +160,32 @@ class QueriedPhraseContext extends TracksQueriedColumns implements TrackQueriedC
public static Date getFactColumnStartTime(StorageCandidate sc, String factCol) {
Date startTime = null;
- for (String key : sc.getTable().getProperties().keySet()) {
- if (key.contains(MetastoreConstants.FACT_COL_START_TIME_PFX)) {
- String propCol = StringUtils.substringAfter(key, MetastoreConstants.FACT_COL_START_TIME_PFX);
- if (factCol.equals(propCol)) {
- startTime = sc.getTable().getDateFromProperty(key, false, true);
- }
+ for (String key : sc.getTable().getProperties().keySet()) {
+ if (key.contains(MetastoreConstants.FACT_COL_START_TIME_PFX)) {
+ String propCol = StringUtils.substringAfter(key, MetastoreConstants.FACT_COL_START_TIME_PFX);
+ if (factCol.equals(propCol)) {
+ startTime = sc.getTable().getDateFromProperty(key, false, true);
}
}
+ }
return startTime;
}
public static Date getFactColumnEndTime(StorageCandidate sc, String factCol) {
Date endTime = null;
- for (String key : sc.getTable().getProperties().keySet()) {
- if (key.contains(MetastoreConstants.FACT_COL_END_TIME_PFX)) {
- String propCol = StringUtils.substringAfter(key, MetastoreConstants.FACT_COL_END_TIME_PFX);
- if (factCol.equals(propCol)) {
- endTime = sc.getTable().getDateFromProperty(key, false, true);
- }
+ for (String key : sc.getTable().getProperties().keySet()) {
+ if (key.contains(MetastoreConstants.FACT_COL_END_TIME_PFX)) {
+ String propCol = StringUtils.substringAfter(key, MetastoreConstants.FACT_COL_END_TIME_PFX);
+ if (factCol.equals(propCol)) {
+ endTime = sc.getTable().getDateFromProperty(key, false, true);
}
}
- return endTime;
+ }
+ return endTime;
}
static boolean checkForColumnExistsAndValidForRange(StorageCandidate sc, String column, CubeQueryContext cubeql) {
- return (sc.getColumns().contains(column) && isFactColumnValidForRange(cubeql, sc, column));
+ return (sc.getColumns().contains(column) && isFactColumnValidForRange(cubeql, sc, column));
}
}