You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lens.apache.org by pr...@apache.org on 2016/11/09 13:40:38 UTC
lens git commit: LENS-1369: Fixing Druid query rewriter issues as per
the current state of plyql Fixes for the following issues : 1. Plyql BETWEEN
clause excludes both ends. Feature addition to specify range boundary type 2.
Plyql doesn't support having/orderby queries properly. Code to throw exception in those cases.
Repository: lens
Updated Branches:
refs/heads/master 4ae48c749 -> 08d79631b
LENS-1369: Fixing Druid query rewriter issues as per the current state of plyql
Fixes for the following issues :
1. Plyql BETWEEN clause excludes both ends. Feature addition to specify range boundary type
2. Plyql doesn't support having/orderby queries properly. Code to throw exception in those cases.
Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/08d79631
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/08d79631
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/08d79631
Branch: refs/heads/master
Commit: 08d79631bbe801a768369ab45f662b2785927110
Parents: 4ae48c7
Author: Rajitha R <ra...@gmail.com>
Authored: Wed Nov 9 19:06:57 2016 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Wed Nov 9 19:06:57 2016 +0530
----------------------------------------------------------------------
.../lens/cube/metadata/FactPartition.java | 10 ++
.../lens/cube/parse/BetweenTimeRangeWriter.java | 16 ++++
.../lens/cube/parse/CubeQueryConfUtil.java | 4 +
.../cube/parse/TestBetweenTimeRangeWriter.java | 94 ++++++++++++++++++-
.../lens/cube/parse/TestTimeRangeWriter.java | 9 ++
.../lens/driver/jdbc/DruidSQLRewriter.java | 85 ++++++++++++-----
.../org/apache/lens/driver/jdbc/JDBCDriver.java | 5 +-
.../driver/jdbc/JDBCDriverConfConstants.java | 18 ++++
.../lens/driver/jdbc/TestDruidSQLRewriter.java | 99 +++++++++++++++++---
.../apache/lens/driver/jdbc/TestJdbcDriver.java | 22 +++++
.../drivers/jdbc/druid/jdbcdriver-site.xml | 8 --
src/site/apt/user/cli.apt | 86 ++++++++---------
12 files changed, 362 insertions(+), 94 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lens/blob/08d79631/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
index 355a1f0..1694b80 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
@@ -86,6 +86,16 @@ public class FactPartition implements Comparable<FactPartition> {
}
}
+ public FactPartition previous() throws LensException {
+ return new FactPartition(getPartCol(), getTimePartition().previous(), getContainingPart(), getStorageTables());
+
+ }
+
+ public FactPartition next() throws LensException {
+ return new FactPartition(getPartCol(), getTimePartition().next(), getContainingPart(), getStorageTables());
+
+ }
+
public String getPartString() {
return period.format(partSpec);
}
http://git-wip-us.apache.org/repos/asf/lens/blob/08d79631/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
index 046149b..c8b8129 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
@@ -25,6 +25,8 @@ import org.apache.lens.cube.error.LensCubeErrorCode;
import org.apache.lens.cube.metadata.FactPartition;
import org.apache.lens.server.api.error.LensException;
+import com.google.common.collect.BoundType;
+
/**
* Writes partitions queried in timerange as between clause.
@@ -41,6 +43,12 @@ public class BetweenTimeRangeWriter implements TimeRangeWriter {
boolean useBetweenOnly = cubeQueryContext.getConf().getBoolean(CubeQueryConfUtil.BETWEEN_ONLY_TIME_RANGE_WRITER,
CubeQueryConfUtil.DEFAULT_BETWEEN_ONLY_TIME_RANGE_WRITER);
+ //Fetch the date start and end bounds from config
+ BoundType startBound = BoundType.valueOf(cubeQueryContext.getConf().get(CubeQueryConfUtil.START_DATE_BOUND_TYPE,
+ CubeQueryConfUtil.DEFAULT_START_BOUND_TYPE));
+ BoundType endBound = BoundType.valueOf(cubeQueryContext.getConf().get(CubeQueryConfUtil.END_DATE_BOUND_TYPE,
+ CubeQueryConfUtil.DEFAULT_END_BOUND_TYPE));
+
StringBuilder partStr = new StringBuilder();
if (!useBetweenOnly && rangeParts.size() == 1) {
partStr.append("(");
@@ -75,6 +83,14 @@ public class BetweenTimeRangeWriter implements TimeRangeWriter {
FactPartition start = parts.first();
FactPartition end = parts.last();
+ if (startBound.equals(BoundType.OPEN)) {
+ start = start.previous();
+ }
+
+ if (endBound.equals(BoundType.OPEN)) {
+ end = end.next();
+ }
+
String partCol = start.getPartCol();
if (cubeQueryContext != null && !cubeQueryContext.shouldReplaceTimeDimWithPart()) {
partCol = cubeQueryContext.getTimeDimOfPartitionColumn(partCol);
http://git-wip-us.apache.org/repos/asf/lens/blob/08d79631/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
index f20f105..408086f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
@@ -105,6 +105,10 @@ public final class CubeQueryConfUtil {
public static final String TIME_RANGE_WRITER_CLASS = "lens.cube.query.time.range.writer.class";
public static final boolean DEFAULT_BETWEEN_ONLY_TIME_RANGE_WRITER = false;
public static final String BETWEEN_ONLY_TIME_RANGE_WRITER = "lens.cube.query.between.only.time.range.writer";
+ public static final String DEFAULT_START_BOUND_TYPE = "CLOSED";
+ public static final String DEFAULT_END_BOUND_TYPE = "CLOSED";
+ public static final String START_DATE_BOUND_TYPE = "lens.cube.query.time.range.writer.start.bound.type";
+ public static final String END_DATE_BOUND_TYPE = "lens.cube.query.time.range.writer.end.bound.type";
public static final Class<? extends TimeRangeWriter> DEFAULT_TIME_RANGE_WRITER = ORTimeRangeWriter.class
.asSubclass(TimeRangeWriter.class);
public static final String PART_WHERE_CLAUSE_DATE_FORMAT = "lens.cube.query.partition.where.clause.format";
http://git-wip-us.apache.org/repos/asf/lens/blob/08d79631/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
index 07852a0..450605b 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
@@ -6,9 +6,9 @@
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -23,16 +23,43 @@ import static org.apache.lens.cube.metadata.DateFactory.*;
import static org.apache.lens.cube.metadata.UpdatePeriod.DAILY;
import java.text.DateFormat;
-import java.util.*;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.LinkedHashSet;
+import java.util.Set;
import org.apache.lens.cube.metadata.FactPartition;
import org.apache.lens.server.api.error.LensException;
import org.testng.Assert;
+import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
public class TestBetweenTimeRangeWriter extends TestTimeRangeWriter {
+ public static final String CLOSED = "CLOSED";
+ public static final String OPEN = "OPEN";
+
+ public static final int START_DATE_OFFSET = 1;
+ public static final int END_DATE_OFFSET = 2;
+
+ public static final DateFormat DAY_DB_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
+
+ private static Set<FactPartition> answeringParts;
+ private static Set<FactPartition> answeringPartsWithFormat;
+
+ static {
+ answeringParts = new LinkedHashSet<FactPartition>();
+ answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, START_DATE_OFFSET), DAILY, null, null));
+ answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, END_DATE_OFFSET), DAILY, null, null));
+
+ answeringPartsWithFormat = new LinkedHashSet<>();
+ answeringPartsWithFormat.add(new FactPartition("dt", getDateWithOffset(DAILY, START_DATE_OFFSET), DAILY, null,
+ DAY_DB_FORMAT));
+ answeringPartsWithFormat.add(new FactPartition("dt", getDateWithOffset(DAILY, END_DATE_OFFSET), DAILY, null,
+ DAY_DB_FORMAT));
+
+ }
@Override
public TimeRangeWriter getTimerangeWriter() {
return new BetweenTimeRangeWriter();
@@ -91,4 +118,63 @@ public class TestBetweenTimeRangeWriter extends TestTimeRangeWriter {
}
Assert.assertEquals(expected, whereClause);
}
+
+ @DataProvider
+ public Object[][] getBoundTypes() {
+
+ Object[][] data = new Object[4][2];
+
+ data[0][0] = OPEN;
+ data[0][1] = OPEN;
+
+ data[1][0] = OPEN;
+ data[1][1] = CLOSED;
+
+ data[2][0] = CLOSED;
+ data[2][1] = OPEN;
+
+ data[3][0] = CLOSED;
+ data[3][1] = CLOSED;
+
+ return data;
+ }
+
+ @Test(dataProvider = "getBoundTypes")
+ public void testBetweenBoundTypes(String startBoundType, String endBoundType)
+ throws LensException {
+ String whereClause =
+ getTimerangeWriter().getTimeRangeWhereClause(getMockedCubeContextForBounds(startBoundType, endBoundType),
+ "test", answeringParts);
+
+ int testStartOffset = START_DATE_OFFSET;
+ int testEndOffset = END_DATE_OFFSET;
+ if (startBoundType.equals(OPEN)) {
+ testStartOffset = START_DATE_OFFSET - 1;
+ }
+ if (endBoundType.equals(OPEN)) {
+ testEndOffset = END_DATE_OFFSET + 1;
+ }
+ validateBetweenBoundTypes(whereClause, null, testStartOffset, testEndOffset);
+
+ whereClause =
+ getTimerangeWriter().getTimeRangeWhereClause(getMockedCubeContextForBounds(startBoundType, endBoundType),
+ "test", answeringPartsWithFormat);
+
+ validateBetweenBoundTypes(whereClause, DAY_DB_FORMAT, testStartOffset, testEndOffset);
+ }
+
+ private void validateBetweenBoundTypes(String whereClause, DateFormat format, int testStartOffset, int testEndOffset)
+ {
+ String expected = null;
+ if (format == null) {
+ expected =
+ getBetweenClause("test", "dt", getDateWithOffset(DAILY, testStartOffset),
+ getDateWithOffset(DAILY, testEndOffset), DAILY.format());
+ } else {
+ expected =
+ getBetweenClause("test", "dt", getDateWithOffset(DAILY, testStartOffset),
+ getDateWithOffset(DAILY, testEndOffset), format);
+ }
+ Assert.assertEquals(expected, whereClause);
+ }
}
http://git-wip-us.apache.org/repos/asf/lens/blob/08d79631/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
index 3417031..f28f9e1 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
@@ -62,6 +62,15 @@ public abstract class TestTimeRangeWriter {
return context;
}
+ protected CubeQueryContext getMockedCubeContextForBounds(String startBoundType, String endBoundType) {
+ CubeQueryContext context = Mockito.mock(CubeQueryContext.class);
+ Configuration configuration = new Configuration();
+ configuration.set(CubeQueryConfUtil.START_DATE_BOUND_TYPE, startBoundType);
+ configuration.set(CubeQueryConfUtil.END_DATE_BOUND_TYPE, endBoundType);
+ Mockito.when(context.getConf()).thenReturn(configuration);
+ Mockito.when(context.shouldReplaceTimeDimWithPart()).thenReturn(true);
+ return context;
+ }
public void validateSingle(String whereClause, DateFormat format) {
List<String> parts = new ArrayList<String>();
if (format == null) {
http://git-wip-us.apache.org/repos/asf/lens/blob/08d79631/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/DruidSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/DruidSQLRewriter.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/DruidSQLRewriter.java
index eb1d69c..2351fb3 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/DruidSQLRewriter.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/DruidSQLRewriter.java
@@ -35,12 +35,25 @@ import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.QB;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class DruidSQLRewriter extends ColumnarSQLRewriter {
/**
+ * Is Having supported.
+ */
+ @Getter
+ protected boolean isHavingSupported;
+
+ /**
+ * Is OrderBy supported.
+ */
+ @Getter
+ protected boolean isOrderBySupported;
+
+ /**
* Whether to resolve native tables or not. In case the query has sub query, the outer query may not
* require native table resolution
*/
@@ -79,14 +92,6 @@ public class DruidSQLRewriter extends ColumnarSQLRewriter {
this.whereAST = qb.getParseInfo().getWhrForClause(clauseName);
}
- if (qb.getParseInfo().getHavingForClause(clauseName) != null) {
- this.havingAST = qb.getParseInfo().getHavingForClause(clauseName);
- }
-
- if (qb.getParseInfo().getOrderByForClause(clauseName) != null) {
- this.orderByAST = qb.getParseInfo().getOrderByForClause(clauseName);
- }
-
if (qb.getParseInfo().getGroupByForClause(clauseName) != null) {
this.groupByAST = qb.getParseInfo().getGroupByForClause(clauseName);
}
@@ -95,6 +100,14 @@ public class DruidSQLRewriter extends ColumnarSQLRewriter {
this.selectAST = qb.getParseInfo().getSelForClause(clauseName);
}
+ if (qb.getParseInfo().getHavingForClause(clauseName) != null) {
+ this.havingAST = qb.getParseInfo().getHavingForClause(clauseName);
+ }
+
+ if (qb.getParseInfo().getOrderByForClause(clauseName) != null) {
+ this.orderByAST = qb.getParseInfo().getOrderByForClause(clauseName);
+ }
+
this.fromAST = HQLParser.findNodeByPath(ast, TOK_FROM);
}
@@ -116,11 +129,19 @@ public class DruidSQLRewriter extends ColumnarSQLRewriter {
ArrayList<String> filters = new ArrayList<>();
getWhereString(whereAST, filters);
+ String havingTree = null;
+ String orderbyTree = null;
+
+ if (isHavingSupported) {
+ havingTree = HQLParser.getString(havingAST, HQLParser.AppendMode.DEFAULT);
+ }
+
+ if (isOrderBySupported) {
+ orderbyTree = HQLParser.getString(orderByAST, HQLParser.AppendMode.DEFAULT);
+ }
// construct query with fact sub query
constructQuery(HQLParser.getString(selectAST, HQLParser.AppendMode.DEFAULT), filters,
- HQLParser.getString(groupByAST, HQLParser.AppendMode.DEFAULT),
- HQLParser.getString(havingAST, HQLParser.AppendMode.DEFAULT),
- HQLParser.getString(orderByAST, HQLParser.AppendMode.DEFAULT), limit);
+ HQLParser.getString(groupByAST, HQLParser.AppendMode.DEFAULT), havingTree, orderbyTree, limit);
}
@@ -163,20 +184,20 @@ public class DruidSQLRewriter extends ColumnarSQLRewriter {
/**
* Construct final query using all trees
*
- * @param selecttree the selecttree
+ * @param selectTree the selecttree
* @param whereFilters the wheretree
- * @param groupbytree the groupbytree
- * @param havingtree the havingtree
- * @param orderbytree the orderbytree
+ * @param groupbyTree the groupbytree
+ * @param havingTree the havingtree
+ * @param orderbyTree the orderbytree
* @param limit the limit
*/
private void constructQuery(
- String selecttree, ArrayList<String> whereFilters, String groupbytree,
- String havingtree, String orderbytree, String limit) {
+ String selectTree, ArrayList<String> whereFilters, String groupbyTree, String
+ havingTree, String orderbyTree, String limit) {
log.info("In construct query ..");
- rewrittenQuery.append("select ").append(selecttree.replaceAll("`", "\"")).append(" from ");
+ rewrittenQuery.append("select ").append(selectTree.replaceAll("`", "\"")).append(" from ");
String factNameAndAlias = getFactNameAlias(fromAST);
@@ -185,14 +206,14 @@ public class DruidSQLRewriter extends ColumnarSQLRewriter {
if (!whereFilters.isEmpty()) {
rewrittenQuery.append(" where ").append(StringUtils.join(whereFilters, " and "));
}
- if (StringUtils.isNotBlank(groupbytree)) {
- rewrittenQuery.append(" group by ").append(groupbytree);
+ if (StringUtils.isNotBlank(groupbyTree)) {
+ rewrittenQuery.append(" group by ").append(groupbyTree);
}
- if (StringUtils.isNotBlank(havingtree)) {
- rewrittenQuery.append(" having ").append(havingtree);
+ if (StringUtils.isNotBlank(havingTree)) {
+ rewrittenQuery.append(" having ").append(havingTree);
}
- if (StringUtils.isNotBlank(orderbytree)) {
- rewrittenQuery.append(" order by ").append(orderbytree);
+ if (StringUtils.isNotBlank(orderbyTree)) {
+ rewrittenQuery.append(" order by ").append(orderbyTree);
}
if (StringUtils.isNotBlank(limit)) {
rewrittenQuery.append(" limit ").append(limit);
@@ -222,6 +243,10 @@ public class DruidSQLRewriter extends ColumnarSQLRewriter {
rewrittenQuery.setLength(0);
reset();
this.ast = currNode;
+ isHavingSupported = conf.getBoolean(JDBCDriverConfConstants.JDBC_IS_HAVING_SUPPORTED,
+ JDBCDriverConfConstants.DEFAULT_JDBC_IS_HAVING_SUPPORTED);
+ isOrderBySupported = conf.getBoolean(JDBCDriverConfConstants.JDBC_IS_ORDERBY_SUPPORTED,
+ JDBCDriverConfConstants.DEFAULT_JDBC_IS_ORDERBY_SUPPORTED);
ASTNode fromNode = HQLParser.findNodeByPath(currNode, TOK_FROM);
if (fromNode != null) {
@@ -239,6 +264,18 @@ public class DruidSQLRewriter extends ColumnarSQLRewriter {
throw new LensException("Union queries are not supported by " + this + " Query : " + this.query);
}
+ if (!isHavingSupported
+ && HQLParser.findNodeByPath(currNode, HiveParser.TOK_INSERT, HiveParser.TOK_HAVING) != null) {
+ log.warn("Having queries are not supported by {} Query : {}", this, this.query);
+ throw new LensException("Having queries are not supported by " + this + " Query : " + this.query);
+ }
+
+ if (!isOrderBySupported
+ && HQLParser.findNodeByPath(currNode, HiveParser.TOK_INSERT, HiveParser.TOK_ORDERBY) != null) {
+ log.warn("Order by queries are not supported by {} Query : {}", this, this.query);
+ throw new LensException("Order by queries are not supported by " + this + " Query : " + this.query);
+ }
+
String rewritternQueryText = rewrittenQuery.toString();
if (currNode.getToken().getType() == TOK_QUERY) {
try {
http://git-wip-us.apache.org/repos/asf/lens/blob/08d79631/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
index 7ce1a97..528f857 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
@@ -98,6 +98,7 @@ public class JDBCDriver extends AbstractLensDriver {
private LogSegregationContext logSegregationContext;
private boolean isStatementCancelSupported;
+
/**
* Data related to a query submitted to JDBCDriver.
*/
@@ -728,7 +729,8 @@ public class JDBCDriver extends AbstractLensDriver {
try {
conn = calledForEstimate ? getEstimateConnection() : getConnection();
stmt = conn.prepareStatement(rewrittenQuery);
- if (stmt.getWarnings() != null) {
+ if (!pContext.getDriverConf(this).getBoolean(JDBC_VALIDATE_SKIP_WARNINGS,
+ DEFAULT_JDBC_VALIDATE_SKIP_WARNINGS) && stmt.getWarnings() != null) {
throw new LensException(stmt.getWarnings());
}
} catch (SQLException sql) {
@@ -1045,6 +1047,7 @@ public class JDBCDriver extends AbstractLensDriver {
public void writeExternal(ObjectOutput arg0) throws IOException {
// TODO Auto-generated method stub
}
+
@Override
public StatusUpdateMethod getStatusUpdateMethod() {
return StatusUpdateMethod.PUSH;
http://git-wip-us.apache.org/repos/asf/lens/blob/08d79631/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriverConfConstants.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriverConfConstants.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriverConfConstants.java
index f4e0451..51abc96 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriverConfConstants.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriverConfConstants.java
@@ -96,6 +96,24 @@ public final class JDBCDriverConfConstants {
/** The Constant DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE. */
public static final boolean DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE = true;
+ /** The Constant JDBC_VALIDATE_SKIP_WARNINGS */
+ public static final String JDBC_VALIDATE_SKIP_WARNINGS = JDBC_DRIVER_PFX + "validate.skip.warnings";
+
+ /** The Constant DEFAULT_JDBC_VALIDATE_SKIP_WARNINGS. */
+ public static final boolean DEFAULT_JDBC_VALIDATE_SKIP_WARNINGS = false;
+
+ /** The Constant JDBC_HAVING_SUPPORT. */
+ public static final String JDBC_IS_HAVING_SUPPORTED = JDBC_DRIVER_PFX + "is.having.supported";
+
+ /** The Constant JDBC_ORDERBY_SUPPORT. */
+ public static final String JDBC_IS_ORDERBY_SUPPORTED = JDBC_DRIVER_PFX + "is.orderby.supported";
+
+ /** The Constant Default JDBC_HAVING_SUPPORT. */
+ public static final boolean DEFAULT_JDBC_IS_HAVING_SUPPORTED = true;
+
+ /** The Constant Default JDBC_ORDERBY_SUPPORT. */
+ public static final boolean DEFAULT_JDBC_IS_ORDERBY_SUPPORTED = true;
+
public static final String JDBC_ENABLE_RESULTSET_STREAMING_RETRIEVAL =
JDBC_DRIVER_PFX + "enable.resultset.streaming.retrieval";
public static final boolean DEFAULT_JDBC_ENABLE_RESULTSET_STREAMING_RETRIEVAL = false;
http://git-wip-us.apache.org/repos/asf/lens/blob/08d79631/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestDruidSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestDruidSQLRewriter.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestDruidSQLRewriter.java
index c4cc91d..9cc60f0 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestDruidSQLRewriter.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestDruidSQLRewriter.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
import org.testng.Assert;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
+import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import lombok.extern.slf4j.Slf4j;
@@ -53,6 +54,9 @@ import lombok.extern.slf4j.Slf4j;
@Slf4j
public class TestDruidSQLRewriter {
+ public static final String TRUE = "true";
+ public static final String FALSE = "false";
+
HiveConf hconf = new HiveConf();
Configuration conf = new Configuration();
DruidSQLRewriter qtest = new DruidSQLRewriter();
@@ -156,10 +160,10 @@ public class TestDruidSQLRewriter {
@Test
public void testRewrittenQuery() throws LensException {
+ conf.set(JDBCDriverConfConstants.JDBC_IS_ORDERBY_SUPPORTED, TRUE);
String query =
- "select fact.time_key as `Time Key`, sum(fact.dollars_sold) from sales_fact fact group by fact.time_key order"
- + " by dollars_sold ";
-
+ "select fact.time_key as `Time Key`, sum(fact.dollars_sold) from sales_fact fact group by fact.time_key order "
+ + "by dollars_sold";
SessionState.start(hconf);
String actual = qtest.rewrite(query, conf, hconf);
String expected = "select ( fact . time_key ) as \"Time Key\" , sum(( fact . dollars_sold )) from sales_fact "
@@ -190,8 +194,7 @@ public class TestDruidSQLRewriter {
+ "where fact.item_key in (select item_key from test.item_dim idim where idim.item_name = 'item_1') "
+ "and fact.location_key in (select location_key from test.location_dim ldim where "
+ "ldim.location_name = 'loc_1') "
- + "group by time_dim.day_of_week "
- + "order by dollars_sold";
+ + "group by time_dim.day_of_week ";
SessionState.start(hconf);
@@ -206,7 +209,7 @@ public class TestDruidSQLRewriter {
@Test
public void testUnionQueryFail() {
String query = "select a,sum(b)as b from ( select a,b from tabl1 where a<=10 union all select a,b from tabl2 where"
- + " a>10 and a<=20 union all select a,b from tabl3 where a>20 )unionResult group by a order by b desc limit 10";
+ + " a>10 and a<=20 union all select a,b from tabl3 where a>20 )unionResult group by a limit 10";
SessionState.start(hconf);
try {
@@ -217,6 +220,75 @@ public class TestDruidSQLRewriter {
}
}
+ @DataProvider
+ public Object[][] getHavingOrderByDataFail() {
+ Object[][] data = new Object[3][2];
+
+ data[0][0] = TRUE;
+ data[0][1] = FALSE;
+
+ data[1][0] = FALSE;
+ data[1][1] = TRUE;
+
+ data[2][0] = FALSE;
+ data[2][1] = FALSE;
+
+ return data;
+ }
+
+ @DataProvider
+ public Object[][] getHavingOrderByDataPass() {
+
+ Object[][] data = new Object[3][4];
+
+ data[0][0] = TRUE;
+ data[0][1] = TRUE;
+ data[0][2] = "select a, sum(b) from tabl1 where a<=10 group by a having sum(b) > 10 order by a desc limit 10";
+ data[0][3] = "select a, sum(b) from tabl1 where (a <= 10) group by a having (sum(b) > 10) order by a desc limit 10";
+
+ data[1][0] = TRUE;
+ data[1][1] = FALSE;
+ data[1][2] = "select a, sum(b) from tabl1 where a<=10 group by a having sum(b) > 10 limit 10";
+ data[1][3] = "select a, sum(b) from tabl1 where (a <= 10) group by a having (sum(b) > 10) limit 10";
+
+ data[2][0] = FALSE;
+ data[2][1] = TRUE;
+ data[2][2] = "select a, sum(b) from tabl1 where a<=10 group by a order by a desc limit 10";
+ data[2][3] = "select a, sum(b) from tabl1 where (a <= 10) group by a order by a desc limit 10";
+
+ return data;
+ }
+
+
+ @Test(dataProvider = "getHavingOrderByDataPass")
+ public void testHavingOrderByQueryTest(String isHavingSupported, String isOrderBySupported, String inputQuery,
+ String expectedQuery) throws LensException {
+
+ conf.set(JDBCDriverConfConstants.JDBC_IS_HAVING_SUPPORTED, isHavingSupported);
+ conf.set(JDBCDriverConfConstants.JDBC_IS_ORDERBY_SUPPORTED, isOrderBySupported);
+
+ SessionState.start(hconf);
+ String actualQuery = qtest.rewrite(inputQuery, conf, hconf);
+ compareQueries(expectedQuery, actualQuery);
+ }
+
+ @Test(dataProvider = "getHavingOrderByDataFail")
+ public void testHavingOrderByQueryTestFail(String isHavingSupported, String isOrderBySupported) {
+
+ conf.set(JDBCDriverConfConstants.JDBC_IS_HAVING_SUPPORTED, isHavingSupported);
+ conf.set(JDBCDriverConfConstants.JDBC_IS_ORDERBY_SUPPORTED, isOrderBySupported);
+
+ String query = "select a,sum(b) from tabl1 where a<=10 group by a having sum(b) > 10 order by a desc limit 10";
+
+ SessionState.start(hconf);
+ try {
+ qtest.rewrite(query, conf, hconf);
+ Assert.fail("The invalid query did NOT suffer any exception");
+ } catch (LensException e) {
+ System.out.println("Exception as expected in Having/Orderby query..");
+ }
+ }
+
/**
* Test replace db name.
*
@@ -346,7 +418,7 @@ public class TestDruidSQLRewriter {
createTable(hconf, testDB, "mytable", "testDB", "testTable_1", false, columnMap);
String query = "SELECT t1.id, t1.name, sum(t1.dollars_sold), sum(t1.units_sold) FROM " + testDB
- + ".mytable t1 WHERE t1.id = 100 GROUP BY t1.id HAVING count(t1.id) > 2 ORDER BY t1.id";
+ + ".mytable t1 WHERE t1.id = 100 GROUP BY t1.id ";
DruidSQLRewriter rewriter = new DruidSQLRewriter();
rewriter.init(conf);
@@ -358,7 +430,7 @@ public class TestDruidSQLRewriter {
System.out.println("Actual : " + actual);
String expected =
"select (t1.id1), (t1.name1), sum((t1.Dollars_Sold)), sum((t1.Units_Sold)) from testDB.testTable_1 t1 where ("
- + "(t1.id1) = 100) group by (t1.id1) having (count((t1.id1)) > 2) order by t1.id1 asc";
+ + "(t1.id1) = 100) group by (t1.id1) ";
compareQueries(actual, expected);
@@ -372,13 +444,12 @@ public class TestDruidSQLRewriter {
/**
* Creates the table.
*
- * @param db the db
- * @param table the table
- * @param udb the udb
- * @param utable the utable
+ * @param db the db
+ * @param table the table
+ * @param udb the udb
+ * @param utable the utable
* @param setCustomSerde whether to set custom serde or not
- * @param columnMapping columnmapping for the table
- *
+ * @param columnMapping columnmapping for the table
* @throws Exception the exception
*/
void createTable(
http://git-wip-us.apache.org/repos/asf/lens/blob/08d79631/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
index 6291021..1147d82 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
@@ -664,6 +664,28 @@ public class TestJdbcDriver {
}
/**
+ * Test prepare skip warnings
+ *
+ * @throws Exception the exception
+ */
+ @Test
+ public void testPrepareSkipWarnings() throws Exception {
+ createTable("prepare_test3");
+ createTable("prepare_test3", driver.getEstimateConnection());
+ insertData("prepare_test3");
+ insertData("prepare_test3", driver.getEstimateConnection());
+
+ final String query = "SELECT * from prepare_test3";
+
+ PreparedQueryContext pContext = new PreparedQueryContext(query, "SA", baseConf, drivers);
+ pContext.getDriverConf(driver).setBoolean(JDBC_VALIDATE_SKIP_WARNINGS, true);
+ //run validate
+ driver.validate(pContext);
+ //run prepare
+ driver.prepare(pContext);
+ }
+
+ /**
* Test execute async.
*
* @throws Exception the exception
http://git-wip-us.apache.org/repos/asf/lens/blob/08d79631/lens-driver-jdbc/src/test/resources/drivers/jdbc/druid/jdbcdriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/resources/drivers/jdbc/druid/jdbcdriver-site.xml b/lens-driver-jdbc/src/test/resources/drivers/jdbc/druid/jdbcdriver-site.xml
index e4fad23..c03f68e 100644
--- a/lens-driver-jdbc/src/test/resources/drivers/jdbc/druid/jdbcdriver-site.xml
+++ b/lens-driver-jdbc/src/test/resources/drivers/jdbc/druid/jdbcdriver-site.xml
@@ -44,10 +44,6 @@
<value>org.apache.lens.driver.jdbc.DruidSQLRewriter</value>
</property>
<property>
- <name>lens.driver.jdbc.validate.through.prepare</name>
- <value>false</value>
- </property>
- <property>
<name>lens.driver.jdbc.explain.keyword</name>
<value>explain plan for </value>
</property>
@@ -59,8 +55,4 @@
<name>lens.cube.query.partition.where.clause.format</name>
<value>yyyy-MM-dd HH:mm:ss</value>
</property>
- <property>
- <name>lens.cube.query.between.only.time.range.writer</name>
- <value>true</value>
- </property>
</configuration>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lens/blob/08d79631/src/site/apt/user/cli.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/user/cli.apt b/src/site/apt/user/cli.apt
index cf9657a..caffda7 100644
--- a/src/site/apt/user/cli.apt
+++ b/src/site/apt/user/cli.apt
@@ -134,7 +134,7 @@ User CLI Commands
*--+--+
|create database [--db] \<database-name\> [--ignoreIfExists \<ignore-if-exists\>]|create a database with specified name. if <<<ignore-if-exists>>> is true, create will not be tried if already exists. Default is false\ |
*--+--+
-|drop database [--db] \<database-name\> [--cascade ]|drop a database with specified name\ |
+|drop database [--db/--database] \<database-name\> [--cascade ]|drop a database with specified name\ |
*--+--+
|show databases|displays list of all databases\ |
*--+--+
@@ -389,48 +389,48 @@ User CLI Commands
*--+--+
|<<Command>>|<<Description>>|
*--+--+
-|schema [--db] \<database-to-create-schema-in\> [--file] \<schema-directory\>|Parses the specified resource file and executes commands for creation/updation of schema \ |
-| |Expected\ structure\ is\ \ |
-| |. \ |
-| |\|--\ storages \ |
-| |\|\ \ \|--\ storage1.xml \ |
-| |\|\ \ \|--\ storage2.xml \ |
-| |\| \ |
-| |\|--\ dimensions \ |
-| |\|\ \ \|--\ dim1.xml \ |
-| |\|\ \ \|--\ dim2.xml \ |
-| |\| \ |
-| |\|--\ cubes \ |
-| |\|\ \ \|--\ base \ |
-| |\|\ \ \|\ \ \|--\ base_cube1.xml \ |
-| |\|\ \ \|\ \ \|--\ base_cube2.xml \ |
-| |\|\ \ \| \ |
-| |\|\ \ \|--\ derived \ |
-| |\|\ \ \|\ \ \|--\ derived_cube1.xml \ |
-| |\|\ \ \|\ \ \|--\ derived_cube2.xml \ |
-| |\|\ \ \| \ |
-| |\|\ \ \|--\ independent_cube1.xml \ |
-| |\|\ \ \|--\ independent_cube2.xml \ |
-| |\| \ |
-| |\|--\ dimensiontables \ |
-| |\|\ \ \|--\ dimtable1.xml \ |
-| |\|\ \ \|--\ dimtable2.xml \ |
-| |\| \ |
-| |\|--\ dimtables \ |
-| |\|\ \ \|--\ dimtable3.xml \ |
-| |\|\ \ \|--\ dimtable4.xml \ |
-| |\| \ |
-| |\|--\ facts \ |
-| |\ \ \ \|--\ fact1.xml \ |
-| |\ \ \ \|--\ fact2.xml \ |
-| | \ |
-| | \ |
-| |If\ your\ cubes\ are\ divided\ between\ base\ and\ derived\ cubes, \ |
-| |it\ makes\ sense\ to\ seperate\ into\ two\ directories,\ since\ derived\ cubes\ can't\ be\ created\ unless\ base\ cube\ exists.\ |
-| |In\ the\ other\ case\ you\ can\ keep\ them\ in\ the\ cubes\ directory\ itself. \ |
-| |For\ dimtables,\ you\ can\ keep\ your\ schema\ files\ in\ a\ directory\ named\ either\ dimtables\ or\ dimensiontables. \ |
-| |Each\ of\ these\ directories\ is\ optional\ and\ the\ order\ of\ processing\ is\ top\ to\ bottom. \ |
-| |CLI\ will\ let\ you\ know\ in\ case\ of\ any\ errors\ and\ proceed\ further\ without\ failing\ in\ between. \ |
+|schema/create schema [--db] \<database-to-create-schema-in\> [--path/--file] \<schema-directory\>|Parses the specified resource file and executes commands for creation/update of schema \ |
+| |Expected\ structure\ is\ \ |
+| |. \ |
+| |\|--\ storages \ |
+| |\|\ \ \|--\ storage1.xml \ |
+| |\|\ \ \|--\ storage2.xml \ |
+| |\| \ |
+| |\|--\ dimensions \ |
+| |\|\ \ \|--\ dim1.xml \ |
+| |\|\ \ \|--\ dim2.xml \ |
+| |\| \ |
+| |\|--\ cubes \ |
+| |\|\ \ \|--\ base \ |
+| |\|\ \ \|\ \ \|--\ base_cube1.xml \ |
+| |\|\ \ \|\ \ \|--\ base_cube2.xml \ |
+| |\|\ \ \| \ |
+| |\|\ \ \|--\ derived \ |
+| |\|\ \ \|\ \ \|--\ derived_cube1.xml \ |
+| |\|\ \ \|\ \ \|--\ derived_cube2.xml \ |
+| |\|\ \ \| \ |
+| |\|\ \ \|--\ independent_cube1.xml \ |
+| |\|\ \ \|--\ independent_cube2.xml \ |
+| |\| \ |
+| |\|--\ dimensiontables \ |
+| |\|\ \ \|--\ dimtable1.xml \ |
+| |\|\ \ \|--\ dimtable2.xml \ |
+| |\| \ |
+| |\|--\ dimtables \ |
+| |\|\ \ \|--\ dimtable3.xml \ |
+| |\|\ \ \|--\ dimtable4.xml \ |
+| |\| \ |
+| |\|--\ facts \ |
+| |\ \ \ \|--\ fact1.xml \ |
+| |\ \ \ \|--\ fact2.xml \ |
+| | \ |
+| | \ |
+| |If\ your\ cubes\ are\ divided\ between\ base\ and\ derived\ cubes, \ |
+| |it\ makes\ sense\ to\ separate\ into\ two\ directories,\ since\ derived\ cubes\ can't\ be\ created\ unless\ base\ cube\ exists.\ |
+| |In\ the\ other\ case\ you\ can\ keep\ them\ in\ the\ cubes\ directory\ itself. \ |
+| |For\ dimtables,\ you\ can\ keep\ your\ schema\ files\ in\ a\ directory\ named\ either\ dimtables\ or\ dimensiontables. \ |
+| |Each\ of\ these\ directories\ is\ optional\ and\ the\ order\ of\ processing\ is\ top\ to\ bottom. \ |
+| |CLI\ will\ let\ you\ know\ in\ case\ of\ any\ errors\ and\ proceed\ further\ without\ failing\ in\ between. \ |
*--+--+
<<Lens Schema Commands>>