You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@drill.apache.org by vo...@apache.org on 2018/07/23 11:45:14 UTC
[drill] 02/02: DRILL-6574: Code cleanup
This is an automated email from the ASF dual-hosted git repository.
volodymyr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git
commit 5f4eeb8943e0e009136fcb5e61b51931280dac3f
Author: Bohdan Kazydub <bo...@gmail.com>
AuthorDate: Fri Jul 13 17:13:45 2018 +0300
DRILL-6574: Code cleanup
- Fixed failing test
---
.../planner/sql/handlers/FindLimit0Visitor.java | 61 ++++++++++------------
.../java/org/apache/drill/TestPartitionFilter.java | 2 +-
.../impl/limit/TestLateLimit0Optimization.java | 42 +++++++++------
3 files changed, 53 insertions(+), 52 deletions(-)
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/FindLimit0Visitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/FindLimit0Visitor.java
index 3746d7e..8f44445 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/FindLimit0Visitor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/FindLimit0Visitor.java
@@ -19,7 +19,6 @@ package org.apache.drill.exec.planner.sql.handlers;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Lists;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelShuttle;
@@ -75,7 +74,6 @@ import java.util.Set;
* executing a schema-only query.
*/
public class FindLimit0Visitor extends RelShuttleImpl {
-// private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FindLimit0Visitor.class);
// Some types are excluded in this set:
// + VARBINARY is not fully tested.
@@ -95,6 +93,25 @@ public class FindLimit0Visitor extends RelShuttleImpl {
SqlTypeName.INTERVAL_MINUTE_SECOND, SqlTypeName.INTERVAL_SECOND, SqlTypeName.CHAR, SqlTypeName.DECIMAL)
.build();
+ private static final Set<String> unsupportedFunctions = ImmutableSet.<String>builder()
+ // see Mappify
+ .add("KVGEN")
+ .add("MAPPIFY")
+ // see DummyFlatten
+ .add("FLATTEN")
+ // see JsonConvertFrom
+ .add("CONVERT_FROMJSON")
+ // see JsonConvertTo class
+ .add("CONVERT_TOJSON")
+ .add("CONVERT_TOSIMPLEJSON")
+ .add("CONVERT_TOEXTENDEDJSON")
+ .build();
+
+ private boolean contains = false;
+
+ private FindLimit0Visitor() {
+ }
+
/**
* If all field types of the given node are {@link #TYPES recognized types} and honored by execution, then this
* method returns the tree: DrillDirectScanRel(field types). Otherwise, the method returns null.
@@ -104,8 +121,7 @@ public class FindLimit0Visitor extends RelShuttleImpl {
*/
public static DrillRel getDirectScanRelIfFullySchemaed(RelNode rel) {
final List<RelDataTypeField> fieldList = rel.getRowType().getFieldList();
- final List<TypeProtos.MajorType> columnTypes = Lists.newArrayList();
-
+ final List<TypeProtos.MajorType> columnTypes = new ArrayList<>();
for (final RelDataTypeField field : fieldList) {
final SqlTypeName sqlTypeName = field.getType().getSqlTypeName();
@@ -145,32 +161,6 @@ public class FindLimit0Visitor extends RelShuttleImpl {
return visitor.isContains();
}
- private boolean contains = false;
-
- private static final Set<String> unsupportedFunctions = ImmutableSet.<String>builder()
- // see Mappify
- .add("KVGEN")
- .add("MAPPIFY")
- // see DummyFlatten
- .add("FLATTEN")
- // see JsonConvertFrom
- .add("CONVERT_FROMJSON")
- // see JsonConvertTo class
- .add("CONVERT_TOJSON")
- .add("CONVERT_TOSIMPLEJSON")
- .add("CONVERT_TOEXTENDEDJSON")
- .build();
-
- private static boolean isUnsupportedScalarFunction(final SqlOperator operator) {
- return operator instanceof DrillSqlOperator &&
- unsupportedFunctions.contains(operator.getName().toUpperCase());
- }
-
- /**
- * TODO(DRILL-3993): Use RelBuilder to create a limit node to allow for applying this optimization in potentially
- * any of the transformations, but currently this can be applied after Drill logical transformation, and before
- * Drill physical transformation.
- */
public static DrillRel addLimitOnTopOfLeafNodes(final DrillRel rel) {
final Pointer<Boolean> isUnsupported = new Pointer<>(false);
@@ -195,7 +185,9 @@ public class FindLimit0Visitor extends RelShuttleImpl {
isUnsupported.value = true;
return other;
} else if (other instanceof DrillProjectRelBase) {
- other.accept(unsupportedFunctionsVisitor);
+ if (!isUnsupported.value) {
+ other.accept(unsupportedFunctionsVisitor);
+ }
if (isUnsupported.value) {
return other;
}
@@ -231,7 +223,7 @@ public class FindLimit0Visitor extends RelShuttleImpl {
@Override
public RelNode visit(RelNode other) {
- if (other.getInputs().size() == 0) { // leaf operator
+ if (other.getInputs().isEmpty()) { // leaf operator
return addLimitAsParent(other);
}
return super.visit(other);
@@ -241,7 +233,9 @@ public class FindLimit0Visitor extends RelShuttleImpl {
return (DrillRel) rel.accept(addLimitOnScanVisitor);
}
- private FindLimit0Visitor() {
+ private static boolean isUnsupportedScalarFunction(final SqlOperator operator) {
+ return operator instanceof DrillSqlOperator &&
+ unsupportedFunctions.contains(operator.getName().toUpperCase());
}
boolean isContains() {
@@ -275,7 +269,6 @@ public class FindLimit0Visitor extends RelShuttleImpl {
return super.visit(other);
}
- // TODO: The following nodes are never visited because this visitor is used after logical transformation!
// The following set of RelNodes should terminate a search for the limit 0 pattern as they want to convey its meaning.
@Override
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestPartitionFilter.java b/exec/java-exec/src/test/java/org/apache/drill/TestPartitionFilter.java
index 8b001f6..e00e5dc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestPartitionFilter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestPartitionFilter.java
@@ -282,7 +282,7 @@ public class TestPartitionFilter extends PlanTestBase {
// with Parquet RG filter pushdown, reduce to 1 file ( o_custkey all > 10).
// There is a LIMIT(0) inserted on top of SCAN, so filter push down is not applied.
// Since this is a LIMIT 0 query, not pushing down the filter should not cause a perf. regression.
- testIncludeFilter(query, 4, "Filter", 0);
+ testIncludeFilter(query, 1, "Filter\\(", 0);
}
@Test // see DRILL-2852 and DRILL-3591
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestLateLimit0Optimization.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestLateLimit0Optimization.java
index f819260..94fcab5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestLateLimit0Optimization.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestLateLimit0Optimization.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -21,30 +21,23 @@ import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.PlanTestBase;
import org.junit.Test;
-public class TestLateLimit0Optimization extends BaseTestQuery {
+import static org.apache.drill.exec.ExecConstants.LATE_LIMIT0_OPT_KEY;
- private static String wrapLimit0(final String query) {
- return "SELECT * FROM (" + query + ") LZT LIMIT 0";
- }
+public class TestLateLimit0Optimization extends BaseTestQuery {
- private static void checkThatQueryIsOptimized(final String query) throws Exception {
- PlanTestBase.testPlanMatchingPatterns(wrapLimit0(query),
- new String[]{
- ".*Limit\\(offset=\\[0\\], fetch=\\[0\\]\\)(.*[\n\r])+.*Scan.*"
- }, new String[]{});
+ @Test
+ public void convertFromJson() throws Exception {
+ checkThatQueryIsNotOptimized("SELECT CONVERT_FROM('{x:100, y:215.6}' ,'JSON') AS MYCOL FROM (VALUES(1))");
}
private static void checkThatQueryIsNotOptimized(final String query) throws Exception {
PlanTestBase.testPlanMatchingPatterns(wrapLimit0(query),
- new String[]{},
- new String[]{
- ".*Limit\\(offset=\\[0\\], fetch=\\[0\\]\\)(.*[\n\r])+.*Scan.*"
- });
+ null,
+ new String[] {".*Limit\\(offset=\\[0\\], fetch=\\[0\\]\\)(.*[\n\r])+.*Scan.*"});
}
- @Test
- public void convertFromJson() throws Exception {
- checkThatQueryIsNotOptimized("SELECT CONVERT_FROM('{x:100, y:215.6}' ,'JSON') AS MYCOL FROM (VALUES(1))");
+ private static String wrapLimit0(final String query) {
+ return "SELECT * FROM (" + query + ") LZT LIMIT 0";
}
@Test
@@ -52,6 +45,11 @@ public class TestLateLimit0Optimization extends BaseTestQuery {
checkThatQueryIsOptimized("SELECT CONVERT_TO(r_regionkey, 'INT_BE') FROM cp.`tpch/region.parquet`");
}
+ private static void checkThatQueryIsOptimized(final String query) throws Exception {
+ PlanTestBase.testPlanMatchingPatterns(wrapLimit0(query),
+ new String[] {".*Limit\\(offset=\\[0\\], fetch=\\[0\\]\\)(.*[\n\r])+.*Scan.*"});
+ }
+
@Test
public void convertToOthers() throws Exception {
checkThatQueryIsOptimized("SELECT r_regionkey,\n" +
@@ -109,4 +107,14 @@ public class TestLateLimit0Optimization extends BaseTestQuery {
"FROM cp.`employee.json`");
}
+ @Test
+ public void testLimit0IsAbsentWhenDisabled() throws Exception {
+ String query = "SELECT CONVERT_TO(r_regionkey, 'INT_BE') FROM cp.`tpch/region.parquet`";
+ try {
+ setSessionOption(LATE_LIMIT0_OPT_KEY, false);
+ PlanTestBase.testPlanMatchingPatterns(wrapLimit0(query), null, new String[] {".*Limit\\(offset=\\[0\\], fetch=\\[0\\]\\)(.*[\n\r])+.*Scan.*"});
+ } finally {
+ resetSessionOption(LATE_LIMIT0_OPT_KEY);
+ }
+ }
}