You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by ka...@apache.org on 2017/07/07 08:38:41 UTC
[39/50] [abbrv] kylin git commit: minor,
rename adhoc to pushdown in KYLIN
minor, rename adhoc to pushdown in KYLIN
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/7002dd86
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/7002dd86
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/7002dd86
Branch: refs/heads/KYLIN-2606
Commit: 7002dd864a3f43a2e97a022d1d348773609f446d
Parents: d91f522
Author: Cheng Wang <ch...@kyligence.io>
Authored: Fri Jun 30 21:12:54 2017 +0800
Committer: liyang-gmt8 <li...@apache.org>
Committed: Fri Jun 30 21:18:10 2017 +0800
----------------------------------------------------------------------
.../apache/kylin/common/KylinConfigBase.java | 28 +-
.../main/resources/kylin-defaults.properties | 18 +-
.../kylin/metadata/project/ProjectManager.java | 8 +-
.../source/adhocquery/HiveAdhocConverter.java | 297 -------------------
.../adhocquery/HivePushDownConverter.java | 297 +++++++++++++++++++
.../source/adhocquery/IAdHocConverter.java | 25 --
.../kylin/source/adhocquery/IAdHocRunner.java | 39 ---
.../source/adhocquery/IPushDownConverter.java | 25 ++
.../source/adhocquery/IPushDownRunner.java | 39 +++
.../adhocquery/HiveAdhocConverterTest.java | 78 -----
.../adhocquery/HivePushDownConverterTest.java | 78 +++++
.../test_case_data/sandbox/kylin.properties | 18 +-
.../org/apache/kylin/query/KylinTestBase.java | 4 +-
.../kylin/query/adhoc/AdHocRunnerJdbcImpl.java | 122 --------
.../query/adhoc/PushDownRunnerJdbcImpl.java | 122 ++++++++
.../apache/kylin/rest/response/SQLResponse.java | 17 +-
.../apache/kylin/rest/service/QueryService.java | 150 +++++++---
.../org/apache/kylin/rest/util/AdHocUtil.java | 178 -----------
.../apache/kylin/rest/util/PushDownUtil.java | 177 +++++++++++
.../apache/kylin/rest/util/AdHocUtilTest.java | 94 ------
.../kylin/rest/util/PushDownUtilTest.java | 94 ++++++
21 files changed, 986 insertions(+), 922 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 4a21ded..179d61f 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -1022,45 +1022,45 @@ abstract public class KylinConfigBase implements Serializable {
return Integer.parseInt(this.getOptional("kylin.query.timeout-seconds", "0"));
}
- public boolean isAdhocEnabled() {
- return StringUtils.isNotEmpty(getAdHocRunnerClassName());
+ public boolean isPushDownEnabled() {
+ return StringUtils.isNotEmpty(getPushDownRunnerClassName());
}
- public String getAdHocRunnerClassName() {
- return getOptional("kylin.query.ad-hoc.runner-class-name", "");
+ public String getPushDownRunnerClassName() {
+ return getOptional("kylin.query.pushdown.runner-class-name", "");
}
- public String getAdHocConverterClassName() {
- return getOptional("kylin.query.ad-hoc.converter-class-name",
- "org.apache.kylin.source.adhocquery.HiveAdhocConverter");
+ public String getPushDownConverterClassName() {
+ return getOptional("kylin.query.pushdown.converter-class-name",
+ "org.apache.kylin.source.adhocquery.HivePushDownConverter");
}
public String getJdbcUrl() {
- return getOptional("kylin.query.ad-hoc.jdbc.url", "");
+ return getOptional("kylin.query.pushdown.jdbc.url", "");
}
public String getJdbcDriverClass() {
- return getOptional("kylin.query.ad-hoc.jdbc.driver", "");
+ return getOptional("kylin.query.pushdown.jdbc.driver", "");
}
public String getJdbcUsername() {
- return getOptional("kylin.query.ad-hoc.jdbc.username", "");
+ return getOptional("kylin.query.pushdown.jdbc.username", "");
}
public String getJdbcPassword() {
- return getOptional("kylin.query.ad-hoc.jdbc.password", "");
+ return getOptional("kylin.query.pushdown.jdbc.password", "");
}
public int getPoolMaxTotal() {
- return Integer.parseInt(this.getOptional("kylin.query.ad-hoc.jdbc.pool-max-total", "8"));
+ return Integer.parseInt(this.getOptional("kylin.query.pushdown.jdbc.pool-max-total", "8"));
}
public int getPoolMaxIdle() {
- return Integer.parseInt(this.getOptional("kylin.query.ad-hoc.jdbc.pool-max-idle", "8"));
+ return Integer.parseInt(this.getOptional("kylin.query.pushdown.jdbc.pool-max-idle", "8"));
}
public int getPoolMinIdle() {
- return Integer.parseInt(this.getOptional("kylin.query.ad-hoc.jdbc.pool-min-idle", "0"));
+ return Integer.parseInt(this.getOptional("kylin.query.pushdown.jdbc.pool-min-idle", "0"));
}
// ============================================================================
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/core-common/src/main/resources/kylin-defaults.properties
----------------------------------------------------------------------
diff --git a/core-common/src/main/resources/kylin-defaults.properties b/core-common/src/main/resources/kylin-defaults.properties
index beac63c..cb511e7 100644
--- a/core-common/src/main/resources/kylin-defaults.properties
+++ b/core-common/src/main/resources/kylin-defaults.properties
@@ -237,15 +237,15 @@ kylin.engine.spark-conf.spark.history.fs.logDirectory=hdfs\:///kylin/spark-histo
#kylin.engine.spark-conf.spark.executor.extraJavaOptions=-Dhdp.version=current
-### AD-HOC QUERY ###
+### QUERY PUSH DOWN ###
-#kylin.query.ad-hoc.runner-class-name=org.apache.kylin.query.adhoc.AdHocRunnerJdbcImpl
+#kylin.query.pushdown.runner-class-name=org.apache.kylin.query.adhoc.PushDownRunnerJdbcImpl
-#kylin.query.ad-hoc.jdbc.url=jdbc:hive2://sandbox:10000/default
-#kylin.query.ad-hoc.jdbc.driver=org.apache.hive.jdbc.HiveDriver
-#kylin.query.ad-hoc.jdbc.username=hive
-#kylin.query.ad-hoc.jdbc.password=
+#kylin.query.pushdown.jdbc.url=jdbc:hive2://sandbox:10000/default
+#kylin.query.pushdown.jdbc.driver=org.apache.hive.jdbc.HiveDriver
+#kylin.query.pushdown.jdbc.username=hive
+#kylin.query.pushdown.jdbc.password=
-#kylin.query.ad-hoc.jdbc.pool-max-total=8
-#kylin.query.ad-hoc.jdbc.pool-max-idle=8
-#kylin.query.ad-hoc.jdbc.pool-min-idle=0
+#kylin.query.pushdown.jdbc.pool-max-total=8
+#kylin.query.pushdown.jdbc.pool-max-idle=8
+#kylin.query.pushdown.jdbc.pool-min-idle=0
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
index ea03c3c..b801d8c 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/project/ProjectManager.java
@@ -437,7 +437,7 @@ public class ProjectManager {
}
public Collection<TableDesc> listExposedTables(String project) {
- return config.isAdhocEnabled() ? //
+ return config.isPushDownEnabled() ? //
this.listDefinedTables(project) : //
l2Cache.listExposedTables(norm(project));
}
@@ -445,7 +445,7 @@ public class ProjectManager {
public List<ColumnDesc> listExposedColumns(String project, TableDesc tableDesc) {
Set<ColumnDesc> exposedColumns = l2Cache.listExposedColumns(norm(project), tableDesc.getIdentity());
- if (config.isAdhocEnabled()) {
+ if (config.isPushDownEnabled()) {
// take care of computed columns
Set<ColumnDesc> dedup = Sets.newHashSet(tableDesc.getColumns());
dedup.addAll(exposedColumns);
@@ -456,13 +456,13 @@ public class ProjectManager {
}
public boolean isExposedTable(String project, String table) {
- return config.isAdhocEnabled() ? //
+ return config.isPushDownEnabled() ? //
l2Cache.isDefinedTable(norm(project), table) : //
l2Cache.isExposedTable(norm(project), table);
}
public boolean isExposedColumn(String project, String table, String col) {
- return config.isAdhocEnabled() ? //
+ return config.isPushDownEnabled() ? //
l2Cache.isDefinedColumn(norm(project), table, col) || l2Cache.isExposedColumn(norm(project), table, col)
: //
l2Cache.isExposedColumn(norm(project), table, col);
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HiveAdhocConverter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HiveAdhocConverter.java b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HiveAdhocConverter.java
deleted file mode 100644
index e8c06ff..0000000
--- a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HiveAdhocConverter.java
+++ /dev/null
@@ -1,297 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-package org.apache.kylin.source.adhocquery;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Stack;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.ImmutableSet;
-
-//TODO: Some workaround ways to make sql readable by hive parser, should replaced it with a more well-designed way
-public class HiveAdhocConverter implements IAdHocConverter {
-
- @SuppressWarnings("unused")
- private static final Logger logger = LoggerFactory.getLogger(HiveAdhocConverter.class);
-
- private static final Pattern EXTRACT_PATTERN = Pattern.compile("extract\\s*(\\()\\s*(.*?)\\s*from(\\s+)",
- Pattern.CASE_INSENSITIVE);
- private static final Pattern FROM_PATTERN = Pattern.compile("\\s+from\\s+(\\()\\s*select\\s",
- Pattern.CASE_INSENSITIVE);
- private static final Pattern ALIAS_PATTERN = Pattern.compile("\\s+([`'_a-z0-9A-Z]+)", Pattern.CASE_INSENSITIVE);
- private static final Pattern CAST_PATTERN = Pattern.compile("CAST\\((.*?) (?i)AS\\s*(.*?)\\s*\\)",
- Pattern.CASE_INSENSITIVE);
- private static final Pattern CONCAT_PATTERN = Pattern.compile("(['_a-z0-9A-Z]+)\\|\\|(['_a-z0-9A-Z]+)",
- Pattern.CASE_INSENSITIVE);
- private static final Pattern TIMESTAMPADD_PATTERN = Pattern.compile("timestampadd\\s*\\(\\s*(.*?)\\s*,",
- Pattern.CASE_INSENSITIVE);
- private static final ImmutableSet<String> sqlKeyWordsExceptAS = ImmutableSet.of("A", "ABS", "ABSOLUTE", "ACTION",
- "ADA", "ADD", "ADMIN", "AFTER", "ALL", "ALLOCATE", "ALLOW", "ALTER", "ALWAYS", "AND", "ANY", "APPLY", "ARE",
- "ARRAY", "ARRAY_MAX_CARDINALITY", "ASC", "ASENSITIVE", "ASSERTION", "ASSIGNMENT", "ASYMMETRIC", "AT",
- "ATOMIC", "ATTRIBUTE", "ATTRIBUTES", "AUTHORIZATION", "AVG", "BEFORE", "BEGIN", "BEGIN_FRAME",
- "BEGIN_PARTITION", "BERNOULLI", "BETWEEN", "BIGINT", "BINARY", "BIT", "BLOB", "BOOLEAN", "BOTH", "BREADTH",
- "BY", "C", "CALL", "CALLED", "CARDINALITY", "CASCADE", "CASCADED", "CASE", "CAST", "CATALOG",
- "CATALOG_NAME", "CEIL", "CEILING", "CENTURY", "CHAIN", "CHAR", "CHARACTER", "CHARACTERISTICS", "CHARACTERS",
- "CHARACTER_LENGTH", "CHARACTER_SET_CATALOG", "CHARACTER_SET_NAME", "CHARACTER_SET_SCHEMA", "CHAR_LENGTH",
- "CHECK", "CLASSIFIER", "CLASS_ORIGIN", "CLOB", "CLOSE", "COALESCE", "COBOL", "COLLATE", "COLLATION",
- "COLLATION_CATALOG", "COLLATION_NAME", "COLLATION_SCHEMA", "COLLECT", "COLUMN", "COLUMN_NAME",
- "COMMAND_FUNCTION", "COMMAND_FUNCTION_CODE", "COMMIT", "COMMITTED", "CONDITION", "CONDITION_NUMBER",
- "CONNECT", "CONNECTION", "CONNECTION_NAME", "CONSTRAINT", "CONSTRAINTS", "CONSTRAINT_CATALOG",
- "CONSTRAINT_NAME", "CONSTRAINT_SCHEMA", "CONSTRUCTOR", "CONTAINS", "CONTINUE", "CONVERT", "CORR",
- "CORRESPONDING", "COUNT", "COVAR_POP", "COVAR_SAMP", "CREATE", "CROSS", "CUBE", "CUME_DIST", "CURRENT",
- "CURRENT_CATALOG", "CURRENT_DATE", "CURRENT_DEFAULT_TRANSFORM_GROUP", "CURRENT_PATH", "CURRENT_ROLE",
- "CURRENT_ROW", "CURRENT_SCHEMA", "CURRENT_TIME", "CURRENT_TIMESTAMP", "CURRENT_TRANSFORM_GROUP_FOR_TYPE",
- "CURRENT_USER", "CURSOR", "CURSOR_NAME", "CYCLE", "DATA", "DATABASE", "DATE", "DATETIME_INTERVAL_CODE",
- "DATETIME_INTERVAL_PRECISION", "DAY", "DEALLOCATE", "DEC", "DECADE", "DECIMAL", "DECLARE", "DEFAULT",
- "DEFAULTS", "DEFERRABLE", "DEFERRED", "DEFINE", "DEFINED", "DEFINER", "DEGREE", "DELETE", "DENSE_RANK",
- "DEPTH", "DEREF", "DERIVED", "DESC", "DESCRIBE", "DESCRIPTION", "DESCRIPTOR", "DETERMINISTIC",
- "DIAGNOSTICS", "DISALLOW", "DISCONNECT", "DISPATCH", "DISTINCT", "DOMAIN", "DOUBLE", "DOW", "DOY", "DROP",
- "DYNAMIC", "DYNAMIC_FUNCTION", "DYNAMIC_FUNCTION_CODE", "EACH", "ELEMENT", "ELSE", "EMPTY", "END",
- "END-EXEC", "END_FRAME", "END_PARTITION", "EPOCH", "EQUALS", "ESCAPE", "EVERY", "EXCEPT", "EXCEPTION",
- "EXCLUDE", "EXCLUDING", "EXEC", "EXECUTE", "EXISTS", "EXP", "EXPLAIN", "EXTEND", "EXTERNAL", "EXTRACT",
- "FALSE", "FETCH", "FILTER", "FINAL", "FIRST", "FIRST_VALUE", "FLOAT", "FLOOR", "FOLLOWING", "FOR",
- "FOREIGN", "FORTRAN", "FOUND", "FRAC_SECOND", "FRAME_ROW", "FREE", "FROM", "FULL", "FUNCTION", "FUSION",
- "G", "GENERAL", "GENERATED", "GET", "GLOBAL", "GO", "GOTO", "GRANT", "GRANTED", "GROUP", "GROUPING",
- "GROUPS", "HAVING", "HIERARCHY", "HOLD", "HOUR", "IDENTITY", "IMMEDIATE", "IMMEDIATELY", "IMPLEMENTATION",
- "IMPORT", "IN", "INCLUDING", "INCREMENT", "INDICATOR", "INITIAL", "INITIALLY", "INNER", "INOUT", "INPUT",
- "INSENSITIVE", "INSERT", "INSTANCE", "INSTANTIABLE", "INT", "INTEGER", "INTERSECT", "INTERSECTION",
- "INTERVAL", "INTO", "INVOKER", "IS", "ISOLATION", "JAVA", "JOIN", "JSON", "K", "KEY", "KEY_MEMBER",
- "KEY_TYPE", "LABEL", "LAG", "LANGUAGE", "LARGE", "LAST", "LAST_VALUE", "LATERAL", "LEAD", "LEADING", "LEFT",
- "LENGTH", "LEVEL", "LIBRARY", "LIKE", "LIKE_REGEX", "LIMIT", "LN", "LOCAL", "LOCALTIME", "LOCALTIMESTAMP",
- "LOCATOR", "LOWER", "M", "MAP", "MATCH", "MATCHED", "MATCHES", "MATCH_NUMBER", "MATCH_RECOGNIZE", "MAX",
- "MAXVALUE", "MEASURES", "MEMBER", "MERGE", "MESSAGE_LENGTH", "MESSAGE_OCTET_LENGTH", "MESSAGE_TEXT",
- "METHOD", "MICROSECOND", "MILLENNIUM", "MIN", "MINUS", "MINUTE", "MINVALUE", "MOD", "MODIFIES", "MODULE",
- "MONTH", "MORE", "MULTISET", "MUMPS", "NAME", "NAMES", "NATIONAL", "NATURAL", "NCHAR", "NCLOB", "NESTING",
- "NEW", "NEXT", "NO", "NONE", "NORMALIZE", "NORMALIZED", "NOT", "NTH_VALUE", "NTILE", "NULL", "NULLABLE",
- "NULLIF", "NULLS", "NUMBER", "NUMERIC", "OBJECT", "OCCURRENCES_REGEX", "OCTETS", "OCTET_LENGTH", "OF",
- "OFFSET", "OLD", "OMIT", "ON", "ONE", "ONLY", "OPEN", "OPTION", "OPTIONS", "OR", "ORDER", "ORDERING",
- "ORDINALITY", "OTHERS", "OUT", "OUTER", "OUTPUT", "OVER", "OVERLAPS", "OVERLAY", "OVERRIDING", "PAD",
- "PARAMETER", "PARAMETER_MODE", "PARAMETER_NAME", "PARAMETER_ORDINAL_POSITION", "PARAMETER_SPECIFIC_CATALOG",
- "PARAMETER_SPECIFIC_NAME", "PARAMETER_SPECIFIC_SCHEMA", "PARTIAL", "PARTITION", "PASCAL", "PASSTHROUGH",
- "PAST", "PATH", "PATTERN", "PER", "PERCENT", "PERCENTILE_CONT", "PERCENTILE_DISC", "PERCENT_RANK", "PERIOD",
- "PERMUTE", "PLACING", "PLAN", "PLI", "PORTION", "POSITION", "POSITION_REGEX", "POWER", "PRECEDES",
- "PRECEDING", "PRECISION", "PREPARE", "PRESERVE", "PREV", "PRIMARY", "PRIOR", "PRIVILEGES", "PROCEDURE",
- "PUBLIC", "QUARTER", "RANGE", "RANK", "READ", "READS", "REAL", "RECURSIVE", "REF", "REFERENCES",
- "REFERENCING", "REGR_AVGX", "REGR_AVGY", "REGR_COUNT", "REGR_INTERCEPT", "REGR_R2", "REGR_SLOPE",
- "REGR_SXX", "REGR_SXY", "REGR_SYY", "RELATIVE", "RELEASE", "REPEATABLE", "REPLACE", "RESET", "RESTART",
- "RESTRICT", "RESULT", "RETURN", "RETURNED_CARDINALITY", "RETURNED_LENGTH", "RETURNED_OCTET_LENGTH",
- "RETURNED_SQLSTATE", "RETURNS", "REVOKE", "RIGHT", "ROLE", "ROLLBACK", "ROLLUP", "ROUTINE",
- "ROUTINE_CATALOG", "ROUTINE_NAME", "ROUTINE_SCHEMA", "ROW", "ROWS", "ROW_COUNT", "ROW_NUMBER", "RUNNING",
- "SAVEPOINT", "SCALE", "SCHEMA", "SCHEMA_NAME", "SCOPE", "SCOPE_CATALOGS", "SCOPE_NAME", "SCOPE_SCHEMA",
- "SCROLL", "SEARCH", "SECOND", "SECTION", "SECURITY", "SEEK", "SELECT", "SELF", "SENSITIVE", "SEQUENCE",
- "SERIALIZABLE", "SERVER", "SERVER_NAME", "SESSION", "SESSION_USER", "SET", "SETS", "SHOW", "SIMILAR",
- "SIMPLE", "SIZE", "SKIP", "SMALLINT", "SOME", "SOURCE", "SPACE", "SPECIFIC", "SPECIFICTYPE",
- "SPECIFIC_NAME", "SQL", "SQLEXCEPTION", "SQLSTATE", "SQLWARNING", "SQL_BIGINT", "SQL_BINARY", "SQL_BIT",
- "SQL_BLOB", "SQL_BOOLEAN", "SQL_CHAR", "SQL_CLOB", "SQL_DATE", "SQL_DECIMAL", "SQL_DOUBLE", "SQL_FLOAT",
- "SQL_INTEGER", "SQL_INTERVAL_DAY", "SQL_INTERVAL_DAY_TO_HOUR", "SQL_INTERVAL_DAY_TO_MINUTE",
- "SQL_INTERVAL_DAY_TO_SECOND", "SQL_INTERVAL_HOUR", "SQL_INTERVAL_HOUR_TO_MINUTE",
- "SQL_INTERVAL_HOUR_TO_SECOND", "SQL_INTERVAL_MINUTE", "SQL_INTERVAL_MINUTE_TO_SECOND", "SQL_INTERVAL_MONTH",
- "SQL_INTERVAL_SECOND", "SQL_INTERVAL_YEAR", "SQL_INTERVAL_YEAR_TO_MONTH", "SQL_LONGVARBINARY",
- "SQL_LONGVARCHAR", "SQL_LONGVARNCHAR", "SQL_NCHAR", "SQL_NCLOB", "SQL_NUMERIC", "SQL_NVARCHAR", "SQL_REAL",
- "SQL_SMALLINT", "SQL_TIME", "SQL_TIMESTAMP", "SQL_TINYINT", "SQL_TSI_DAY", "SQL_TSI_FRAC_SECOND",
- "SQL_TSI_HOUR", "SQL_TSI_MICROSECOND", "SQL_TSI_MINUTE", "SQL_TSI_MONTH", "SQL_TSI_QUARTER",
- "SQL_TSI_SECOND", "SQL_TSI_WEEK", "SQL_TSI_YEAR", "SQL_VARBINARY", "SQL_VARCHAR", "SQRT", "START", "STATE",
- "STATEMENT", "STATIC", "STDDEV_POP", "STDDEV_SAMP", "STREAM", "STRUCTURE", "STYLE", "SUBCLASS_ORIGIN",
- "SUBMULTISET", "SUBSET", "SUBSTITUTE", "SUBSTRING", "SUBSTRING_REGEX", "SUCCEEDS", "SUM", "SYMMETRIC",
- "SYSTEM", "SYSTEM_TIME", "SYSTEM_USER", "TABLE", "TABLESAMPLE", "TABLE_NAME", "TEMPORARY", "THEN", "TIES",
- "TIME", "TIMESTAMP", "TIMESTAMPADD", "TIMESTAMPDIFF", "TIMEZONE_HOUR", "TIMEZONE_MINUTE", "TINYINT", "TO",
- "TOP_LEVEL_COUNT", "TRAILING", "TRANSACTION", "TRANSACTIONS_ACTIVE", "TRANSACTIONS_COMMITTED",
- "TRANSACTIONS_ROLLED_BACK", "TRANSFORM", "TRANSFORMS", "TRANSLATE", "TRANSLATE_REGEX", "TRANSLATION",
- "TREAT", "TRIGGER", "TRIGGER_CATALOG", "TRIGGER_NAME", "TRIGGER_SCHEMA", "TRIM", "TRIM_ARRAY", "TRUE",
- "TRUNCATE", "TYPE", "UESCAPE", "UNBOUNDED", "UNCOMMITTED", "UNDER", "UNION", "UNIQUE", "UNKNOWN", "UNNAMED",
- "UNNEST", "UPDATE", "UPPER", "UPSERT", "USAGE", "USER", "USER_DEFINED_TYPE_CATALOG",
- "USER_DEFINED_TYPE_CODE", "USER_DEFINED_TYPE_NAME", "USER_DEFINED_TYPE_SCHEMA", "USING", "VALUE", "VALUES",
- "VALUE_OF", "VARBINARY", "VARCHAR", "VARYING", "VAR_POP", "VAR_SAMP", "VERSION", "VERSIONING", "VIEW",
- "WEEK", "WHEN", "WHENEVER", "WHERE", "WIDTH_BUCKET", "WINDOW", "WITH", "WITHIN", "WITHOUT", "WORK",
- "WRAPPER", "WRITE", "XML", "YEAR", "ZONE");
-
- public static String replaceString(String originString, String fromString, String toString) {
- return originString.replace(fromString, toString);
- }
-
- public static String extractReplace(String originString) {
- Matcher extractMatcher = EXTRACT_PATTERN.matcher(originString);
- String replacedString = originString;
- Map<Integer, Integer> parenthesesPairs = null;
-
- while (extractMatcher.find()) {
- if (parenthesesPairs == null) {
- parenthesesPairs = findParenthesesPairs(originString);
- }
-
- String functionStr = extractMatcher.group(2);
- int startIdx = extractMatcher.end(3);
- int endIdx = parenthesesPairs.get(extractMatcher.start(1));
- String extractInner = originString.substring(startIdx, endIdx);
- int originStart = extractMatcher.start(0);
- int originEnd = endIdx + 1;
-
- replacedString = replaceString(replacedString, originString.substring(originStart, originEnd),
- functionStr + "(" + extractInner + ")");
- }
-
- return replacedString;
- }
-
- public static String castReplace(String originString) {
- Matcher castMatcher = CAST_PATTERN.matcher(originString);
- String replacedString = originString;
-
- while (castMatcher.find()) {
- String castStr = castMatcher.group();
- String type = castMatcher.group(2);
- String supportedType = "";
- switch (type.toUpperCase()) {
- case "INTEGER":
- supportedType = "int";
- break;
- case "SHORT":
- supportedType = "smallint";
- break;
- case "LONG":
- supportedType = "bigint";
- break;
- default:
- supportedType = type;
- }
-
- if (!supportedType.equals(type)) {
- String replacedCastStr = castStr.replace(type, supportedType);
- replacedString = replaceString(replacedString, castStr, replacedCastStr);
- }
- }
-
- return replacedString;
- }
-
- public static String subqueryReplace(String originString) {
- Matcher subqueryMatcher = FROM_PATTERN.matcher(originString);
- String replacedString = originString;
- Map<Integer, Integer> parenthesesPairs = null;
-
- while (subqueryMatcher.find()) {
- if (parenthesesPairs == null) {
- parenthesesPairs = findParenthesesPairs(originString);
- }
-
- int startIdx = subqueryMatcher.start(1);
- int endIdx = parenthesesPairs.get(startIdx) + 1;
-
- Matcher aliasMatcher = ALIAS_PATTERN.matcher(originString.substring(endIdx));
- if (aliasMatcher.find()) {
- String aliasCandidate = aliasMatcher.group(1);
-
- if (aliasCandidate != null && !sqlKeyWordsExceptAS.contains(aliasCandidate.toUpperCase())) {
- continue;
- }
-
- replacedString = replaceString(replacedString, originString.substring(startIdx, endIdx),
- originString.substring(startIdx, endIdx) + " as alias");
- }
- }
-
- return replacedString;
- }
-
- public static String timestampaddReplace(String originString) {
- Matcher timestampaddMatcher = TIMESTAMPADD_PATTERN.matcher(originString);
- String replacedString = originString;
-
- while (timestampaddMatcher.find()) {
- String interval = timestampaddMatcher.group(1);
- String timestampaddStr = replaceString(timestampaddMatcher.group(), interval, "'" + interval + "'");
- replacedString = replaceString(replacedString, timestampaddMatcher.group(), timestampaddStr);
- }
-
- return replacedString;
- }
-
- public static String concatReplace(String originString) {
- Matcher concatMatcher = CONCAT_PATTERN.matcher(originString);
- String replacedString = originString;
-
- while (concatMatcher.find()) {
- String leftString = concatMatcher.group(1);
- String rightString = concatMatcher.group(2);
- replacedString = replaceString(replacedString, leftString + "||" + rightString,
- "concat(" + leftString + "," + rightString + ")");
- }
-
- return replacedString;
- }
-
- public static String doConvert(String originStr) {
- // Step1.Replace " with `
- String convertedSql = replaceString(originStr, "\"", "`");
-
- // Step2.Replace extract functions
- convertedSql = extractReplace(convertedSql);
-
- // Step3.Replace cast type string
- convertedSql = castReplace(convertedSql);
-
- // Step4.Replace sub query
- convertedSql = subqueryReplace(convertedSql);
-
- // Step5.Replace char_length with length
- convertedSql = replaceString(convertedSql, "char_length", "length");
-
- // Step6.Replace "||" with concat
- convertedSql = concatReplace(convertedSql);
-
- // Step7.Add quote for interval in timestampadd
- convertedSql = timestampaddReplace(convertedSql);
-
- return convertedSql;
- }
-
- private static Map<Integer, Integer> findParenthesesPairs(String sql) {
- Map<Integer, Integer> result = new HashMap<>();
- if (sql.length() > 1) {
- Stack<Integer> lStack = new Stack<>();
- boolean inStrVal = false;
- for (int i = 0; i < sql.length(); i++) {
- switch (sql.charAt(i)) {
- case '(':
- if (!inStrVal) {
- lStack.push(i);
- }
- break;
- case ')':
- if (!inStrVal && !lStack.empty()) {
- result.put(lStack.pop(), i);
- }
- break;
- default:
- break;
- }
- }
- }
- return result;
- }
-
- @Override
- public String convert(String originSql) {
- return doConvert(originSql);
- }
-}
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HivePushDownConverter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HivePushDownConverter.java b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HivePushDownConverter.java
new file mode 100644
index 0000000..8d89294
--- /dev/null
+++ b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/HivePushDownConverter.java
@@ -0,0 +1,297 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.source.adhocquery;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Stack;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.ImmutableSet;
+
+//TODO: Some workaround ways to make sql readable by hive parser; should be replaced with a more well-designed approach
+public class HivePushDownConverter implements IPushDownConverter {
+
+ @SuppressWarnings("unused")
+ private static final Logger logger = LoggerFactory.getLogger(HivePushDownConverter.class);
+
+ private static final Pattern EXTRACT_PATTERN = Pattern.compile("extract\\s*(\\()\\s*(.*?)\\s*from(\\s+)",
+ Pattern.CASE_INSENSITIVE);
+ private static final Pattern FROM_PATTERN = Pattern.compile("\\s+from\\s+(\\()\\s*select\\s",
+ Pattern.CASE_INSENSITIVE);
+ private static final Pattern ALIAS_PATTERN = Pattern.compile("\\s+([`'_a-z0-9A-Z]+)", Pattern.CASE_INSENSITIVE);
+ private static final Pattern CAST_PATTERN = Pattern.compile("CAST\\((.*?) (?i)AS\\s*(.*?)\\s*\\)",
+ Pattern.CASE_INSENSITIVE);
+ private static final Pattern CONCAT_PATTERN = Pattern.compile("(['_a-z0-9A-Z]+)\\|\\|(['_a-z0-9A-Z]+)",
+ Pattern.CASE_INSENSITIVE);
+ private static final Pattern TIMESTAMPADD_PATTERN = Pattern.compile("timestampadd\\s*\\(\\s*(.*?)\\s*,",
+ Pattern.CASE_INSENSITIVE);
+ private static final ImmutableSet<String> sqlKeyWordsExceptAS = ImmutableSet.of("A", "ABS", "ABSOLUTE", "ACTION",
+ "ADA", "ADD", "ADMIN", "AFTER", "ALL", "ALLOCATE", "ALLOW", "ALTER", "ALWAYS", "AND", "ANY", "APPLY", "ARE",
+ "ARRAY", "ARRAY_MAX_CARDINALITY", "ASC", "ASENSITIVE", "ASSERTION", "ASSIGNMENT", "ASYMMETRIC", "AT",
+ "ATOMIC", "ATTRIBUTE", "ATTRIBUTES", "AUTHORIZATION", "AVG", "BEFORE", "BEGIN", "BEGIN_FRAME",
+ "BEGIN_PARTITION", "BERNOULLI", "BETWEEN", "BIGINT", "BINARY", "BIT", "BLOB", "BOOLEAN", "BOTH", "BREADTH",
+ "BY", "C", "CALL", "CALLED", "CARDINALITY", "CASCADE", "CASCADED", "CASE", "CAST", "CATALOG",
+ "CATALOG_NAME", "CEIL", "CEILING", "CENTURY", "CHAIN", "CHAR", "CHARACTER", "CHARACTERISTICS", "CHARACTERS",
+ "CHARACTER_LENGTH", "CHARACTER_SET_CATALOG", "CHARACTER_SET_NAME", "CHARACTER_SET_SCHEMA", "CHAR_LENGTH",
+ "CHECK", "CLASSIFIER", "CLASS_ORIGIN", "CLOB", "CLOSE", "COALESCE", "COBOL", "COLLATE", "COLLATION",
+ "COLLATION_CATALOG", "COLLATION_NAME", "COLLATION_SCHEMA", "COLLECT", "COLUMN", "COLUMN_NAME",
+ "COMMAND_FUNCTION", "COMMAND_FUNCTION_CODE", "COMMIT", "COMMITTED", "CONDITION", "CONDITION_NUMBER",
+ "CONNECT", "CONNECTION", "CONNECTION_NAME", "CONSTRAINT", "CONSTRAINTS", "CONSTRAINT_CATALOG",
+ "CONSTRAINT_NAME", "CONSTRAINT_SCHEMA", "CONSTRUCTOR", "CONTAINS", "CONTINUE", "CONVERT", "CORR",
+ "CORRESPONDING", "COUNT", "COVAR_POP", "COVAR_SAMP", "CREATE", "CROSS", "CUBE", "CUME_DIST", "CURRENT",
+ "CURRENT_CATALOG", "CURRENT_DATE", "CURRENT_DEFAULT_TRANSFORM_GROUP", "CURRENT_PATH", "CURRENT_ROLE",
+ "CURRENT_ROW", "CURRENT_SCHEMA", "CURRENT_TIME", "CURRENT_TIMESTAMP", "CURRENT_TRANSFORM_GROUP_FOR_TYPE",
+ "CURRENT_USER", "CURSOR", "CURSOR_NAME", "CYCLE", "DATA", "DATABASE", "DATE", "DATETIME_INTERVAL_CODE",
+ "DATETIME_INTERVAL_PRECISION", "DAY", "DEALLOCATE", "DEC", "DECADE", "DECIMAL", "DECLARE", "DEFAULT",
+ "DEFAULTS", "DEFERRABLE", "DEFERRED", "DEFINE", "DEFINED", "DEFINER", "DEGREE", "DELETE", "DENSE_RANK",
+ "DEPTH", "DEREF", "DERIVED", "DESC", "DESCRIBE", "DESCRIPTION", "DESCRIPTOR", "DETERMINISTIC",
+ "DIAGNOSTICS", "DISALLOW", "DISCONNECT", "DISPATCH", "DISTINCT", "DOMAIN", "DOUBLE", "DOW", "DOY", "DROP",
+ "DYNAMIC", "DYNAMIC_FUNCTION", "DYNAMIC_FUNCTION_CODE", "EACH", "ELEMENT", "ELSE", "EMPTY", "END",
+ "END-EXEC", "END_FRAME", "END_PARTITION", "EPOCH", "EQUALS", "ESCAPE", "EVERY", "EXCEPT", "EXCEPTION",
+ "EXCLUDE", "EXCLUDING", "EXEC", "EXECUTE", "EXISTS", "EXP", "EXPLAIN", "EXTEND", "EXTERNAL", "EXTRACT",
+ "FALSE", "FETCH", "FILTER", "FINAL", "FIRST", "FIRST_VALUE", "FLOAT", "FLOOR", "FOLLOWING", "FOR",
+ "FOREIGN", "FORTRAN", "FOUND", "FRAC_SECOND", "FRAME_ROW", "FREE", "FROM", "FULL", "FUNCTION", "FUSION",
+ "G", "GENERAL", "GENERATED", "GET", "GLOBAL", "GO", "GOTO", "GRANT", "GRANTED", "GROUP", "GROUPING",
+ "GROUPS", "HAVING", "HIERARCHY", "HOLD", "HOUR", "IDENTITY", "IMMEDIATE", "IMMEDIATELY", "IMPLEMENTATION",
+ "IMPORT", "IN", "INCLUDING", "INCREMENT", "INDICATOR", "INITIAL", "INITIALLY", "INNER", "INOUT", "INPUT",
+ "INSENSITIVE", "INSERT", "INSTANCE", "INSTANTIABLE", "INT", "INTEGER", "INTERSECT", "INTERSECTION",
+ "INTERVAL", "INTO", "INVOKER", "IS", "ISOLATION", "JAVA", "JOIN", "JSON", "K", "KEY", "KEY_MEMBER",
+ "KEY_TYPE", "LABEL", "LAG", "LANGUAGE", "LARGE", "LAST", "LAST_VALUE", "LATERAL", "LEAD", "LEADING", "LEFT",
+ "LENGTH", "LEVEL", "LIBRARY", "LIKE", "LIKE_REGEX", "LIMIT", "LN", "LOCAL", "LOCALTIME", "LOCALTIMESTAMP",
+ "LOCATOR", "LOWER", "M", "MAP", "MATCH", "MATCHED", "MATCHES", "MATCH_NUMBER", "MATCH_RECOGNIZE", "MAX",
+ "MAXVALUE", "MEASURES", "MEMBER", "MERGE", "MESSAGE_LENGTH", "MESSAGE_OCTET_LENGTH", "MESSAGE_TEXT",
+ "METHOD", "MICROSECOND", "MILLENNIUM", "MIN", "MINUS", "MINUTE", "MINVALUE", "MOD", "MODIFIES", "MODULE",
+ "MONTH", "MORE", "MULTISET", "MUMPS", "NAME", "NAMES", "NATIONAL", "NATURAL", "NCHAR", "NCLOB", "NESTING",
+ "NEW", "NEXT", "NO", "NONE", "NORMALIZE", "NORMALIZED", "NOT", "NTH_VALUE", "NTILE", "NULL", "NULLABLE",
+ "NULLIF", "NULLS", "NUMBER", "NUMERIC", "OBJECT", "OCCURRENCES_REGEX", "OCTETS", "OCTET_LENGTH", "OF",
+ "OFFSET", "OLD", "OMIT", "ON", "ONE", "ONLY", "OPEN", "OPTION", "OPTIONS", "OR", "ORDER", "ORDERING",
+ "ORDINALITY", "OTHERS", "OUT", "OUTER", "OUTPUT", "OVER", "OVERLAPS", "OVERLAY", "OVERRIDING", "PAD",
+ "PARAMETER", "PARAMETER_MODE", "PARAMETER_NAME", "PARAMETER_ORDINAL_POSITION", "PARAMETER_SPECIFIC_CATALOG",
+ "PARAMETER_SPECIFIC_NAME", "PARAMETER_SPECIFIC_SCHEMA", "PARTIAL", "PARTITION", "PASCAL", "PASSTHROUGH",
+ "PAST", "PATH", "PATTERN", "PER", "PERCENT", "PERCENTILE_CONT", "PERCENTILE_DISC", "PERCENT_RANK", "PERIOD",
+ "PERMUTE", "PLACING", "PLAN", "PLI", "PORTION", "POSITION", "POSITION_REGEX", "POWER", "PRECEDES",
+ "PRECEDING", "PRECISION", "PREPARE", "PRESERVE", "PREV", "PRIMARY", "PRIOR", "PRIVILEGES", "PROCEDURE",
+ "PUBLIC", "QUARTER", "RANGE", "RANK", "READ", "READS", "REAL", "RECURSIVE", "REF", "REFERENCES",
+ "REFERENCING", "REGR_AVGX", "REGR_AVGY", "REGR_COUNT", "REGR_INTERCEPT", "REGR_R2", "REGR_SLOPE",
+ "REGR_SXX", "REGR_SXY", "REGR_SYY", "RELATIVE", "RELEASE", "REPEATABLE", "REPLACE", "RESET", "RESTART",
+ "RESTRICT", "RESULT", "RETURN", "RETURNED_CARDINALITY", "RETURNED_LENGTH", "RETURNED_OCTET_LENGTH",
+ "RETURNED_SQLSTATE", "RETURNS", "REVOKE", "RIGHT", "ROLE", "ROLLBACK", "ROLLUP", "ROUTINE",
+ "ROUTINE_CATALOG", "ROUTINE_NAME", "ROUTINE_SCHEMA", "ROW", "ROWS", "ROW_COUNT", "ROW_NUMBER", "RUNNING",
+ "SAVEPOINT", "SCALE", "SCHEMA", "SCHEMA_NAME", "SCOPE", "SCOPE_CATALOGS", "SCOPE_NAME", "SCOPE_SCHEMA",
+ "SCROLL", "SEARCH", "SECOND", "SECTION", "SECURITY", "SEEK", "SELECT", "SELF", "SENSITIVE", "SEQUENCE",
+ "SERIALIZABLE", "SERVER", "SERVER_NAME", "SESSION", "SESSION_USER", "SET", "SETS", "SHOW", "SIMILAR",
+ "SIMPLE", "SIZE", "SKIP", "SMALLINT", "SOME", "SOURCE", "SPACE", "SPECIFIC", "SPECIFICTYPE",
+ "SPECIFIC_NAME", "SQL", "SQLEXCEPTION", "SQLSTATE", "SQLWARNING", "SQL_BIGINT", "SQL_BINARY", "SQL_BIT",
+ "SQL_BLOB", "SQL_BOOLEAN", "SQL_CHAR", "SQL_CLOB", "SQL_DATE", "SQL_DECIMAL", "SQL_DOUBLE", "SQL_FLOAT",
+ "SQL_INTEGER", "SQL_INTERVAL_DAY", "SQL_INTERVAL_DAY_TO_HOUR", "SQL_INTERVAL_DAY_TO_MINUTE",
+ "SQL_INTERVAL_DAY_TO_SECOND", "SQL_INTERVAL_HOUR", "SQL_INTERVAL_HOUR_TO_MINUTE",
+ "SQL_INTERVAL_HOUR_TO_SECOND", "SQL_INTERVAL_MINUTE", "SQL_INTERVAL_MINUTE_TO_SECOND", "SQL_INTERVAL_MONTH",
+ "SQL_INTERVAL_SECOND", "SQL_INTERVAL_YEAR", "SQL_INTERVAL_YEAR_TO_MONTH", "SQL_LONGVARBINARY",
+ "SQL_LONGVARCHAR", "SQL_LONGVARNCHAR", "SQL_NCHAR", "SQL_NCLOB", "SQL_NUMERIC", "SQL_NVARCHAR", "SQL_REAL",
+ "SQL_SMALLINT", "SQL_TIME", "SQL_TIMESTAMP", "SQL_TINYINT", "SQL_TSI_DAY", "SQL_TSI_FRAC_SECOND",
+ "SQL_TSI_HOUR", "SQL_TSI_MICROSECOND", "SQL_TSI_MINUTE", "SQL_TSI_MONTH", "SQL_TSI_QUARTER",
+ "SQL_TSI_SECOND", "SQL_TSI_WEEK", "SQL_TSI_YEAR", "SQL_VARBINARY", "SQL_VARCHAR", "SQRT", "START", "STATE",
+ "STATEMENT", "STATIC", "STDDEV_POP", "STDDEV_SAMP", "STREAM", "STRUCTURE", "STYLE", "SUBCLASS_ORIGIN",
+ "SUBMULTISET", "SUBSET", "SUBSTITUTE", "SUBSTRING", "SUBSTRING_REGEX", "SUCCEEDS", "SUM", "SYMMETRIC",
+ "SYSTEM", "SYSTEM_TIME", "SYSTEM_USER", "TABLE", "TABLESAMPLE", "TABLE_NAME", "TEMPORARY", "THEN", "TIES",
+ "TIME", "TIMESTAMP", "TIMESTAMPADD", "TIMESTAMPDIFF", "TIMEZONE_HOUR", "TIMEZONE_MINUTE", "TINYINT", "TO",
+ "TOP_LEVEL_COUNT", "TRAILING", "TRANSACTION", "TRANSACTIONS_ACTIVE", "TRANSACTIONS_COMMITTED",
+ "TRANSACTIONS_ROLLED_BACK", "TRANSFORM", "TRANSFORMS", "TRANSLATE", "TRANSLATE_REGEX", "TRANSLATION",
+ "TREAT", "TRIGGER", "TRIGGER_CATALOG", "TRIGGER_NAME", "TRIGGER_SCHEMA", "TRIM", "TRIM_ARRAY", "TRUE",
+ "TRUNCATE", "TYPE", "UESCAPE", "UNBOUNDED", "UNCOMMITTED", "UNDER", "UNION", "UNIQUE", "UNKNOWN", "UNNAMED",
+ "UNNEST", "UPDATE", "UPPER", "UPSERT", "USAGE", "USER", "USER_DEFINED_TYPE_CATALOG",
+ "USER_DEFINED_TYPE_CODE", "USER_DEFINED_TYPE_NAME", "USER_DEFINED_TYPE_SCHEMA", "USING", "VALUE", "VALUES",
+ "VALUE_OF", "VARBINARY", "VARCHAR", "VARYING", "VAR_POP", "VAR_SAMP", "VERSION", "VERSIONING", "VIEW",
+ "WEEK", "WHEN", "WHENEVER", "WHERE", "WIDTH_BUCKET", "WINDOW", "WITH", "WITHIN", "WITHOUT", "WORK",
+ "WRAPPER", "WRITE", "XML", "YEAR", "ZONE");
+
+ public static String replaceString(String originString, String fromString, String toString) {
+ return originString.replace(fromString, toString);
+ }
+
+ public static String extractReplace(String originString) {
+ Matcher extractMatcher = EXTRACT_PATTERN.matcher(originString);
+ String replacedString = originString;
+ Map<Integer, Integer> parenthesesPairs = null;
+
+ while (extractMatcher.find()) {
+ if (parenthesesPairs == null) {
+ parenthesesPairs = findParenthesesPairs(originString);
+ }
+
+ String functionStr = extractMatcher.group(2);
+ int startIdx = extractMatcher.end(3);
+ int endIdx = parenthesesPairs.get(extractMatcher.start(1));
+ String extractInner = originString.substring(startIdx, endIdx);
+ int originStart = extractMatcher.start(0);
+ int originEnd = endIdx + 1;
+
+ replacedString = replaceString(replacedString, originString.substring(originStart, originEnd),
+ functionStr + "(" + extractInner + ")");
+ }
+
+ return replacedString;
+ }
+
+ public static String castReplace(String originString) {
+ Matcher castMatcher = CAST_PATTERN.matcher(originString);
+ String replacedString = originString;
+
+ while (castMatcher.find()) {
+ String castStr = castMatcher.group();
+ String type = castMatcher.group(2);
+ String supportedType = "";
+ switch (type.toUpperCase()) {
+ case "INTEGER":
+ supportedType = "int";
+ break;
+ case "SHORT":
+ supportedType = "smallint";
+ break;
+ case "LONG":
+ supportedType = "bigint";
+ break;
+ default:
+ supportedType = type;
+ }
+
+ if (!supportedType.equals(type)) {
+ String replacedCastStr = castStr.replace(type, supportedType);
+ replacedString = replaceString(replacedString, castStr, replacedCastStr);
+ }
+ }
+
+ return replacedString;
+ }
+
+ public static String subqueryReplace(String originString) {
+ Matcher subqueryMatcher = FROM_PATTERN.matcher(originString);
+ String replacedString = originString;
+ Map<Integer, Integer> parenthesesPairs = null;
+
+ while (subqueryMatcher.find()) {
+ if (parenthesesPairs == null) {
+ parenthesesPairs = findParenthesesPairs(originString);
+ }
+
+ int startIdx = subqueryMatcher.start(1);
+ int endIdx = parenthesesPairs.get(startIdx) + 1;
+
+ Matcher aliasMatcher = ALIAS_PATTERN.matcher(originString.substring(endIdx));
+ if (aliasMatcher.find()) {
+ String aliasCandidate = aliasMatcher.group(1);
+
+ if (aliasCandidate != null && !sqlKeyWordsExceptAS.contains(aliasCandidate.toUpperCase())) {
+ continue;
+ }
+
+ replacedString = replaceString(replacedString, originString.substring(startIdx, endIdx),
+ originString.substring(startIdx, endIdx) + " as alias");
+ }
+ }
+
+ return replacedString;
+ }
+
+ public static String timestampaddReplace(String originString) {
+ Matcher timestampaddMatcher = TIMESTAMPADD_PATTERN.matcher(originString);
+ String replacedString = originString;
+
+ while (timestampaddMatcher.find()) {
+ String interval = timestampaddMatcher.group(1);
+ String timestampaddStr = replaceString(timestampaddMatcher.group(), interval, "'" + interval + "'");
+ replacedString = replaceString(replacedString, timestampaddMatcher.group(), timestampaddStr);
+ }
+
+ return replacedString;
+ }
+
+ public static String concatReplace(String originString) {
+ Matcher concatMatcher = CONCAT_PATTERN.matcher(originString);
+ String replacedString = originString;
+
+ while (concatMatcher.find()) {
+ String leftString = concatMatcher.group(1);
+ String rightString = concatMatcher.group(2);
+ replacedString = replaceString(replacedString, leftString + "||" + rightString,
+ "concat(" + leftString + "," + rightString + ")");
+ }
+
+ return replacedString;
+ }
+
+ public static String doConvert(String originStr) {
+ // Step1.Replace " with `
+ String convertedSql = replaceString(originStr, "\"", "`");
+
+ // Step2.Replace extract functions
+ convertedSql = extractReplace(convertedSql);
+
+ // Step3.Replace cast type string
+ convertedSql = castReplace(convertedSql);
+
+ // Step4.Replace sub query
+ convertedSql = subqueryReplace(convertedSql);
+
+ // Step5.Replace char_length with length
+ convertedSql = replaceString(convertedSql, "char_length", "length");
+
+ // Step6.Replace "||" with concat
+ convertedSql = concatReplace(convertedSql);
+
+ // Step7.Add quote for interval in timestampadd
+ convertedSql = timestampaddReplace(convertedSql);
+
+ return convertedSql;
+ }
+
+ private static Map<Integer, Integer> findParenthesesPairs(String sql) {
+ Map<Integer, Integer> result = new HashMap<>();
+ if (sql.length() > 1) {
+ Stack<Integer> lStack = new Stack<>();
+ boolean inStrVal = false;
+ for (int i = 0; i < sql.length(); i++) {
+ switch (sql.charAt(i)) {
+ case '(':
+ if (!inStrVal) {
+ lStack.push(i);
+ }
+ break;
+ case ')':
+ if (!inStrVal && !lStack.empty()) {
+ result.put(lStack.pop(), i);
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ }
+ return result;
+ }
+
+ @Override
+ public String convert(String originSql) {
+ return doConvert(originSql);
+ }
+}
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocConverter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocConverter.java b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocConverter.java
deleted file mode 100644
index c4b87f8..0000000
--- a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocConverter.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-package org.apache.kylin.source.adhocquery;
-
-/**
- * convert the query to satisfy the parser of adhoc query engine
- */
-public interface IAdHocConverter {
- String convert(String originSql);
-}
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocRunner.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocRunner.java b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocRunner.java
deleted file mode 100644
index 369325c..0000000
--- a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IAdHocRunner.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.source.adhocquery;
-
-import java.util.List;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
-
-public interface IAdHocRunner {
-
- void init(KylinConfig config);
-
- /**
- * Run an ad-hoc query in the source database in case Kylin cannot serve using cube.
- *
- * @param query the query statement
- * @param returnRows an empty list to collect returning rows
- * @param returnColumnMeta an empty list to collect metadata of returning columns
- * @throws Exception if running ad-hoc query fails
- */
- void executeQuery(String query, List<List<String>> returnRows, List<SelectedColumnMeta> returnColumnMeta) throws Exception;
-}
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IPushDownConverter.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IPushDownConverter.java b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IPushDownConverter.java
new file mode 100644
index 0000000..c4e7515
--- /dev/null
+++ b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IPushDownConverter.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.source.adhocquery;
+
+/**
+ * convert the query to satisfy the parser of the push down query engine
+ */
+public interface IPushDownConverter {
+ String convert(String originSql);
+}
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IPushDownRunner.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IPushDownRunner.java b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IPushDownRunner.java
new file mode 100644
index 0000000..c8d18aa
--- /dev/null
+++ b/core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IPushDownRunner.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.adhocquery;
+
+import java.util.List;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
+
+public interface IPushDownRunner {
+
+ void init(KylinConfig config);
+
+ /**
+ * Run a pushdown query in the source database in case Kylin cannot serve it using a cube.
+ *
+ * @param query the query statement
+ * @param returnRows an empty list to collect returning rows
+ * @param returnColumnMeta an empty list to collect metadata of returning columns
+ * @throws Exception if running the pushdown query fails
+ */
+ void executeQuery(String query, List<List<String>> returnRows, List<SelectedColumnMeta> returnColumnMeta) throws Exception;
+}
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/core-metadata/src/test/java/org/apache/kylin/source/adhocquery/HiveAdhocConverterTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/source/adhocquery/HiveAdhocConverterTest.java b/core-metadata/src/test/java/org/apache/kylin/source/adhocquery/HiveAdhocConverterTest.java
deleted file mode 100644
index cfb0f32..0000000
--- a/core-metadata/src/test/java/org/apache/kylin/source/adhocquery/HiveAdhocConverterTest.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.source.adhocquery;
-
-import org.junit.Test;
-
-import junit.framework.TestCase;
-
-public class HiveAdhocConverterTest extends TestCase {
- @Test
- public void testStringReplace() {
- String originString = "select count(*) as cnt from test_kylin_fact where char_length(lstg_format_name) < 10";
- String replacedString = HiveAdhocConverter.replaceString(originString, "char_length", "length");
- assertEquals("select count(*) as cnt from test_kylin_fact where length(lstg_format_name) < 10", replacedString);
- }
-
- @Test
- public void testExtractReplace() {
- String originString = "ignore EXTRACT(YEAR FROM KYLIN_CAL_DT.CAL_DT) ignore";
- String replacedString = HiveAdhocConverter.extractReplace(originString);
- assertEquals("ignore YEAR(KYLIN_CAL_DT.CAL_DT) ignore", replacedString);
- }
-
- @Test
- public void testCastReplace() {
- String originString = "ignore EXTRACT(YEAR FROM CAST(KYLIN_CAL_DT.CAL_DT AS INTEGER)) ignore";
- String replacedString = HiveAdhocConverter.castReplace(originString);
- assertEquals("ignore EXTRACT(YEAR FROM CAST(KYLIN_CAL_DT.CAL_DT AS int)) ignore", replacedString);
- }
-
- @Test
- public void testSubqueryReplace1() {
- String originString = "select seller_id,lstg_format_name,sum(price) from (select * from test_kylin_fact where (lstg_format_name='FP-GTC') limit 20) group by seller_id,lstg_format_name";
- String replacedString = HiveAdhocConverter.subqueryReplace(originString);
- assertEquals(
- "select seller_id,lstg_format_name,sum(price) from (select * from test_kylin_fact where (lstg_format_name='FP-GTC') limit 20) as alias group by seller_id,lstg_format_name",
- replacedString);
- }
-
- @Test
- public void testSubqueryReplace2() {
- String originString = "select count(*) from ( select test_kylin_fact.lstg_format_name from test_kylin_fact where test_kylin_fact.lstg_format_name='FP-GTC' group by test_kylin_fact.lstg_format_name ) t ";
- String replacedString = HiveAdhocConverter.subqueryReplace(originString);
- assertEquals(originString, replacedString);
- }
-
- @Test
- public void testSubqueryReplace3() {
- String originString = "select fact.lstg_format_name from (select * from test_kylin_fact where cal_dt > date'2010-01-01' ) as fact group by fact.lstg_format_name order by CASE WHEN fact.lstg_format_name IS NULL THEN 'sdf' ELSE fact.lstg_format_name END ";
- String replacedString = HiveAdhocConverter.subqueryReplace(originString);
- assertEquals(originString, replacedString);
- }
-
- @Test
- public void testConcatReplace() {
- String originString = "select count(*) as cnt from test_kylin_fact where lstg_format_name||'a'='ABINa'";
- String replacedString = HiveAdhocConverter.concatReplace(originString);
- assertEquals("select count(*) as cnt from test_kylin_fact where concat(lstg_format_name,'a')='ABINa'",
- replacedString);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/core-metadata/src/test/java/org/apache/kylin/source/adhocquery/HivePushDownConverterTest.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/test/java/org/apache/kylin/source/adhocquery/HivePushDownConverterTest.java b/core-metadata/src/test/java/org/apache/kylin/source/adhocquery/HivePushDownConverterTest.java
new file mode 100644
index 0000000..42c628a
--- /dev/null
+++ b/core-metadata/src/test/java/org/apache/kylin/source/adhocquery/HivePushDownConverterTest.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.source.adhocquery;
+
+import org.junit.Test;
+
+import junit.framework.TestCase;
+
+public class HivePushDownConverterTest extends TestCase {
+ @Test
+ public void testStringReplace() {
+ String originString = "select count(*) as cnt from test_kylin_fact where char_length(lstg_format_name) < 10";
+ String replacedString = HivePushDownConverter.replaceString(originString, "char_length", "length");
+ assertEquals("select count(*) as cnt from test_kylin_fact where length(lstg_format_name) < 10", replacedString);
+ }
+
+ @Test
+ public void testExtractReplace() {
+ String originString = "ignore EXTRACT(YEAR FROM KYLIN_CAL_DT.CAL_DT) ignore";
+ String replacedString = HivePushDownConverter.extractReplace(originString);
+ assertEquals("ignore YEAR(KYLIN_CAL_DT.CAL_DT) ignore", replacedString);
+ }
+
+ @Test
+ public void testCastReplace() {
+ String originString = "ignore EXTRACT(YEAR FROM CAST(KYLIN_CAL_DT.CAL_DT AS INTEGER)) ignore";
+ String replacedString = HivePushDownConverter.castReplace(originString);
+ assertEquals("ignore EXTRACT(YEAR FROM CAST(KYLIN_CAL_DT.CAL_DT AS int)) ignore", replacedString);
+ }
+
+ @Test
+ public void testSubqueryReplace1() {
+ String originString = "select seller_id,lstg_format_name,sum(price) from (select * from test_kylin_fact where (lstg_format_name='FP-GTC') limit 20) group by seller_id,lstg_format_name";
+ String replacedString = HivePushDownConverter.subqueryReplace(originString);
+ assertEquals(
+ "select seller_id,lstg_format_name,sum(price) from (select * from test_kylin_fact where (lstg_format_name='FP-GTC') limit 20) as alias group by seller_id,lstg_format_name",
+ replacedString);
+ }
+
+ @Test
+ public void testSubqueryReplace2() {
+ String originString = "select count(*) from ( select test_kylin_fact.lstg_format_name from test_kylin_fact where test_kylin_fact.lstg_format_name='FP-GTC' group by test_kylin_fact.lstg_format_name ) t ";
+ String replacedString = HivePushDownConverter.subqueryReplace(originString);
+ assertEquals(originString, replacedString);
+ }
+
+ @Test
+ public void testSubqueryReplace3() {
+ String originString = "select fact.lstg_format_name from (select * from test_kylin_fact where cal_dt > date'2010-01-01' ) as fact group by fact.lstg_format_name order by CASE WHEN fact.lstg_format_name IS NULL THEN 'sdf' ELSE fact.lstg_format_name END ";
+ String replacedString = HivePushDownConverter.subqueryReplace(originString);
+ assertEquals(originString, replacedString);
+ }
+
+ @Test
+ public void testConcatReplace() {
+ String originString = "select count(*) as cnt from test_kylin_fact where lstg_format_name||'a'='ABINa'";
+ String replacedString = HivePushDownConverter.concatReplace(originString);
+ assertEquals("select count(*) as cnt from test_kylin_fact where concat(lstg_format_name,'a')='ABINa'",
+ replacedString);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index 35386de..6a571df 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -186,14 +186,14 @@ kylin.engine.spark-conf.spark.yarn.am.extraJavaOptions=-Dhdp.version=current
kylin.engine.spark-conf.spark.executor.extraJavaOptions=-Dhdp.version=current
-### AD-HOC QUERY ###
-#kylin.query.ad-hoc.runner-class-name=org.apache.kylin.query.adhoc.AdHocRunnerJdbcImpl
+### QUERY PUSH DOWN ###
+#kylin.query.pushdown.runner-class-name=org.apache.kylin.query.adhoc.PushDownRunnerJdbcImpl
-#kylin.query.ad-hoc.jdbc.url=jdbc:hive2://sandbox:10000/default
-#kylin.query.ad-hoc.jdbc.driver=org.apache.hive.jdbc.HiveDriver
-#kylin.query.ad-hoc.jdbc.username=hive
-#kylin.query.ad-hoc.jdbc.password=
+#kylin.query.pushdown.jdbc.url=jdbc:hive2://sandbox:10000/default
+#kylin.query.pushdown.jdbc.driver=org.apache.hive.jdbc.HiveDriver
+#kylin.query.pushdown.jdbc.username=hive
+#kylin.query.pushdown.jdbc.password=
-#kylin.query.ad-hoc.jdbc.pool-max-total=8
-#kylin.query.ad-hoc.jdbc.pool-max-idle=8
-#kylin.query.ad-hoc.jdbc.pool-min-idle=0
+#kylin.query.pushdown.jdbc.pool-max-total=8
+#kylin.query.pushdown.jdbc.pool-max-idle=8
+#kylin.query.pushdown.jdbc.pool-min-idle=0
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index af05237..3634100 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -51,7 +51,7 @@ import org.apache.kylin.metadata.project.ProjectInstance;
import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
import org.apache.kylin.query.relnode.OLAPContext;
import org.apache.kylin.query.routing.rules.RemoveBlackoutRealizationsRule;
-import org.apache.kylin.rest.util.AdHocUtil;
+import org.apache.kylin.rest.util.PushDownUtil;
import org.dbunit.DatabaseUnitException;
import org.dbunit.database.DatabaseConfig;
import org.dbunit.database.DatabaseConnection;
@@ -263,7 +263,7 @@ public class KylinTestBase {
} catch (SQLException sqlException) {
List<List<String>> results = Lists.newArrayList();
List<SelectedColumnMeta> columnMetas = Lists.newArrayList();
- AdHocUtil.doAdHocQuery(ProjectInstance.DEFAULT_PROJECT_NAME, sql, results, columnMetas, sqlException);
+ PushDownUtil.doPushDownQuery(ProjectInstance.DEFAULT_PROJECT_NAME, sql, results, columnMetas, sqlException);
return results.size();
} finally {
if (resultSet != null) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/query/src/main/java/org/apache/kylin/query/adhoc/AdHocRunnerJdbcImpl.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/adhoc/AdHocRunnerJdbcImpl.java b/query/src/main/java/org/apache/kylin/query/adhoc/AdHocRunnerJdbcImpl.java
deleted file mode 100644
index 852cebf..0000000
--- a/query/src/main/java/org/apache/kylin/query/adhoc/AdHocRunnerJdbcImpl.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.query.adhoc;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.LinkedList;
-import java.util.List;
-
-import org.apache.commons.pool.impl.GenericObjectPool;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
-import org.apache.kylin.source.adhocquery.IAdHocRunner;
-
-public class AdHocRunnerJdbcImpl implements IAdHocRunner {
-
- private static org.apache.kylin.query.adhoc.JdbcConnectionPool pool = null;
-
- @Override
- public void init(KylinConfig config) {
- if (pool == null) {
- pool = new JdbcConnectionPool();
- JdbcConnectionFactory factory = new JdbcConnectionFactory(config.getJdbcUrl(), config.getJdbcDriverClass(),
- config.getJdbcUsername(), config.getJdbcPassword());
- GenericObjectPool.Config poolConfig = new GenericObjectPool.Config();
- poolConfig.maxActive = config.getPoolMaxTotal();
- poolConfig.maxIdle = config.getPoolMaxIdle();
- poolConfig.minIdle = config.getPoolMinIdle();
-
- try {
- pool.createPool(factory, poolConfig);
- } catch (IOException e) {
- throw new RuntimeException(e.getMessage(), e);
- }
- }
- }
-
- @Override
- public void executeQuery(String query, List<List<String>> results, List<SelectedColumnMeta> columnMetas)
- throws Exception {
- Statement statement = null;
- Connection connection = this.getConnection();
- ResultSet resultSet = null;
-
- try {
- statement = connection.createStatement();
- resultSet = statement.executeQuery(query);
- extractResults(resultSet, results);
- } catch (SQLException sqlException) {
- throw sqlException;
- }
-
- //extract column metadata
- ResultSetMetaData metaData = null;
- int columnCount = 0;
- try {
- metaData = resultSet.getMetaData();
- columnCount = metaData.getColumnCount();
-
- // fill in selected column meta
- for (int i = 1; i <= columnCount; ++i) {
- columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i), false,
- metaData.isCurrency(i), metaData.isNullable(i), false, metaData.getColumnDisplaySize(i),
- metaData.getColumnLabel(i), metaData.getColumnName(i), null, null, null,
- metaData.getPrecision(i), metaData.getScale(i), metaData.getColumnType(i),
- metaData.getColumnTypeName(i), metaData.isReadOnly(i), false, false));
- }
-
- } catch (SQLException sqlException) {
- throw sqlException;
- }
-
- closeConnection(connection);
- }
-
- private Connection getConnection() {
- return pool.getConnection();
- }
-
- private void closeConnection(Connection connection) {
- pool.returnConnection(connection);
- }
-
- static void extractResults(ResultSet resultSet, List<List<String>> results) throws SQLException {
- List<String> oneRow = new LinkedList<String>();
-
- try {
- while (resultSet.next()) {
- //logger.debug("resultSet value: " + resultSet.getString(1));
- for (int i = 0; i < resultSet.getMetaData().getColumnCount(); i++) {
- oneRow.add((resultSet.getString(i + 1)));
- }
-
- results.add(new LinkedList<String>(oneRow));
- oneRow.clear();
- }
- } catch (SQLException sqlException) {
- throw sqlException;
- }
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/query/src/main/java/org/apache/kylin/query/adhoc/PushDownRunnerJdbcImpl.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/adhoc/PushDownRunnerJdbcImpl.java b/query/src/main/java/org/apache/kylin/query/adhoc/PushDownRunnerJdbcImpl.java
new file mode 100644
index 0000000..8001880
--- /dev/null
+++ b/query/src/main/java/org/apache/kylin/query/adhoc/PushDownRunnerJdbcImpl.java
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.query.adhoc;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.commons.pool.impl.GenericObjectPool;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
+import org.apache.kylin.source.adhocquery.IPushDownRunner;
+
+public class PushDownRunnerJdbcImpl implements IPushDownRunner {
+
+ private static org.apache.kylin.query.adhoc.JdbcConnectionPool pool = null;
+
+ @Override
+ public void init(KylinConfig config) {
+ if (pool == null) {
+ pool = new JdbcConnectionPool();
+ JdbcConnectionFactory factory = new JdbcConnectionFactory(config.getJdbcUrl(), config.getJdbcDriverClass(),
+ config.getJdbcUsername(), config.getJdbcPassword());
+ GenericObjectPool.Config poolConfig = new GenericObjectPool.Config();
+ poolConfig.maxActive = config.getPoolMaxTotal();
+ poolConfig.maxIdle = config.getPoolMaxIdle();
+ poolConfig.minIdle = config.getPoolMinIdle();
+
+ try {
+ pool.createPool(factory, poolConfig);
+ } catch (IOException e) {
+ throw new RuntimeException(e.getMessage(), e);
+ }
+ }
+ }
+
+ @Override
+ public void executeQuery(String query, List<List<String>> results, List<SelectedColumnMeta> columnMetas)
+ throws Exception {
+ Statement statement = null;
+ Connection connection = this.getConnection();
+ ResultSet resultSet = null;
+
+ try {
+ statement = connection.createStatement();
+ resultSet = statement.executeQuery(query);
+ extractResults(resultSet, results);
+ } catch (SQLException sqlException) {
+ throw sqlException;
+ }
+
+ //extract column metadata
+ ResultSetMetaData metaData = null;
+ int columnCount = 0;
+ try {
+ metaData = resultSet.getMetaData();
+ columnCount = metaData.getColumnCount();
+
+ // fill in selected column meta
+ for (int i = 1; i <= columnCount; ++i) {
+ columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i), false,
+ metaData.isCurrency(i), metaData.isNullable(i), false, metaData.getColumnDisplaySize(i),
+ metaData.getColumnLabel(i), metaData.getColumnName(i), null, null, null,
+ metaData.getPrecision(i), metaData.getScale(i), metaData.getColumnType(i),
+ metaData.getColumnTypeName(i), metaData.isReadOnly(i), false, false));
+ }
+
+ } catch (SQLException sqlException) {
+ throw sqlException;
+ }
+
+ closeConnection(connection);
+ }
+
+ private Connection getConnection() {
+ return pool.getConnection();
+ }
+
+ private void closeConnection(Connection connection) {
+ pool.returnConnection(connection);
+ }
+
+ static void extractResults(ResultSet resultSet, List<List<String>> results) throws SQLException {
+ List<String> oneRow = new LinkedList<String>();
+
+ try {
+ while (resultSet.next()) {
+ //logger.debug("resultSet value: " + resultSet.getString(1));
+ for (int i = 0; i < resultSet.getMetaData().getColumnCount(); i++) {
+ oneRow.add((resultSet.getString(i + 1)));
+ }
+
+ results.add(new LinkedList<String>(oneRow));
+ oneRow.clear();
+ }
+ } catch (SQLException sqlException) {
+ throw sqlException;
+ }
+ }
+
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/kylin/blob/7002dd86/server-base/src/main/java/org/apache/kylin/rest/response/SQLResponse.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/response/SQLResponse.java b/server-base/src/main/java/org/apache/kylin/rest/response/SQLResponse.java
index d841dee..79a2c05 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/response/SQLResponse.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/response/SQLResponse.java
@@ -61,12 +61,13 @@ public class SQLResponse implements Serializable {
protected boolean storageCacheUsed = false;
- protected boolean queryAdHoc = false;
+ protected boolean queryPushDown = false;
public SQLResponse() {
}
- public SQLResponse(List<SelectedColumnMeta> columnMetas, List<List<String>> results, int affectedRowCount, boolean isException, String exceptionMessage) {
+ public SQLResponse(List<SelectedColumnMeta> columnMetas, List<List<String>> results, int affectedRowCount,
+ boolean isException, String exceptionMessage) {
this.columnMetas = columnMetas;
this.results = results;
this.affectedRowCount = affectedRowCount;
@@ -74,7 +75,8 @@ public class SQLResponse implements Serializable {
this.exceptionMessage = exceptionMessage;
}
- public SQLResponse(List<SelectedColumnMeta> columnMetas, List<List<String>> results, String cube, int affectedRowCount, boolean isException, String exceptionMessage) {
+ public SQLResponse(List<SelectedColumnMeta> columnMetas, List<List<String>> results, String cube,
+ int affectedRowCount, boolean isException, String exceptionMessage) {
this.columnMetas = columnMetas;
this.results = results;
this.cube = cube;
@@ -83,7 +85,8 @@ public class SQLResponse implements Serializable {
this.exceptionMessage = exceptionMessage;
}
- public SQLResponse(List<SelectedColumnMeta> columnMetas, List<List<String>> results, String cube, int affectedRowCount, boolean isException, String exceptionMessage, boolean isPartial, boolean isAdhoc) {
+ public SQLResponse(List<SelectedColumnMeta> columnMetas, List<List<String>> results, String cube,
+ int affectedRowCount, boolean isException, String exceptionMessage, boolean isPartial, boolean isPushDown) {
this.columnMetas = columnMetas;
this.results = results;
this.cube = cube;
@@ -91,7 +94,7 @@ public class SQLResponse implements Serializable {
this.isException = isException;
this.exceptionMessage = exceptionMessage;
this.isPartial = isPartial;
- this.queryAdHoc = isAdhoc;
+ this.queryPushDown = isPushDown;
}
public List<SelectedColumnMeta> getColumnMetas() {
@@ -147,8 +150,8 @@ public class SQLResponse implements Serializable {
return isPartial;
}
- public boolean isAdHoc() {
- return queryAdHoc;
+ public boolean isPushDown() {
+ return queryPushDown;
}
public long getTotalScanCount() {