Posted to commits@kylin.apache.org by ni...@apache.org on 2020/02/07 14:26:12 UTC

[kylin] 17/44: KYLIN-4297 Build cube throw NPE when partition column is not set in JDBC Data Source

This is an automated email from the ASF dual-hosted git repository.

nic pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 892d68b4156aaf5acf4d05e11b61e7c58f1eded3
Author: yaqian.zhang <59...@qq.com>
AuthorDate: Mon Dec 16 10:14:39 2019 +0800

    KYLIN-4297 Build cube throw NPE when partition column is not set in JDBC Data Source
---
 .../main/java/org/apache/kylin/source/jdbc/JdbcHiveInputBase.java | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveInputBase.java b/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveInputBase.java
index 560aa19..f2264e0 100644
--- a/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveInputBase.java
+++ b/source-jdbc/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveInputBase.java
@@ -223,13 +223,11 @@ public class JdbcHiveInputBase extends HiveInputBase {
 
             String splitTableAlias;
             String splitColumn;
-            String quoteFullNamedColumn; // `table.column`
             String splitDatabase;
             TblColRef splitColRef = determineSplitColumn();
             splitTableAlias = splitColRef.getTableAlias();
 
             splitColumn = getColumnIdentityQuoted(splitColRef, jdbcMetadataDialect, metaMap, true);
-            quoteFullNamedColumn = quoteIdentifier(partCol, jdbcMetadataDialect.getDialect());
             splitDatabase = splitColRef.getColumnDesc().getTable().getDatabase();
 
             String selectSql = generateSelectDataStatementRDBMS(flatDesc, true, new String[] { partCol },
@@ -247,8 +245,8 @@ public class JdbcHiveInputBase extends HiveInputBase {
             String filedDelimiter = config.getJdbcSourceFieldDelimiter();
             int mapperNum = config.getSqoopMapperNum();
 
-            String bquery = String.format(Locale.ROOT, "SELECT min(%s), max(%s) FROM %s.%s ", quoteFullNamedColumn,
-                    quoteFullNamedColumn, getSchemaQuoted(metaMap, splitDatabase, jdbcMetadataDialect, true),
+            String bquery = String.format(Locale.ROOT, "SELECT min(%s), max(%s) FROM %s.%s ", splitColumn,
+                    splitColumn, getSchemaQuoted(metaMap, splitDatabase, jdbcMetadataDialect, true),
                     getTableIdentityQuoted(splitColRef.getTableRef(), metaMap, jdbcMetadataDialect, true));
             if (partitionDesc.isPartitioned()) {
                 SegmentRange segRange = flatDesc.getSegRange();
@@ -273,7 +271,7 @@ public class JdbcHiveInputBase extends HiveInputBase {
                     + "--connect \"%s\" --driver %s --username %s --password \"%s\" --query \"%s AND \\$CONDITIONS\" "
                     + "--target-dir %s/%s --split-by %s --boundary-query \"%s\" --null-string '%s' "
                     + "--null-non-string '%s' --fields-terminated-by '%s' --num-mappers %d", sqoopHome, connectionUrl,
-                    driverClass, jdbcUser, jdbcPass, selectSql, jobWorkingDir, hiveTable, partCol, bquery,
+                    driverClass, jdbcUser, jdbcPass, selectSql, jobWorkingDir, hiveTable, splitColumn, bquery,
                     sqoopNullString, sqoopNullNonString, filedDelimiter, mapperNum);
             logger.debug("sqoop cmd : {}", cmd);
             CmdStep step = new CmdStep();
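
Editor's note on the change above: the patch removes the only use of quoteFullNamedColumn, which was built by quoting partCol directly. When no partition column is configured for the JDBC data source, partCol is null, so quoting it is what raises the NullPointerException from KYLIN-4297. The fix reuses splitColumn, which is resolved from determineSplitColumn() and quoted via getColumnIdentityQuoted(), both in the min/max boundary query and in sqoop's --split-by argument. The self-contained Java sketch below illustrates that pattern under stated assumptions: BoundaryQuerySketch, the local quoteIdentifier stand-in, buildBoundaryQuery, and the SELLER_ID / DEFAULT / TEST_KYLIN_FACT names are illustrative, not the actual Kylin API.

    import java.util.Locale;

    /**
     * Minimal sketch of the boundary-query construction touched by this patch.
     * All names here are hypothetical stand-ins for illustration only.
     */
    public class BoundaryQuerySketch {

        // Hypothetical quoting helper: dereferences the column name, so a null
        // column (an unset partition column) fails with a NullPointerException,
        // mirroring the failure mode described in the commit title.
        static String quoteIdentifier(String identifier, String quoteChar) {
            return quoteChar + identifier.trim() + quoteChar; // NPE if identifier is null
        }

        // Patched-style path: format the min/max boundary query from an
        // already-quoted split column, schema, and table.
        static String buildBoundaryQuery(String quotedSplitColumn, String quotedSchema, String quotedTable) {
            return String.format(Locale.ROOT, "SELECT min(%s), max(%s) FROM %s.%s",
                    quotedSplitColumn, quotedSplitColumn, quotedSchema, quotedTable);
        }

        public static void main(String[] args) {
            String partCol = null; // JDBC data source with no partition column configured

            // Old-style path (sketched): quoting partCol directly fails fast.
            try {
                quoteIdentifier(partCol, "`");
            } catch (NullPointerException e) {
                System.out.println("old path: NPE when the partition column is not set");
            }

            // New-style path (sketched): the split column is derived from the
            // flat-table metadata, so it is available even without partitioning.
            String splitColumn = quoteIdentifier("SELLER_ID", "`"); // example column name
            System.out.println(buildBoundaryQuery(splitColumn, "`DEFAULT`", "`TEST_KYLIN_FACT`"));
        }
    }

Running the sketch prints the failure branch first and then a boundary query of the same shape the patched code hands to sqoop through --boundary-query, e.g. SELECT min(`SELLER_ID`), max(`SELLER_ID`) FROM `DEFAULT`.`TEST_KYLIN_FACT`.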