Posted to commits@kylin.apache.org by sh...@apache.org on 2018/08/20 08:06:18 UTC

[kylin] branch 2.3.x updated: KYLIN-3500 fix duplicated table name at createSqoopToFlatHiveStep when using jdbc...

This is an automated email from the ASF dual-hosted git repository.

shaofengshi pushed a commit to branch 2.3.x
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/2.3.x by this push:
     new f0057d3  KYLIN-3500 fix duplicated table name at createSqoopToFlatHiveStep when using jdbc...
f0057d3 is described below

commit f0057d380ca1804223523e6a988651e0abfc7c13
Author: shaofengshi <sh...@apache.org>
AuthorDate: Mon Aug 20 16:06:28 2018 +0800

    KYLIN-3500 fix duplicated table name at createSqoopToFlatHiveStep when using jdbc...
---
 .../src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java     | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java
index bbaaa4a..f9da636 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/jdbc/JdbcHiveMRInput.java
@@ -186,7 +186,7 @@ public class JdbcHiveMRInput extends HiveMRInput {
             String cmd = String.format("%s/sqoop import -Dorg.apache.sqoop.splitter.allow_text_splitter=true "
                     + "-Dmapreduce.job.queuename=%s "
                     + "--connect \"%s\" --driver %s --username %s --password %s --query \"%s AND \\$CONDITIONS\" "
-                    + "--target-dir %s/%s --split-by %s.%s --boundary-query \"%s\" --null-string '' "
+                    + "--target-dir %s/%s --split-by %s --boundary-query \"%s\" --null-string '' "
                     + "--fields-terminated-by '%s' --num-mappers %d", sqoopHome, queueName, connectionUrl, driverClass,
                     jdbcUser, jdbcPass, selectSql, jobWorkingDir, hiveTable, splitTable, splitColumn, bquery,
                     filedDelimiter, mapperNum);
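
Editor's note on the change above: the removed format fragment "--split-by %s.%s" prepends the split table name to the split column, so a split column that already carries its table qualifier is rendered with the table name twice. The sketch below is illustrative only, not the actual Kylin code; the variable values, and the assumption that the single remaining placeholder receives the already-qualified column name, are assumptions for illustration.

    // Minimal, self-contained illustration (assumed values, not taken from a real build).
    public class SplitByExample {
        public static void main(String[] args) {
            String splitTable = "LINEORDER";               // assumed split table name
            String splitColumn = "LINEORDER.LO_ORDERKEY";  // assumed column, already table-qualified

            // Old fragment (removed line): the table prefix is added again -> duplicated name.
            String before = String.format("--split-by %s.%s", splitTable, splitColumn);
            // -> "--split-by LINEORDER.LINEORDER.LO_ORDERKEY"

            // New fragment (added line), assuming the placeholder gets the qualified column.
            String after = String.format("--split-by %s", splitColumn);
            // -> "--split-by LINEORDER.LO_ORDERKEY"

            System.out.println(before);
            System.out.println(after);
        }
    }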