Posted to commits@kylin.apache.org by sh...@apache.org on 2020/02/28 12:25:58 UTC

[kylin] branch master updated: KYLIN-4370 Fix "Table or view not found" error when building a cube with Spark for a JDBC source at the "Build cube by Layer" step

This is an automated email from the ASF dual-hosted git repository.

shaofengshi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/master by this push:
     new ebd49cb  KYLIN-4370 Fix "Table or view not found" error when building a cube with Spark for a JDBC source at the "Build cube by Layer" step
ebd49cb is described below

commit ebd49cb4549c74e014a01a05de00e32e87391ed5
Author: rupengwang <wa...@live.cn>
AuthorDate: Thu Feb 20 14:29:07 2020 +0800

    KYLIN-4370 Fix "Table or view not found" error when building a cube with Spark for a JDBC source at the "Build cube by Layer" step
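
Background for this fix: Spark chooses its external catalog (in-memory vs. Hive)
from the "spark.sql.catalogImplementation" entry in the SparkContext's
configuration at the moment a session's SharedState is created. Kylin's Spark
cubing job creates the JavaSparkContext first, and the session builder does not
propagate static options into a pre-existing context, so enableHiveSupport()
alone comes too late and sparkSession.table(...) cannot resolve the Hive
intermediate table of a JDBC source. As a hedged, illustrative alternative (not
what this commit does; the app name is hypothetical), a job that controls
context creation can set the static key up front and avoid Spark-internal
helpers entirely:

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.sql.SparkSession;

    // Set the static catalog key before the context exists, so every session
    // built on top of it resolves tables through the Hive metastore.
    SparkConf conf = new SparkConf()
            .setAppName("KylinCubeBuild") // hypothetical app name
            .set("spark.sql.catalogImplementation", "hive");
    JavaSparkContext jsc = new JavaSparkContext(conf);
    SparkSession spark = SparkSession.builder()
            .config(jsc.getConf()).enableHiveSupport().getOrCreate();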
---
 .../src/main/java/org/apache/kylin/engine/spark/SparkUtil.java        | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkUtil.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkUtil.java
index 5e7d66e..fcd24f1 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkUtil.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkUtil.java
@@ -52,6 +52,7 @@ import org.apache.spark.api.java.function.Function;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.hive.HiveUtils;
 
 public class SparkUtil {
 
@@ -167,7 +168,8 @@ public class SparkUtil {
     }
 
     private static JavaRDD<String[]> getOtherFormatHiveInput(JavaSparkContext sc, String hiveTable) {
-        SparkSession sparkSession = SparkSession.builder().config(sc.getConf()).enableHiveSupport().getOrCreate();
+        SparkSession sparkSession = SparkSession.builder().sparkContext(HiveUtils.withHiveExternalCatalog(sc.sc()))
+                .config(sc.getConf()).enableHiveSupport().getOrCreate();
         final Dataset intermediateTable = sparkSession.table(hiveTable);
         return intermediateTable.javaRDD().map(new Function<Row, String[]>() {
             @Override
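
The hunk above is truncated after the mapper's @Override line. For readability,
here is the whole patched method as it plausibly reads after this commit; the
builder lines are quoted from the diff, while the mapper body is a
reconstruction (an informed sketch of the row-to-string conversion, not quoted
from this mail):

    private static JavaRDD<String[]> getOtherFormatHiveInput(JavaSparkContext sc, String hiveTable) {
        // Force the Hive external catalog on the existing SparkContext before
        // the SparkSession (and its SharedState) come into being. HiveUtils and
        // Builder.sparkContext() are private[spark] in Scala, but Scala
        // package-private compiles to public bytecode, so Java callers can
        // reach them.
        SparkSession sparkSession = SparkSession.builder().sparkContext(HiveUtils.withHiveExternalCatalog(sc.sc()))
                .config(sc.getConf()).enableHiveSupport().getOrCreate();
        final Dataset intermediateTable = sparkSession.table(hiveTable);
        return intermediateTable.javaRDD().map(new Function<Row, String[]>() {
            @Override
            public String[] call(Row row) throws Exception {
                // Sketch: stringify every column, preserving nulls.
                String[] result = new String[row.size()];
                for (int i = 0; i < row.size(); i++) {
                    final Object o = row.get(i);
                    result[i] = (o == null) ? null : o.toString();
                }
                return result;
            }
        });
    }

Design note: mutating the live context's conf works here only because the
catalog choice has not yet been read; once any SparkSession has materialized
its SharedState, the setting is effectively frozen for the lifetime of the
context.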