You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by sh...@apache.org on 2020/02/28 12:27:07 UTC
[kylin] branch 3.0.x updated: KYLIN-4370 Fix "Table or view not found" error when build cube with spark for JDBC source at step "Build cube by Layer"
This is an automated email from the ASF dual-hosted git repository.
shaofengshi pushed a commit to branch 3.0.x
in repository https://gitbox.apache.org/repos/asf/kylin.git
The following commit(s) were added to refs/heads/3.0.x by this push:
new 18230a9 KYLIN-4370 Fix "Table or view not found" error when build cube with spark for JDBC source at step "Build cube by Layer"
18230a9 is described below
commit 18230a978ce70019411958849685a0adb5d2d862
Author: rupengwang <wa...@live.cn>
AuthorDate: Thu Feb 20 14:29:07 2020 +0800
KYLIN-4370 Fix "Table or view not found" error when build cube with spark for JDBC source at step "Build cube by Layer"
---
.../src/main/java/org/apache/kylin/engine/spark/SparkUtil.java | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkUtil.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkUtil.java
index 5e7d66e..fcd24f1 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkUtil.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkUtil.java
@@ -52,6 +52,7 @@ import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.hive.HiveUtils;
public class SparkUtil {
@@ -167,7 +168,8 @@ public class SparkUtil {
}
private static JavaRDD<String[]> getOtherFormatHiveInput(JavaSparkContext sc, String hiveTable) {
- SparkSession sparkSession = SparkSession.builder().config(sc.getConf()).enableHiveSupport().getOrCreate();
+ SparkSession sparkSession = SparkSession.builder().sparkContext(HiveUtils.withHiveExternalCatalog(sc.sc()))
+ .config(sc.getConf()).enableHiveSupport().getOrCreate();
final Dataset intermediateTable = sparkSession.table(hiveTable);
return intermediateTable.javaRDD().map(new Function<Row, String[]>() {
@Override