Posted to commits@kylin.apache.org by xx...@apache.org on 2020/11/09 02:13:27 UTC

[kylin] branch kylin-on-parquet-v2 updated: KYLIN-4811 Support cube level configuration for BuildingJob

This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin-on-parquet-v2
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/kylin-on-parquet-v2 by this push:
     new 270b332  KYLIN-4811 Support cube level configuration for BuildingJob
270b332 is described below

commit 270b3326dcb0a6f0f7342323cb214bb1a9ece032
Author: XiaoxiangYu <xx...@apache.org>
AuthorDate: Sun Nov 8 21:12:46 2020 +0800

    KYLIN-4811 Support cube level configuration for BuildingJob
---
 .../apache/kylin/engine/spark/job/NSparkExecutable.java    | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)
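
    The change below makes NSparkExecutable resolve its KylinConfig from the
    CubeInstance (via CubeManager) instead of from the ExecutableContext, so
    cube-level "Configuration Overrides" apply to the Spark building job. A
    minimal sketch of that lookup, assuming a cube named "sample_cube" (the
    name is a placeholder, not part of the commit):

    import org.apache.kylin.common.KylinConfig;
    import org.apache.kylin.cube.CubeInstance;
    import org.apache.kylin.cube.CubeManager;

    public class CubeLevelConfigSketch {
        public static void main(String[] args) {
            // Server-level configuration loaded from kylin.properties
            KylinConfig globalConfig = KylinConfig.getInstanceFromEnv();

            // Look up the cube the same way the patched doWork() does;
            // "sample_cube" is a placeholder cube name, not from the commit
            CubeManager cubeManager = CubeManager.getInstance(globalConfig);
            CubeInstance cube = cubeManager.getCube("sample_cube");

            // cube.getConfig() layers the cube's configuration overrides on
            // top of project- and server-level settings, so properties such
            // as kylin.engine.spark-conf.* defined on the cube now reach the
            // building job
            KylinConfig cubeLevelConfig = cube.getConfig();
            System.out.println(cubeLevelConfig);
        }
    }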

diff --git a/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java b/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
index dd306d4..98f63a1 100644
--- a/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
+++ b/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
@@ -36,6 +36,8 @@ import java.util.Map.Entry;
 
 import java.util.Set;
 
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.engine.spark.utils.MetaDumpUtil;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.io.FileUtils;
@@ -103,7 +105,10 @@ public class NSparkExecutable extends AbstractExecutable {
     @Override
     protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
         //context.setLogPath(getSparkDriverLogHdfsPath(context.getConfig()));
-        final KylinConfig config = wrapConfig(context);
+        CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
+        CubeInstance cube = cubeMgr.getCube(this.getCubeName());
+        KylinConfig config = cube.getConfig();
+        config = wrapConfig(config);
 
         String sparkHome = KylinConfig.getSparkHome();
         if (StringUtils.isEmpty(sparkHome) && !config.isUTEnv()) {
@@ -192,9 +197,12 @@ public class NSparkExecutable extends AbstractExecutable {
         return String.format("%s.%s.log", config.getJobTmpOutputStorePath(getProject(), getId()),
                 System.currentTimeMillis());
     }*/
-
+    
     protected KylinConfig wrapConfig(ExecutableContext context) {
-        KylinConfig originalConfig = context.getConfig();
+        return wrapConfig(context.getConfig());
+    }
+
+    protected KylinConfig wrapConfig(KylinConfig originalConfig) {
         String project = getParam(MetadataConstants.P_PROJECT_NAME);
         Preconditions.checkState(StringUtils.isNotBlank(project), "job " + getId() + " project info is empty");
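
    The patch also splits wrapConfig into two overloads: the original
    context-based method is kept for existing callers and now delegates to a
    new overload that accepts a KylinConfig directly, which lets doWork() pass
    in the cube-level config it resolved from CubeManager. A self-contained
    sketch of that delegation pattern, using hypothetical stand-in types
    (FakeContext and FakeConfig are illustrative only, not Kylin classes):

    class WrapConfigDelegationSketch {
        // Hypothetical stand-ins for ExecutableContext and KylinConfig,
        // used only to keep this sketch self-contained
        static class FakeContext {
            private final FakeConfig config = new FakeConfig("context/global config");
            FakeConfig getConfig() { return config; }
        }

        static class FakeConfig {
            private final String source;
            FakeConfig(String source) { this.source = source; }
            @Override public String toString() { return source; }
        }

        // Original signature is kept, so existing callers keep working;
        // it now simply forwards to the new overload
        FakeConfig wrapConfig(FakeContext context) {
            return wrapConfig(context.getConfig());
        }

        // New overload accepts an already-resolved config, which is how the
        // cube-level config obtained in doWork() flows into the wrapping step
        FakeConfig wrapConfig(FakeConfig originalConfig) {
            // project/job-specific adjustments would be applied here
            return originalConfig;
        }

        public static void main(String[] args) {
            WrapConfigDelegationSketch sketch = new WrapConfigDelegationSketch();
            System.out.println(sketch.wrapConfig(new FakeContext()));
            System.out.println(sketch.wrapConfig(new FakeConfig("cube-level config")));
        }
    }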