Posted to commits@kylin.apache.org by xx...@apache.org on 2020/09/03 10:33:38 UTC

[kylin] branch kylin-on-parquet-v2 updated: KYLIN-4744 Add tracking URL for build spark job on yarn

This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin-on-parquet-v2
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/kylin-on-parquet-v2 by this push:
     new 582969d  KYLIN-4744 Add tracking URL for build spark job on yarn
582969d is described below

commit 582969ded2ab5c89bdc28e86b1514e6aa1b794b2
Author: yaqian.zhang <59...@qq.com>
AuthorDate: Thu Sep 3 16:10:50 2020 +0800

    KYLIN-4744 Add tracking URL for build spark job on yarn
---
 .../kylin/engine/spark/job/NSparkExecutable.java   | 22 ++++++++++++++++------
 1 file changed, 16 insertions(+), 6 deletions(-)
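
The change below registers a PatternedLogger.ILogListener so that the Spark job id, the YARN application id
and the YARN tracking URL extracted from the spark-submit output are persisted as job info, which is how the
tracking URL becomes available for the build step. The sketch that follows is a minimal, self-contained
illustration of that listener pattern only; the LogListener interface, the JobInfoStore class, the info key
name and the regular expression are hypothetical stand-ins for Kylin's PatternedLogger.ILogListener,
ExecutableManager addJobInfo and ExecutableConstants, not the project's actual API.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Hypothetical stand-in for PatternedLogger.ILogListener: receives the
    // key/value pairs extracted from each matched line of command output.
    interface LogListener {
        void onLogEvent(String infoKey, Map<String, String> info);
    }

    // Hypothetical stand-in for the job manager's addJobInfo call: collects
    // the extracted values for one job.
    class JobInfoStore {
        private final Map<String, String> info = new HashMap<>();

        void addJobInfo(Map<String, String> delta) {
            info.putAll(delta);
        }

        Map<String, String> getInfo() {
            return info;
        }
    }

    public class TrackingUrlListenerSketch {

        // Simplified pattern for the tracking URL line that spark-submit prints
        // when running on YARN, e.g. "tracking URL: http://rm:8088/proxy/application_.../".
        private static final Pattern TRACKING_URL = Pattern.compile("tracking URL: (\\S+)");

        public static void main(String[] args) {
            JobInfoStore store = new JobInfoStore();

            // Mirror the commit's idea: the listener only forwards the keys it
            // cares about (here a single hypothetical key), everything else is ignored.
            LogListener listener = (infoKey, info) -> {
                if ("yarn_application_tracking_url".equals(infoKey)) {
                    store.addJobInfo(info);
                }
            };

            // Simulate one line of spark-submit output being scanned by the logger.
            String line = "INFO Client: tracking URL: http://rm-host:8088/proxy/application_1599120650000_0001/";
            Matcher m = TRACKING_URL.matcher(line);
            if (m.find()) {
                Map<String, String> info = new HashMap<>();
                info.put("yarn_application_tracking_url", m.group(1));
                listener.onLogEvent("yarn_application_tracking_url", info);
            }

            System.out.println(store.getInfo());
        }
    }

As in the commit itself, log parsing stays inside the logger; the listener only decides which of the
extracted values (Spark job id, YARN app id, YARN app URL) are worth persisting as job info.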

diff --git a/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java b/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
index bfab276..7f5e4f0 100644
--- a/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
+++ b/kylin-spark-project/kylin-spark-engine/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
@@ -57,8 +57,8 @@ import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.CliCommandExecutor;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.JsonUtil;
-import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.job.common.PatternedLogger;
+import org.apache.kylin.job.constant.ExecutableConstants;
 import org.apache.kylin.job.exception.ExecuteException;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
@@ -245,22 +245,32 @@ public class NSparkExecutable extends AbstractExecutable {
             String kylinJobJar, String appArgs, String jobId) {
         PatternedLogger patternedLogger;
         if (config.isJobLogPrintEnabled()) {
-            patternedLogger = new PatternedLogger(logger);
+            patternedLogger = new PatternedLogger(logger, new PatternedLogger.ILogListener() {
+                @Override
+                public void onLogEvent(String infoKey, Map<String, String> info) {
+                    // only care three properties here
+                    if (ExecutableConstants.SPARK_JOB_ID.equals(infoKey)
+                            || ExecutableConstants.YARN_APP_ID.equals(infoKey)
+                            || ExecutableConstants.YARN_APP_URL.equals(infoKey)) {
+                        getManager().addJobInfo(getId(), info);
+                    }
+                }
+            });
         } else {
             patternedLogger = new PatternedLogger(null);
         }
-
         try {
             String cmd = generateSparkCmd(config, hadoopConf, jars, kylinJobJar, appArgs);
+            patternedLogger.log("cmd: ");
+            patternedLogger.log(cmd);
 
             CliCommandExecutor exec = new CliCommandExecutor();
-            Pair<Integer, String> result = exec.execute(cmd, patternedLogger, jobId);
+            exec.execute(cmd, patternedLogger, jobId);
             updateMetaAfterBuilding(config);
             //Add metrics information to execute result for JobMetricsFacade
-
             getManager().addJobInfo(getId(), getJobMetricsInfo(config));
             Map<String, String> extraInfo = makeExtraInfo(patternedLogger.getInfo());
-            ExecuteResult ret = ExecuteResult.createSucceed(result.getSecond());
+            ExecuteResult ret = ExecuteResult.createSucceed(patternedLogger.getBufferedLog());
             ret.getExtraInfo().putAll(extraInfo);
             return ret;
         } catch (Exception e) {