Posted to commits@zeppelin.apache.org by zj...@apache.org on 2020/09/15 03:28:46 UTC

[zeppelin] branch master updated: [ZEPPELIN-5034]. spark.driver.extraClassPath in spark-defaults.conf is overridden by Zeppelin

This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/master by this push:
     new e703361  [ZEPPELIN-5034]. spark.driver.extraClassPath in spark-defaults.conf is overridden by Zeppelin
e703361 is described below

commit e703361eceebabdcf53e378e5be21fd4b7714ba1
Author: Jeff Zhang <zj...@apache.org>
AuthorDate: Sat Sep 12 14:39:42 2020 +0800

    [ZEPPELIN-5034]. spark.driver.extraClassPath in spark-defaults.conf is overridden by Zeppelin
    
    ### What is this PR for?
    
    The issue is that `spark.driver.extraClassPath` in spark-defaults.conf does not take effect because Zeppelin overrides it: interpreter.sh specifies `--driver-class-path` explicitly. This PR fixes the issue by passing `spark.driver.extraClassPath` from spark-defaults.conf to interpreter.sh via the environment variable `ZEPPELIN_INTP_CLASSPATH`.
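
    For illustration, a minimal sketch of the intended flow (the extra classpath value below is hypothetical):

    ```
    # Hypothetical entry in $SPARK_CONF_DIR/spark-defaults.conf:
    spark.driver.extraClassPath /opt/custom/jars/*

    # SparkInterpreterLauncher reads this property and exports it to the
    # interpreter process as ZEPPELIN_INTP_CLASSPATH=/opt/custom/jars/*.
    # interpreter.sh then appends to that value instead of overwriting it:
    ZEPPELIN_INTP_CLASSPATH+=":${CLASSPATH}:${ZEPPELIN_INTERPRETER_API_JAR}"
    ```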
    
    ### What type of PR is it?
    [Bug Fix]
    
    ### What is the Jira issue?
    * https://issues.apache.org/jira/browse/ZEPPELIN-5034
    
    ### How should this be tested?
    * Manually tested
    
    ### Questions:
    * Do the license files need to be updated? No
    * Are there any breaking changes for older versions? No
    * Does this need documentation? No
    
    Author: Jeff Zhang <zj...@apache.org>
    
    Closes #3902 from zjffdu/ZEPPELIN-5034 and squashes the following commits:
    
    804c602c2 [Jeff Zhang] [ZEPPELIN-5034]. spark.driver.extraClassPath in spark-defaults.conf is overridden by Zeppelin
---
 bin/interpreter.sh                                    |  3 +--
 .../launcher/SparkInterpreterLauncher.java            | 19 +++++++++++++++++++
 2 files changed, 20 insertions(+), 2 deletions(-)

diff --git a/bin/interpreter.sh b/bin/interpreter.sh
index c81ca0b..d1d6315 100755
--- a/bin/interpreter.sh
+++ b/bin/interpreter.sh
@@ -94,9 +94,8 @@ fi
 
 check_java_version
 
-
 ZEPPELIN_INTERPRETER_API_JAR=$(find "${ZEPPELIN_HOME}/interpreter" -name 'zeppelin-interpreter-shaded-*.jar')
-ZEPPELIN_INTP_CLASSPATH="${CLASSPATH}:${ZEPPELIN_INTERPRETER_API_JAR}"
+ZEPPELIN_INTP_CLASSPATH+=":${CLASSPATH}:${ZEPPELIN_INTERPRETER_API_JAR}"
 
 # construct classpath
 if [[ -d "${ZEPPELIN_HOME}/zeppelin-interpreter/target/classes" ]]; then
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
index 2d8fde5..66340ca 100644
--- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
@@ -193,6 +193,25 @@ public class SparkInterpreterLauncher extends StandardInterpreterLauncher {
     }
 
     env.put("PYSPARK_PIN_THREAD", "true");
+
+    // ZEPPELIN_INTP_CLASSPATH
+    String sparkConfDir = getEnv("SPARK_CONF_DIR");
+    if (StringUtils.isBlank(sparkConfDir)) {
+      String sparkHome = getEnv("SPARK_HOME");
+      sparkConfDir = sparkHome + "/conf";
+    }
+    Properties sparkDefaultProperties = new Properties();
+    File sparkDefaultFile = new File(sparkConfDir, "spark-defaults.conf");
+    if (sparkDefaultFile.exists()) {
+      sparkDefaultProperties.load(new FileInputStream(sparkDefaultFile));
+      String driverExtraClassPath = sparkDefaultProperties.getProperty("spark.driver.extraClassPath");
+      if (!StringUtils.isBlank(driverExtraClassPath)) {
+        env.put("ZEPPELIN_INTP_CLASSPATH", driverExtraClassPath);
+      }
+    } else {
+      LOGGER.warn("spark-defaults.conf doesn't exist: " + sparkDefaultFile.getAbsolutePath());
+    }
+
     LOGGER.debug("buildEnvFromProperties: " + env);
     return env;