Posted to commits@zeppelin.apache.org by zj...@apache.org on 2021/12/14 07:26:36 UTC

[zeppelin] branch branch-0.10 updated: [ZEPPELIN-5580] Pass scala version from SparkInterpreterLauncher instead of detecting it at runtime

This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch branch-0.10
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/branch-0.10 by this push:
     new 366b812  [ZEPPELIN-5580] Pass scala version from SparkInterpreterLauncher instead of detecting it at runtime
366b812 is described below

commit 366b812c2840ea4836fd31df95b0436ffa8a52b9
Author: huage1994 <gu...@foxmail.com>
AuthorDate: Mon Nov 22 17:46:41 2021 +0800

    [ZEPPELIN-5580] Pass scala version from SparkInterpreterLauncher instead of detecting it at runtime
    
    Currently we detect the Scala version via scala.util.Properties.versionString(),
    but that call depends on the library.version resource file on the classpath. A user
    may package the scala-2.11 copy of this resource into their jar, which causes the
    wrong Scala version to be detected.
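    
    A minimal sketch of the failure mode (the probe class below is illustrative and
    not part of this patch; only scala-library needs to be on the classpath):
    
        // Illustrative only: shows why runtime detection can report the wrong version.
        public class ScalaVersionProbe {
          public static void main(String[] args) {
            // versionString() reads the Scala version from a resource file on the
            // classpath, so a user jar that bundles the scala-2.11 copy of that
            // resource shadows the real one and the reported version is wrong.
            String detected = scala.util.Properties.versionString();
            System.out.println("Detected at runtime: " + detected);
    
            // With this patch the launcher detects the version from SPARK_HOME and
            // passes it down, so the interpreter reads it from SparkConf instead:
            //   conf.get("zeppelin.spark.scala.version")   // e.g. "2.12"
          }
        }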
    
    [Bug Fix]
    
    * [ ] - Task
    
    https://issues.apache.org/jira/browse/ZEPPELIN-5580
    
    CI pass
    
    * Do the license files need updating? No
    * Are there breaking changes for older versions? No
    * Does this need documentation? No
    
    Author: huage1994 <gu...@foxmail.com>
    
    Closes #4267 from huage1994/ZEPPELIN-5580 and squashes the following commits:
    
    7945faa7e6 [huage1994] [ZEPPELIN-5580] Pass scala version from SparkInterpreterLauncher instead of detecting it at runtime
    
    (cherry picked from commit d5d26583f723d9638d69020aab885afefab82c5c)
    Signed-off-by: Jeff Zhang <zj...@apache.org>
---
 .../apache/zeppelin/spark/SparkInterpreter.java    | 27 ++++++++++++++++------
 .../launcher/SparkInterpreterLauncher.java         |  9 +++++++-
 2 files changed, 28 insertions(+), 8 deletions(-)

diff --git a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
index 7b1460a..a4c9b76 100644
--- a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
+++ b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
@@ -74,6 +74,7 @@ public class SparkInterpreter extends AbstractInterpreter {
   private Object sparkSession;
 
   private SparkVersion sparkVersion;
+  private String scalaVersion;
   private boolean enableSupportedVersionCheck;
 
   public SparkInterpreter(Properties properties) {
@@ -151,7 +152,7 @@ public class SparkInterpreter extends AbstractInterpreter {
    * @throws Exception
    */
   private AbstractSparkScalaInterpreter loadSparkScalaInterpreter(SparkConf conf) throws Exception {
-    String scalaVersion = extractScalaVersion();
+    scalaVersion = extractScalaVersion(conf);
     ClassLoader scalaInterpreterClassLoader = Thread.currentThread().getContextClassLoader();
 
     String zeppelinHome = System.getenv("ZEPPELIN_HOME");
@@ -254,14 +255,26 @@ public class SparkInterpreter extends AbstractInterpreter {
     return sparkVersion;
   }
 
-  private String extractScalaVersion() throws InterpreterException {
-    String scalaVersionString = scala.util.Properties.versionString();
+  private String extractScalaVersion(SparkConf conf) throws InterpreterException {
+    // Use the Scala version passed by SparkInterpreterLauncher under "zeppelin.spark.scala.version", if present.
+
+    // Otherwise, fall back to detecting the Scala version from the library.version resource file on the classpath.
+    // That resource is sometimes inaccurate, so this fallback is mainly kept for unit tests.
+    String scalaVersionString;
+    if (conf.contains("zeppelin.spark.scala.version")) {
+      scalaVersionString = conf.get("zeppelin.spark.scala.version");
+    } else {
+      scalaVersionString = scala.util.Properties.versionString();
+    }
     LOGGER.info("Using Scala: " + scalaVersionString);
-    if (scalaVersionString.contains("version 2.10")) {
+
+    if (StringUtils.isEmpty(scalaVersionString)) {
+      throw new InterpreterException("Scala Version is empty");
+    } else if (scalaVersionString.contains("2.10")) {
       return "2.10";
-    } else if (scalaVersionString.contains("version 2.11")) {
+    } else if (scalaVersionString.contains("2.11")) {
       return "2.11";
-    } else if (scalaVersionString.contains("version 2.12")) {
+    } else if (scalaVersionString.contains("2.12")) {
       return "2.12";
     } else {
       throw new InterpreterException("Unsupported scala version: " + scalaVersionString);
@@ -269,7 +282,7 @@ public class SparkInterpreter extends AbstractInterpreter {
   }
 
   public boolean isScala212() throws InterpreterException {
-    return extractScalaVersion().equals("2.12");
+    return scalaVersion.equals("2.12");
   }
 
   public boolean isScala210() throws InterpreterException {
diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
index 50bdc88..2aa7ccb 100644
--- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
+++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
@@ -118,6 +118,14 @@ public class SparkInterpreterLauncher extends StandardInterpreterLauncher {
     }
 
 
+    String scalaVersion = null;
+    try {
+      scalaVersion = detectSparkScalaVersion(getEnv("SPARK_HOME"), env);
+      context.getProperties().put("zeppelin.spark.scala.version", scalaVersion);
+    } catch (Exception e) {
+      throw new IOException("Failed to detect the Scala version: " + e.getMessage(), e);
+    }
+
     if (isYarnMode()
         && getDeployMode().equals("cluster")) {
       try {
@@ -133,7 +141,6 @@ public class SparkInterpreterLauncher extends StandardInterpreterLauncher {
           }
         }
 
-        String scalaVersion = detectSparkScalaVersion(getEnv("SPARK_HOME"), env);
         Path scalaFolder =  Paths.get(zConf.getZeppelinHome(), "/interpreter/spark/scala-" + scalaVersion);
         if (!scalaFolder.toFile().exists()) {
           throw new IOException("spark scala folder " + scalaFolder.toFile() + " doesn't exist");
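
A hand-rolled sketch of how launcher-side detection can work (this is not the actual
detectSparkScalaVersion() implementation, which is not shown in this diff): one way to
infer the Scala version Spark was built with is to inspect the scala-library jar shipped
under SPARK_HOME/jars.

    import java.io.File;

    // Illustrative helper, not part of this patch.
    public class SparkScalaVersionSketch {

      // Returns e.g. "2.12" for scala-library-2.12.10.jar found under $SPARK_HOME/jars,
      // or null if no scala-library jar is present.
      static String guessScalaVersion(String sparkHome) {
        File[] jars = new File(sparkHome, "jars").listFiles();
        if (jars == null) {
          return null;
        }
        for (File jar : jars) {
          String name = jar.getName();
          if (name.startsWith("scala-library-") && name.endsWith(".jar")) {
            String version = name.substring("scala-library-".length(), name.length() - ".jar".length());
            int secondDot = version.indexOf('.', version.indexOf('.') + 1);
            return secondDot > 0 ? version.substring(0, secondDot) : version;
          }
        }
        return null;
      }

      public static void main(String[] args) {
        System.out.println(guessScalaVersion(System.getenv("SPARK_HOME")));
      }
    }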