You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2020/07/25 15:20:05 UTC

[spark] branch master updated: [SPARK-32434][CORE] Support Scala 2.13 in AbstractCommandBuilder and load-spark-env scripts

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new aab1e09  [SPARK-32434][CORE] Support Scala 2.13 in AbstractCommandBuilder and load-spark-env scripts
aab1e09 is described below

commit aab1e09f1c40b9d51554bbb4afca67e8d4ccbac6
Author: Dongjoon Hyun <do...@apache.org>
AuthorDate: Sat Jul 25 08:19:02 2020 -0700

    [SPARK-32434][CORE] Support Scala 2.13 in AbstractCommandBuilder and load-spark-env scripts
    
    ### What changes were proposed in this pull request?
    
    This PR aims to support Scala 2.13 at `AbstractCommandBuilder.java` and `load-spark-env` scripts.
    
    ### Why are the changes needed?
    
    Currently, only Scala 2.12 is supported and the following fails.
    ```
    $ dev/change-scala-version.sh 2.13
    $ build/mvn test -pl core -am -Pscala-2.13 -DwildcardSuites=none -Dtest=org.apache.spark.launcher.SparkLauncherSuite
    ...
    [ERROR] Failures:
    [ERROR]   SparkLauncherSuite.testChildProcLauncher:123 expected:<0> but was:<1>
    [ERROR]   SparkLauncherSuite.testSparkLauncherGetError:274
    [ERROR] Tests run: 6, Failures: 2, Errors: 0, Skipped: 0
    ```
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    This should be tested manually with the above command.
    ```
    [INFO] ------------------------------------------------------------------------
    [INFO] Reactor Summary for Spark Project Parent POM 3.1.0-SNAPSHOT:
    [INFO]
    [INFO] Spark Project Parent POM ........................... SUCCESS [  2.186 s]
    [INFO] Spark Project Tags ................................. SUCCESS [  4.400 s]
    [INFO] Spark Project Local DB ............................. SUCCESS [  1.744 s]
    [INFO] Spark Project Networking ........................... SUCCESS [  2.233 s]
    [INFO] Spark Project Shuffle Streaming Service ............ SUCCESS [  1.527 s]
    [INFO] Spark Project Unsafe ............................... SUCCESS [  5.564 s]
    [INFO] Spark Project Launcher ............................. SUCCESS [  1.946 s]
    [INFO] Spark Project Core ................................. SUCCESS [01:21 min]
    [INFO] ------------------------------------------------------------------------
    [INFO] BUILD SUCCESS
    [INFO] ------------------------------------------------------------------------
    [INFO] Total time:  01:41 min
    [INFO] Finished at: 2020-07-24T20:04:34-07:00
    [INFO] ------------------------------------------------------------------------
    ```
    
    Closes #29227 from dongjoon-hyun/SPARK-32434.
    
    Authored-by: Dongjoon Hyun <do...@apache.org>
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 bin/load-spark-env.cmd                             | 42 +++++++++++-----------
 bin/load-spark-env.sh                              | 42 +++++++++++-----------
 .../spark/launcher/AbstractCommandBuilder.java     | 23 ++++++------
 3 files changed, 50 insertions(+), 57 deletions(-)

diff --git a/bin/load-spark-env.cmd b/bin/load-spark-env.cmd
index 5f98cc3..ebbde66 100644
--- a/bin/load-spark-env.cmd
+++ b/bin/load-spark-env.cmd
@@ -37,26 +37,24 @@ if [%SPARK_ENV_LOADED%] == [] (
 
 rem Setting SPARK_SCALA_VERSION if not already set.
 
-rem TODO: revisit for Scala 2.13 support
-set SPARK_SCALA_VERSION=2.12
-rem if [%SPARK_SCALA_VERSION%] == [] (
-rem   set SCALA_VERSION_1=2.12
-rem   set SCALA_VERSION_2=2.11
-rem
-rem   set ASSEMBLY_DIR1=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%
-rem   set ASSEMBLY_DIR2=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%
-rem   set ENV_VARIABLE_DOC=https://spark.apache.org/docs/latest/configuration.html#environment-variables
-rem   if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
-rem     echo "Presence of build for multiple Scala versions detected (%ASSEMBLY_DIR1% and %ASSEMBLY_DIR2%)."
-rem     echo "Remove one of them or, set SPARK_SCALA_VERSION=%SCALA_VERSION_1% in %SPARK_ENV_CMD%."
-rem     echo "Visit %ENV_VARIABLE_DOC% for more details about setting environment variables in spark-env.cmd."
-rem     echo "Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd."
-rem     exit 1
-rem   )
-rem   if exist %ASSEMBLY_DIR1% (
-rem     set SPARK_SCALA_VERSION=%SCALA_VERSION_1%
-rem   ) else (
-rem     set SPARK_SCALA_VERSION=%SCALA_VERSION_2%
-rem   )
-rem )
+if [%SPARK_SCALA_VERSION%] == [] (
+  set SCALA_VERSION_1=2.13
+  set SCALA_VERSION_2=2.12
+
+  set ASSEMBLY_DIR1=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%
+  set ASSEMBLY_DIR2=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%
+  set ENV_VARIABLE_DOC=https://spark.apache.org/docs/latest/configuration.html#environment-variables
+  if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
+    echo "Presence of build for multiple Scala versions detected (%ASSEMBLY_DIR1% and %ASSEMBLY_DIR2%)."
+    echo "Remove one of them or, set SPARK_SCALA_VERSION=%SCALA_VERSION_1% in %SPARK_ENV_CMD%."
+    echo "Visit %ENV_VARIABLE_DOC% for more details about setting environment variables in spark-env.cmd."
+    echo "Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd."
+    exit 1
+  )
+  if exist %ASSEMBLY_DIR1% (
+    set SPARK_SCALA_VERSION=%SCALA_VERSION_1%
+  ) else (
+    set SPARK_SCALA_VERSION=%SCALA_VERSION_2%
+  )
+)
 exit /b 0
diff --git a/bin/load-spark-env.sh b/bin/load-spark-env.sh
index 107e799..04adaee 100644
--- a/bin/load-spark-env.sh
+++ b/bin/load-spark-env.sh
@@ -43,25 +43,23 @@ fi
 
 # Setting SPARK_SCALA_VERSION if not already set.
 
-# TODO: revisit for Scala 2.13 support
-export SPARK_SCALA_VERSION=2.12
-#if [ -z "$SPARK_SCALA_VERSION" ]; then
-#  SCALA_VERSION_1=2.12
-#  SCALA_VERSION_2=2.11
-#
-#  ASSEMBLY_DIR_1="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_1}"
-#  ASSEMBLY_DIR_2="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_2}"
-#  ENV_VARIABLE_DOC="https://spark.apache.org/docs/latest/configuration.html#environment-variables"
-#  if [[ -d "$ASSEMBLY_DIR_1" && -d "$ASSEMBLY_DIR_2" ]]; then
-#    echo "Presence of build for multiple Scala versions detected ($ASSEMBLY_DIR_1 and $ASSEMBLY_DIR_2)." 1>&2
-#    echo "Remove one of them or, export SPARK_SCALA_VERSION=$SCALA_VERSION_1 in ${SPARK_ENV_SH}." 1>&2
-#    echo "Visit ${ENV_VARIABLE_DOC} for more details about setting environment variables in spark-env.sh." 1>&2
-#    exit 1
-#  fi
-#
-#  if [[ -d "$ASSEMBLY_DIR_1" ]]; then
-#    export SPARK_SCALA_VERSION=${SCALA_VERSION_1}
-#  else
-#    export SPARK_SCALA_VERSION=${SCALA_VERSION_2}
-#  fi
-#fi
+if [ -z "$SPARK_SCALA_VERSION" ]; then
+  SCALA_VERSION_1=2.13
+  SCALA_VERSION_2=2.12
+
+  ASSEMBLY_DIR_1="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_1}"
+  ASSEMBLY_DIR_2="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_2}"
+  ENV_VARIABLE_DOC="https://spark.apache.org/docs/latest/configuration.html#environment-variables"
+  if [[ -d "$ASSEMBLY_DIR_1" && -d "$ASSEMBLY_DIR_2" ]]; then
+    echo "Presence of build for multiple Scala versions detected ($ASSEMBLY_DIR_1 and $ASSEMBLY_DIR_2)." 1>&2
+    echo "Remove one of them or, export SPARK_SCALA_VERSION=$SCALA_VERSION_1 in ${SPARK_ENV_SH}." 1>&2
+    echo "Visit ${ENV_VARIABLE_DOC} for more details about setting environment variables in spark-env.sh." 1>&2
+    exit 1
+  fi
+
+  if [[ -d "$ASSEMBLY_DIR_1" ]]; then
+    export SPARK_SCALA_VERSION=${SCALA_VERSION_1}
+  else
+    export SPARK_SCALA_VERSION=${SCALA_VERSION_2}
+  fi
+fi
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index 3ae4633..778fd46 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -235,20 +235,17 @@ abstract class AbstractCommandBuilder {
       return scala;
     }
     String sparkHome = getSparkHome();
-    // TODO: revisit for Scala 2.13 support
     File scala212 = new File(sparkHome, "launcher/target/scala-2.12");
-    // File scala211 = new File(sparkHome, "launcher/target/scala-2.11");
-    // checkState(!scala212.isDirectory() || !scala211.isDirectory(),
-    //   "Presence of build for multiple Scala versions detected.\n" +
-    //   "Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
-    // if (scala212.isDirectory()) {
-    //   return "2.12";
-    // } else {
-    //   checkState(scala211.isDirectory(), "Cannot find any build directories.");
-    //   return "2.11";
-    // }
-    checkState(scala212.isDirectory(), "Cannot find any build directories.");
-    return "2.12";
+    File scala213 = new File(sparkHome, "launcher/target/scala-2.13");
+    checkState(!scala212.isDirectory() || !scala213.isDirectory(),
+      "Presence of build for multiple Scala versions detected.\n" +
+      "Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
+    if (scala213.isDirectory()) {
+      return "2.13";
+    } else {
+      checkState(scala212.isDirectory(), "Cannot find any build directories.");
+      return "2.12";
+    }
   }
 
   String getSparkHome() {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org