Posted to commits@spark.apache.org by do...@apache.org on 2021/07/16 02:27:59 UTC
[spark] branch branch-3.2 updated: [SPARK-36166][TESTS] Support Scala 2.13 test in `dev/run-tests.py`
This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.2 by this push:
new 12c8c89 [SPARK-36166][TESTS] Support Scala 2.13 test in `dev/run-tests.py`
12c8c89 is described below
commit 12c8c89693d29c4e256d47a91e6fb92c1552b8ab
Author: Dongjoon Hyun <do...@apache.org>
AuthorDate: Thu Jul 15 19:26:07 2021 -0700
[SPARK-36166][TESTS] Support Scala 2.13 test in `dev/run-tests.py`
### What changes were proposed in this pull request?
For Apache Spark 3.2, this PR aims to support Scala 2.13 testing in `dev/run-tests.py` by adding a `SCALA_PROFILE` environment variable, and in `dev/run-tests-jenkins.py` by adding `AMPLAB_JENKINS_BUILD_SCALA_PROFILE`.
In addition, `test-dependencies.sh` is skipped for Scala 2.13 because we do not maintain dependency manifests for it yet. This will be handled after the Apache Spark 3.2.0 release.
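Conceptually (a minimal sketch, not the actual script), the new knob maps a profile tag taken from `SCALA_PROFILE` (or, on Jenkins, `AMPLAB_JENKINS_BUILD_SCALA_PROFILE`) to the corresponding SBT/Maven profile flag:
```
# Minimal sketch of the new Scala profile selection; the env var names come from
# this PR, everything else is simplified for illustration.
import os

SCALA_PROFILES = {
    "scala2.12": ["-Pscala-2.12"],  # default
    "scala2.13": ["-Pscala-2.13"],
}

scala_version = os.environ.get("SCALA_PROFILE", "scala2.12")
print(SCALA_PROFILES[scala_version])  # ['-Pscala-2.13'] when SCALA_PROFILE=scala2.13
```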
### Why are the changes needed?
To test Scala 2.13 with `dev/run-tests.py`.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Manual. The following is the result. Note that this PR aims to **run** the Scala 2.13 tests rather than to make them all **pass**. We will have a daily GitHub Actions job via #33358 and will fix UT failures if any exist.
```
$ dev/change-scala-version.sh 2.13
$ SCALA_PROFILE=scala2.13 dev/run-tests.py
...
========================================================================
Running Scala style checks
========================================================================
[info] Checking Scala style using SBT with these profiles: -Phadoop-3.2 -Phive-2.3 -Pscala-2.13 -Pkubernetes -Phadoop-cloud -Phive -Phive-thriftserver -Pyarn -Pmesos -Pdocker-integration-tests -Pkinesis-asl -Pspark-ganglia-lgpl
...
========================================================================
Building Spark
========================================================================
[info] Building Spark using SBT with these arguments: -Phadoop-3.2 -Phive-2.3 -Pscala-2.13 -Pspark-ganglia-lgpl -Pmesos -Pyarn -Phive-thriftserver -Pkinesis-asl -Pkubernetes -Pdocker-integration-tests -Phive -Phadoop-cloud test:package streaming-kinesis-asl-assembly/assembly
...
[info] Building Spark assembly using SBT with these arguments: -Phadoop-3.2 -Phive-2.3 -Pscala-2.13 -Pspark-ganglia-lgpl -Pmesos -Pyarn -Phive-thriftserver -Pkinesis-asl -Pkubernetes -Pdocker-integration-tests -Phive -Phadoop-cloud assembly/package
...
========================================================================
Running Java style checks
========================================================================
[info] Checking Java style using SBT with these profiles: -Phadoop-3.2 -Phive-2.3 -Pscala-2.13 -Pspark-ganglia-lgpl -Pmesos -Pyarn -Phive-thriftserver -Pkinesis-asl -Pkubernetes -Pdocker-integration-tests -Phive -Phadoop-cloud
...
========================================================================
Building Unidoc API Documentation
========================================================================
[info] Building Spark unidoc using SBT with these arguments: -Phadoop-3.2 -Phive-2.3 -Pscala-2.13 -Pspark-ganglia-lgpl -Pmesos -Pyarn -Phive-thriftserver -Pkinesis-asl -Pkubernetes -Pdocker-integration-tests -Phive -Phadoop-cloud unidoc
...
========================================================================
Running Spark unit tests
========================================================================
[info] Running Spark tests using SBT with these arguments: -Phadoop-3.2 -Phive-2.3 -Pscala-2.13 -Pspark-ganglia-lgpl -Pmesos -Pyarn -Phive-thriftserver -Pkinesis-asl -Pkubernetes -Pdocker-integration-tests -Phive -Phadoop-cloud test
...
```
Closes #33376 from dongjoon-hyun/SPARK-36166.
Authored-by: Dongjoon Hyun <do...@apache.org>
Signed-off-by: Dongjoon Hyun <do...@apache.org>
(cherry picked from commit f66153de787cb3c51c40032c3a5aba3a2eb84680)
Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 dev/run-tests-jenkins.py |  3 +++
 dev/run-tests.py         | 23 ++++++++++++++++++++++-
 dev/test-dependencies.sh | 10 ++++++++++
 3 files changed, 35 insertions(+), 1 deletion(-)
diff --git a/dev/run-tests-jenkins.py b/dev/run-tests-jenkins.py
index 4309a74..f24e702 100755
--- a/dev/run-tests-jenkins.py
+++ b/dev/run-tests-jenkins.py
@@ -177,6 +177,9 @@ def main():
     # Switch the Hive profile based on the PR title:
     if "test-hive2.3" in ghprb_pull_title:
         os.environ["AMPLAB_JENKINS_BUILD_HIVE_PROFILE"] = "hive2.3"
+    # Switch the Scala profile based on the PR title:
+    if "test-scala2.13" in ghprb_pull_title:
+        os.environ["AMPLAB_JENKINS_BUILD_SCALA_PROFILE"] = "scala2.13"
 
     build_display_name = os.environ["BUILD_DISPLAY_NAME"]
     build_url = os.environ["BUILD_URL"]
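For illustration only, a standalone sketch of the PR-title trigger this hunk adds (the `test-scala2.13` marker and the `AMPLAB_JENKINS_BUILD_SCALA_PROFILE` variable come from the patch; the helper function name here is hypothetical):
```
# Hypothetical, self-contained illustration of the Jenkins PR-title switch above.
import os

def select_scala_profile(ghprb_pull_title):
    # A "test-scala2.13" marker in the PR title switches the Jenkins build
    # to the Scala 2.13 profile; otherwise the default (scala2.12) is used.
    if "test-scala2.13" in ghprb_pull_title:
        os.environ["AMPLAB_JENKINS_BUILD_SCALA_PROFILE"] = "scala2.13"

select_scala_profile("[SPARK-36166][TESTS][test-scala2.13] Support Scala 2.13 test")
print(os.environ.get("AMPLAB_JENKINS_BUILD_SCALA_PROFILE", "scala2.12"))  # scala2.13
```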
diff --git a/dev/run-tests.py b/dev/run-tests.py
index 5243dce..3055dcc 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -294,6 +294,24 @@ def exec_sbt(sbt_args=()):
         exit_from_command_with_retcode(sbt_cmd, retcode)
 
 
+def get_scala_profiles(scala_version):
+    """
+    For the given Scala version tag, return a list of Maven/SBT profile flags for
+    building and testing against that Scala version.
+    """
+    sbt_maven_scala_profiles = {
+        "scala2.12": ["-Pscala-2.12"],
+        "scala2.13": ["-Pscala-2.13"],
+    }
+
+    if scala_version in sbt_maven_scala_profiles:
+        return sbt_maven_scala_profiles[scala_version]
+    else:
+        print("[error] Could not find", scala_version, "in the list. Valid options",
+              " are", sbt_maven_scala_profiles.keys())
+        sys.exit(int(os.environ.get("CURRENT_BLOCK", 255)))
+
+
 def get_hadoop_profiles(hadoop_version):
     """
     For the given Hadoop version tag, return a list of Maven/SBT profile flags for
@@ -629,6 +647,7 @@ def main():
         # if we're on the Amplab Jenkins build servers setup variables
         # to reflect the environment settings
         build_tool = os.environ.get("AMPLAB_JENKINS_BUILD_TOOL", "sbt")
+        scala_version = os.environ.get("AMPLAB_JENKINS_BUILD_SCALA_PROFILE", "scala2.12")
         hadoop_version = os.environ.get("AMPLAB_JENKINS_BUILD_PROFILE", "hadoop3.2")
         hive_version = os.environ.get("AMPLAB_JENKINS_BUILD_HIVE_PROFILE", "hive2.3")
         test_env = "amplab_jenkins"
@@ -639,6 +658,7 @@
     else:
         # else we're running locally or GitHub Actions.
         build_tool = "sbt"
+        scala_version = os.environ.get("SCALA_PROFILE", "scala2.12")
         hadoop_version = os.environ.get("HADOOP_PROFILE", "hadoop3.2")
         hive_version = os.environ.get("HIVE_PROFILE", "hive2.3")
         if "GITHUB_ACTIONS" in os.environ:
@@ -648,7 +668,8 @@
 
     print("[info] Using build tool", build_tool, "with Hadoop profile", hadoop_version,
           "and Hive profile", hive_version, "under environment", test_env)
-    extra_profiles = get_hadoop_profiles(hadoop_version) + get_hive_profiles(hive_version)
+    extra_profiles = get_hadoop_profiles(hadoop_version) + get_hive_profiles(hive_version) + \
+        get_scala_profiles(scala_version)
 
     changed_modules = []
     changed_files = []
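Taken together, a rough sketch (with hard-coded stand-ins for the Hadoop/Hive helpers; not runnable against `dev/run-tests.py` itself) of how the new `scala_version` plumbing above ends up in `extra_profiles`, matching the `-Phadoop-3.2 -Phive-2.3 -Pscala-2.13` prefix in the test output earlier:
```
# Illustrative only: how scala_version is sourced and folded into extra_profiles.
import os

if "AMPLAB_JENKINS" in os.environ:
    scala_version = os.environ.get("AMPLAB_JENKINS_BUILD_SCALA_PROFILE", "scala2.12")
else:
    # Local runs and GitHub Actions read SCALA_PROFILE instead.
    scala_version = os.environ.get("SCALA_PROFILE", "scala2.12")

scala_profiles = ["-Pscala-2.13"] if scala_version == "scala2.13" else ["-Pscala-2.12"]
# "-Phadoop-3.2" and "-Phive-2.3" stand in for get_hadoop_profiles/get_hive_profiles.
extra_profiles = ["-Phadoop-3.2"] + ["-Phive-2.3"] + scala_profiles
print(" ".join(extra_profiles))
```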
diff --git a/dev/test-dependencies.sh b/dev/test-dependencies.sh
index de659e9..22619c6 100755
--- a/dev/test-dependencies.sh
+++ b/dev/test-dependencies.sh
@@ -51,6 +51,16 @@ if [ $? != 0 ]; then
   echo -e "Error while getting version string from Maven:\n$OLD_VERSION"
   exit 1
 fi
+SCALA_BINARY_VERSION=$($MVN -q \
+    -Dexec.executable="echo" \
+    -Dexec.args='${scala.binary.version}' \
+    --non-recursive \
+    org.codehaus.mojo:exec-maven-plugin:1.6.0:exec | grep -E '[0-9]+\.[0-9]+')
+if [[ "$SCALA_BINARY_VERSION" != "2.12" ]]; then
+  # TODO(SPARK-36168) Support Scala 2.13 in dev/test-dependencies.sh
+  echo "Skip dependency testing on $SCALA_BINARY_VERSION"
+  exit 0
+fi
 
 set -e
 TEMP_VERSION="spark-$(python3 -S -c "import random; print(random.randrange(100000, 999999))")"
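For readers more comfortable in Python, a rough equivalent of the new version probe and skip (illustrative only; the patch itself does this in shell, and this sketch assumes `mvn` is on the PATH and is run from the repository root):
```
# Illustration of the Scala binary version check added to dev/test-dependencies.sh,
# re-expressed in Python. Not part of the patch.
import re
import subprocess
import sys

out = subprocess.run(
    ["mvn", "-q",
     "-Dexec.executable=echo",
     "-Dexec.args=${scala.binary.version}",
     "--non-recursive",
     "org.codehaus.mojo:exec-maven-plugin:1.6.0:exec"],
    capture_output=True, text=True,
).stdout

match = re.search(r"[0-9]+\.[0-9]+", out)
scala_binary_version = match.group(0) if match else ""
if scala_binary_version != "2.12":
    # Dependency manifests are only maintained for Scala 2.12 until SPARK-36168.
    print("Skip dependency testing on", scala_binary_version)
    sys.exit(0)
```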