Posted to commits@spark.apache.org by gu...@apache.org on 2021/07/19 09:02:08 UTC
[spark] branch branch-3.2 updated: [SPARK-36166][TESTS][FOLLOW-UP] Add Scala version change logic into testing script
This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.2 by this push:
new 2ae7757 [SPARK-36166][TESTS][FOLLOW-UP] Add Scala version change logic into testing script
2ae7757 is described below
commit 2ae77574dc6ef67635a37cee1589296f5ff4f67f
Author: Hyukjin Kwon <gu...@apache.org>
AuthorDate: Mon Jul 19 18:01:02 2021 +0900
[SPARK-36166][TESTS][FOLLOW-UP] Add Scala version change logic into testing script
### What changes were proposed in this pull request?
This PR is a simple follow-up to https://github.com/apache/spark/pull/33376:
- It simplifies things a bit by removing the default Scala version from the testing script (so we don't have to change it here in the future when the default Scala version changes).
- It calls the `change-scala-version.sh` script when `SCALA_PROFILE` is explicitly specified (see the sketch below).
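Roughly, the resulting behaviour looks like the following minimal sketch (an illustration only, not the actual `dev/run-tests.py` code; the helper names `scala_profiles` and `maybe_switch_scala_version` are made up for this example):

```python
import os
import subprocess

# Illustrative sketch: SCALA_PROFILE is optional, and the Scala version is
# only switched when it is explicitly set.
SBT_MAVEN_SCALA_PROFILES = {
    "scala2.12": ["-Pscala-2.12"],
    "scala2.13": ["-Pscala-2.13"],
}


def scala_profiles(scala_version):
    # No SCALA_PROFILE set: no extra flags, build with the default Scala version.
    if scala_version is None:
        return []
    return SBT_MAVEN_SCALA_PROFILES[scala_version]


def maybe_switch_scala_version(spark_home, scala_version):
    # Rewrite the POMs only when a Scala profile was explicitly requested,
    # e.g. "scala2.13" -> dev/change-scala-version.sh 2.13
    if scala_version is None:
        return
    ver_num = scala_version[-4:]  # e.g. "2.13" from "scala2.13"
    subprocess.check_call(
        [os.path.join(spark_home, "dev", "change-scala-version.sh"), ver_num])


if __name__ == "__main__":
    # Example: SCALA_PROFILE=scala2.13 ./dev/run-tests.py
    scala_version = os.environ.get("SCALA_PROFILE")  # no hard-coded default
    print(scala_profiles(scala_version))  # prints [] when SCALA_PROFILE is unset
```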
### Why are the changes needed?
More refactoring. In addition, this change will be used at https://github.com/apache/spark/pull/33410
### Does this PR introduce _any_ user-facing change?
No, dev-only.
### How was this patch tested?
CI in this PR should test it out.
Closes #33411 from HyukjinKwon/SPARK-36166.
Authored-by: Hyukjin Kwon <gu...@apache.org>
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
(cherry picked from commit 8ee199ef42ee3d9f5696c1a5ab7ecbfd7046d61e)
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
dev/run-tests.py | 29 +++++++++++++++++++++++++----
1 file changed, 25 insertions(+), 4 deletions(-)
diff --git a/dev/run-tests.py b/dev/run-tests.py
index 97523e7..2f05077 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -299,6 +299,9 @@ def get_scala_profiles(scala_version):
     For the given Scala version tag, return a list of Maven/SBT profile flags for
     building and testing against that Scala version.
     """
+    if scala_version is None:
+        return []  # assume it's default.
+
     sbt_maven_scala_profiles = {
         "scala2.12": ["-Pscala-2.12"],
         "scala2.13": ["-Pscala-2.13"],
@@ -312,6 +315,19 @@ def get_scala_profiles(scala_version):
         sys.exit(int(os.environ.get("CURRENT_BLOCK", 255)))
 
 
+def switch_scala_version(scala_version):
+    """
+    Switch the code base to use the given Scala version.
+    """
+    set_title_and_block(
+        "Switch the Scala version to %s" % scala_version, "BLOCK_SCALA_VERSION")
+
+    assert scala_version is not None
+    ver_num = scala_version[-4:]  # Simply extract. e.g.) 2.13 from scala2.13
+    command = [os.path.join(SPARK_HOME, "dev", "change-scala-version.sh"), ver_num]
+    run_cmd(command)
+
+
 def get_hadoop_profiles(hadoop_version):
     """
     For the given Hadoop version tag, return a list of Maven/SBT profile flags for
@@ -647,7 +663,7 @@ def main():
         # if we're on the Amplab Jenkins build servers setup variables
         # to reflect the environment settings
         build_tool = os.environ.get("AMPLAB_JENKINS_BUILD_TOOL", "sbt")
-        scala_version = os.environ.get("AMPLAB_JENKINS_BUILD_SCALA_PROFILE", "scala2.12")
+        scala_version = os.environ.get("AMPLAB_JENKINS_BUILD_SCALA_PROFILE")
         hadoop_version = os.environ.get("AMPLAB_JENKINS_BUILD_PROFILE", "hadoop3.2")
         hive_version = os.environ.get("AMPLAB_JENKINS_BUILD_HIVE_PROFILE", "hive2.3")
         test_env = "amplab_jenkins"
@@ -658,7 +674,7 @@ def main():
     else:
         # else we're running locally or GitHub Actions.
         build_tool = "sbt"
-        scala_version = os.environ.get("SCALA_PROFILE", "scala2.12")
+        scala_version = os.environ.get("SCALA_PROFILE")
         hadoop_version = os.environ.get("HADOOP_PROFILE", "hadoop3.2")
         hive_version = os.environ.get("HIVE_PROFILE", "hive2.3")
         if "GITHUB_ACTIONS" in os.environ:
@@ -666,11 +682,12 @@ def main():
         else:
             test_env = "local"
 
-    print("[info] Using build tool", build_tool, "with Hadoop profile", hadoop_version,
-          "and Hive profile", hive_version, "under environment", test_env)
     extra_profiles = get_hadoop_profiles(hadoop_version) + get_hive_profiles(hive_version) + \
         get_scala_profiles(scala_version)
 
+    print("[info] Using build tool", build_tool, "with profiles",
+          *(extra_profiles + ["under environment", test_env]))
+
     changed_modules = []
     changed_files = []
     included_tags = []
@@ -733,6 +750,10 @@ def main():
         test_environ.update(m.environ)
     setup_test_environ(test_environ)
 
+    if scala_version is not None:
+        # If not set, assume this is default and doesn't need to change.
+        switch_scala_version(scala_version)
+
     should_run_java_style_checks = False
     if not should_only_test_modules:
         # license checks
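For reference, the reworked `[info]` log line above unpacks the assembled profile flags into `print`, so the Scala profile only shows up when `SCALA_PROFILE` was explicitly set. A standalone illustration (the profile flag values here are just examples, not taken from this diff):

```python
# Illustrative only: mimic the new log line with hand-picked profile flags.
build_tool = "sbt"
test_env = "local"

extra_profiles = ["-Phadoop-3.2", "-Phive-2.3"]  # SCALA_PROFILE unset
print("[info] Using build tool", build_tool, "with profiles",
      *(extra_profiles + ["under environment", test_env]))
# [info] Using build tool sbt with profiles -Phadoop-3.2 -Phive-2.3 under environment local

extra_profiles.append("-Pscala-2.13")  # SCALA_PROFILE=scala2.13
print("[info] Using build tool", build_tool, "with profiles",
      *(extra_profiles + ["under environment", test_env]))
# ... with profiles -Phadoop-3.2 -Phive-2.3 -Pscala-2.13 under environment local
```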