Posted to commits@spark.apache.org by do...@apache.org on 2021/01/18 21:45:37 UTC

[spark] branch master updated: [SPARK-31168][SPARK-33913][BUILD] Upgrade Scala to 2.12.13 and Kafka to 2.7.0

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new a65e86a  [SPARK-31168][SPARK-33913][BUILD] Upgrade Scala to 2.12.13 and Kafka to 2.7.0
a65e86a is described below

commit a65e86a65e39f3a61c3248b006e897effd7e4c2a
Author: Dongjoon Hyun <dh...@apple.com>
AuthorDate: Mon Jan 18 13:45:06 2021 -0800

    [SPARK-31168][SPARK-33913][BUILD] Upgrade Scala to 2.12.13 and Kafka to 2.7.0
    
    ### What changes were proposed in this pull request?
    
    This PR is the third attempt to upgrade to a newer Scala 2.12.x release and verify its feasibility. The previous attempts were:
    - https://github.com/apache/spark/pull/27929 (Upgrade Scala to 2.12.11, wangyum )
    - https://github.com/apache/spark/pull/30940 (Upgrade Scala to 2.12.12, viirya )
    
    The `silencer` library is updated accordingly. In addition, the Kafka version upgrade is required because the Kafka tests otherwise fail as follows:
    ```
    [info] KafkaDataConsumerSuite:
    [info] org.apache.spark.streaming.kafka010.KafkaDataConsumerSuite *** ABORTED *** (1 second, 580 milliseconds)
    [info]   java.lang.NoClassDefFoundError: scala/math/Ordering$$anon$7
    [info]   at kafka.api.ApiVersion$.orderingByVersion(ApiVersion.scala:45)
    ```
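
    The failure appears to come from Scala binary-compatibility details baked into Kafka's compiled classes. One plausible reading is that Kafka 2.6.0 was built with the Scala inliner enabled, so references to scala-library internals such as `scala.math.Ordering$$anon$7` (the anonymous class behind `Ordering.by`) ended up directly in Kafka's bytecode; running it against scala-library 2.12.13, where those internals were reorganized, then fails, while Kafka 2.7.0 is built against a newer Scala 2.12 and avoids the mismatch. The sketch below only illustrates the `Ordering.by` pattern involved; `OrderingCompatSketch`, `ApiVer`, and `orderingByVersion` are hypothetical names, not Kafka's real source.
    ```scala
    object OrderingCompatSketch {
      // Hypothetical stand-in for Kafka's versioned API type.
      final case class ApiVer(id: Int)

      // Ordering.by instantiates an anonymous Ordering subclass defined inside
      // scala-library (compiled as scala.math.Ordering$$anon$N), which is exactly
      // the kind of internal name a mismatched scala-library can fail to provide.
      val orderingByVersion: Ordering[ApiVer] = Ordering.by(_.id)

      def main(args: Array[String]): Unit =
        println(List(ApiVer(3), ApiVer(1), ApiVer(2)).sorted(orderingByVersion))
    }
    ```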
    
    ### Why are the changes needed?
    
    Apache Spark has been stuck on Scala 2.12.10 because of regressions in Scala 2.12.11 and 2.12.12. Upgrading to 2.12.13 brings in all the bug fixes from the following releases:
    - https://github.com/scala/scala/releases/tag/v2.12.13
    - https://github.com/scala/scala/releases/tag/v2.12.12
    - https://github.com/scala/scala/releases/tag/v2.12.11
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, the bundled Scala version changes, but only to a bug-fix release (2.12.13).
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    Closes #31223 from dongjoon-hyun/SPARK-31168.
    
    Authored-by: Dongjoon Hyun <dh...@apple.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 dev/deps/spark-deps-hadoop-2.7-hive-2.3                             | 6 +++---
 dev/deps/spark-deps-hadoop-3.2-hive-2.3                             | 6 +++---
 docs/_config.yml                                                    | 2 +-
 .../test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala   | 4 ++--
 pom.xml                                                             | 6 +++---
 project/SparkBuild.scala                                            | 2 +-
 6 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/dev/deps/spark-deps-hadoop-2.7-hive-2.3 b/dev/deps/spark-deps-hadoop-2.7-hive-2.3
index 1a89da4..c3f7b83 100644
--- a/dev/deps/spark-deps-hadoop-2.7-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-2.7-hive-2.3
@@ -212,10 +212,10 @@ protobuf-java/2.5.0//protobuf-java-2.5.0.jar
 py4j/0.10.9.1//py4j-0.10.9.1.jar
 pyrolite/4.30//pyrolite-4.30.jar
 scala-collection-compat_2.12/2.1.1//scala-collection-compat_2.12-2.1.1.jar
-scala-compiler/2.12.10//scala-compiler-2.12.10.jar
-scala-library/2.12.10//scala-library-2.12.10.jar
+scala-compiler/2.12.13//scala-compiler-2.12.13.jar
+scala-library/2.12.13//scala-library-2.12.13.jar
 scala-parser-combinators_2.12/1.1.2//scala-parser-combinators_2.12-1.1.2.jar
-scala-reflect/2.12.10//scala-reflect-2.12.10.jar
+scala-reflect/2.12.13//scala-reflect-2.12.13.jar
 scala-xml_2.12/1.2.0//scala-xml_2.12-1.2.0.jar
 shapeless_2.12/2.3.3//shapeless_2.12-2.3.3.jar
 shims/0.9.0//shims-0.9.0.jar
diff --git a/dev/deps/spark-deps-hadoop-3.2-hive-2.3 b/dev/deps/spark-deps-hadoop-3.2-hive-2.3
index 562a436..14c44f3 100644
--- a/dev/deps/spark-deps-hadoop-3.2-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-3.2-hive-2.3
@@ -182,10 +182,10 @@ protobuf-java/2.5.0//protobuf-java-2.5.0.jar
 py4j/0.10.9.1//py4j-0.10.9.1.jar
 pyrolite/4.30//pyrolite-4.30.jar
 scala-collection-compat_2.12/2.1.1//scala-collection-compat_2.12-2.1.1.jar
-scala-compiler/2.12.10//scala-compiler-2.12.10.jar
-scala-library/2.12.10//scala-library-2.12.10.jar
+scala-compiler/2.12.13//scala-compiler-2.12.13.jar
+scala-library/2.12.13//scala-library-2.12.13.jar
 scala-parser-combinators_2.12/1.1.2//scala-parser-combinators_2.12-1.1.2.jar
-scala-reflect/2.12.10//scala-reflect-2.12.10.jar
+scala-reflect/2.12.13//scala-reflect-2.12.13.jar
 scala-xml_2.12/1.2.0//scala-xml_2.12-1.2.0.jar
 shapeless_2.12/2.3.3//shapeless_2.12-2.3.3.jar
 shims/0.9.0//shims-0.9.0.jar
diff --git a/docs/_config.yml b/docs/_config.yml
index a8d42e4..0442647 100644
--- a/docs/_config.yml
+++ b/docs/_config.yml
@@ -22,7 +22,7 @@ include:
 SPARK_VERSION: 3.2.0-SNAPSHOT
 SPARK_VERSION_SHORT: 3.2.0
 SCALA_BINARY_VERSION: "2.12"
-SCALA_VERSION: "2.12.10"
+SCALA_VERSION: "2.12.13"
 MESOS_VERSION: 1.0.0
 SPARK_ISSUE_TRACKER_URL: https://issues.apache.org/jira/browse/SPARK
 SPARK_GITHUB_URL: https://github.com/apache/spark
diff --git a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala
index 43ed4a8..6a4990e 100644
--- a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala
+++ b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala
@@ -574,11 +574,11 @@ class KafkaTestUtils(
       s"topic $topic still exists in the replica manager")
     // ensure that logs from all replicas are deleted if delete topic is marked successful
     assert(servers.forall(server => topicAndPartitions.forall(tp =>
-      server.getLogManager().getLog(tp).isEmpty)),
+      server.getLogManager.getLog(tp).isEmpty)),
       s"topic $topic still exists in log manager")
     // ensure that topic is removed from all cleaner offsets
     assert(servers.forall(server => topicAndPartitions.forall { tp =>
-      val checkpoints = server.getLogManager().liveLogDirs.map { logDir =>
+      val checkpoints = server.getLogManager.liveLogDirs.map { logDir =>
         new OffsetCheckpointFile(new File(logDir, "cleaner-offset-checkpoint")).read()
       }
       checkpoints.forall(checkpointsPerLogDir => !checkpointsPerLogDir.contains(tp))
diff --git a/pom.xml b/pom.xml
index 3a20427..5f266a1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -133,7 +133,7 @@
     <!-- Version used for internal directory structure -->
     <hive.version.short>2.3</hive.version.short>
     <!-- note that this should be compatible with Kafka brokers version 0.10 and up -->
-    <kafka.version>2.6.0</kafka.version>
+    <kafka.version>2.7.0</kafka.version>
     <!-- After 10.15.1.3, the minimum required version is JDK9 -->
     <derby.version>10.14.2.0</derby.version>
     <parquet.version>1.10.1</parquet.version>
@@ -163,7 +163,7 @@
     <commons.math3.version>3.4.1</commons.math3.version>
     <!-- managed up from 3.2.1 for SPARK-11652 -->
     <commons.collections.version>3.2.2</commons.collections.version>
-    <scala.version>2.12.10</scala.version>
+    <scala.version>2.12.13</scala.version>
     <scala.binary.version>2.12</scala.binary.version>
     <scalatest-maven-plugin.version>2.0.0</scalatest-maven-plugin.version>
     <scalafmt.parameters>--test</scalafmt.parameters>
@@ -2580,7 +2580,7 @@
               <compilerPlugin>
                 <groupId>com.github.ghik</groupId>
                 <artifactId>silencer-plugin_${scala.version}</artifactId>
-                <version>1.6.0</version>
+                <version>1.7.1</version>
               </compilerPlugin>
             </compilerPlugins>
           </configuration>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 668701b..817ab90 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -206,7 +206,7 @@ object SparkBuild extends PomBuild {
   lazy val compilerWarningSettings: Seq[sbt.Def.Setting[_]] = Seq(
     libraryDependencies ++= {
       if (VersionNumber(scalaVersion.value).matchesSemVer(SemanticSelector("<2.13.2"))) {
-        val silencerVersion = if (scalaBinaryVersion.value == "2.13") "1.7.1" else "1.6.0"
+        val silencerVersion = "1.7.1"
         Seq(
           "org.scala-lang.modules" %% "scala-collection-compat" % "2.2.0",
           compilerPlugin("com.github.ghik" % "silencer-plugin" % silencerVersion cross CrossVersion.full),

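The last hunk simplifies the silencer version selection now that 1.7.1 supports Scala 2.12.13 as well as 2.13. For reference, the fragment below is a minimal, self-contained build.sbt-style sketch of how such a version-gated compiler plugin is typically wired up; the 2.13.2 cutoff and the silencer-plugin coordinates mirror the hunk above, while the silencer-lib line and the rest are illustrative assumptions rather than Spark's actual SparkBuild.scala.

```scala
// Illustrative build.sbt fragment (sbt 1.x assumed), not Spark's real build.
import sbt.librarymanagement.{SemanticSelector, VersionNumber}

libraryDependencies ++= {
  // Scala 2.13.2+ ships built-in @nowarn support, so silencer is only needed
  // on older Scala versions; 1.7.1 covers both 2.12.13 and pre-2.13.2 releases.
  if (VersionNumber(scalaVersion.value).matchesSemVer(SemanticSelector("<2.13.2"))) {
    val silencerVersion = "1.7.1"
    Seq(
      compilerPlugin(
        "com.github.ghik" % "silencer-plugin" % silencerVersion cross CrossVersion.full),
      "com.github.ghik" % "silencer-lib" % silencerVersion % Provided cross CrossVersion.full
    )
  } else {
    Seq.empty
  }
}
```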

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org