You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by jo...@apache.org on 2016/09/12 22:24:42 UTC

spark git commit: [SPARK-14818] Post-2.0 MiMa exclusion and build changes

Repository: spark
Updated Branches:
  refs/heads/master 3d40896f4 -> 7c51b99a4


[SPARK-14818] Post-2.0 MiMa exclusion and build changes

This patch makes a handful of post-Spark-2.0 MiMa exclusion and build updates. It should be merged to master and a subset of it should be picked into branch-2.0 in order to test Spark 2.0.1-SNAPSHOT.

- Remove `sketch`, `mllibLocal`, and `streamingKafka010` from the list of excluded subprojects so that MiMa checks them.
- Remove now-unnecessary special-case handling of the Kafka 0.8 artifact in `mimaSettings`.
- Move the exclusion added in SPARK-14743 from `v20excludes` to `v21excludes`, since that patch was only merged into master and not branch-2.0.
- Add exclusions for an API change introduced by SPARK-17096 / #14675.
- Add missing exclusions for the `o.a.spark.internal` and `o.a.spark.sql.internal` packages.

Author: Josh Rosen <jo...@databricks.com>

Closes #15061 from JoshRosen/post-2.0-mima-changes.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/7c51b99a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/7c51b99a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/7c51b99a

Branch: refs/heads/master
Commit: 7c51b99a428a965ff7d136e1cdda20305d260453
Parents: 3d40896
Author: Josh Rosen <jo...@databricks.com>
Authored: Mon Sep 12 15:24:33 2016 -0700
Committer: Josh Rosen <jo...@databricks.com>
Committed: Mon Sep 12 15:24:33 2016 -0700

----------------------------------------------------------------------
 project/MimaBuild.scala    | 11 ++---------
 project/MimaExcludes.scala | 12 +++++++++---
 project/SparkBuild.scala   |  2 +-
 3 files changed, 12 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/7c51b99a/project/MimaBuild.scala
----------------------------------------------------------------------
diff --git a/project/MimaBuild.scala b/project/MimaBuild.scala
index 2a989dd..77397ea 100644
--- a/project/MimaBuild.scala
+++ b/project/MimaBuild.scala
@@ -88,15 +88,8 @@ object MimaBuild {
 
   def mimaSettings(sparkHome: File, projectRef: ProjectRef) = {
     val organization = "org.apache.spark"
-    val previousSparkVersion = "1.6.0"
-    // This check can be removed post-2.0
-    val project = if (previousSparkVersion == "1.6.0" &&
-      projectRef.project == "streaming-kafka-0-8"
-    ) {
-      "streaming-kafka"
-    } else {
-      projectRef.project
-    }
+    val previousSparkVersion = "2.0.0"
+    val project = projectRef.project
     val fullId = "spark-" + project + "_2.11"
     mimaDefaultSettings ++
     Seq(previousArtifact := Some(organization % fullId % previousSparkVersion),

http://git-wip-us.apache.org/repos/asf/spark/blob/7c51b99a/project/MimaExcludes.scala
----------------------------------------------------------------------
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 16f26e7..fbd78ae 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -37,6 +37,8 @@ object MimaExcludes {
   // Exclude rules for 2.1.x
   lazy val v21excludes = v20excludes ++ {
     Seq(
+      // [SPARK-14743] Improve delegation token handling in secure cluster
+      ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.deploy.SparkHadoopUtil.getTimeFromNowToRenewal"),
       // [SPARK-16199][SQL] Add a method to list the referenced columns in data source Filter
       ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.sources.Filter.references"),
       // [SPARK-16853][SQL] Fixes encoder error in DataSet typed select
@@ -51,12 +53,15 @@ object MimaExcludes {
     Seq(
       excludePackage("org.apache.spark.rpc"),
       excludePackage("org.spark-project.jetty"),
+      excludePackage("org.spark_project.jetty"),
+      excludePackage("org.apache.spark.internal"),
       excludePackage("org.apache.spark.unused"),
       excludePackage("org.apache.spark.unsafe"),
       excludePackage("org.apache.spark.memory"),
       excludePackage("org.apache.spark.util.collection.unsafe"),
       excludePackage("org.apache.spark.sql.catalyst"),
       excludePackage("org.apache.spark.sql.execution"),
+      excludePackage("org.apache.spark.sql.internal"),
       ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.mllib.feature.PCAModel.this"),
       ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.status.api.v1.StageData.this"),
       ProblemFilters.exclude[MissingMethodProblem](
@@ -787,9 +792,10 @@ object MimaExcludes {
       ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.sql.SQLContext.parquetFile"),
       ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.sql.SQLContext.applySchema")
     ) ++ Seq(
-        // [SPARK-14743] Improve delegation token handling in secure cluster
-        ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.deploy.SparkHadoopUtil.getTimeFromNowToRenewal")
-      )
+      // SPARK-17096: Improve exception string reported through the StreamingQueryListener
+      ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.streaming.StreamingQueryListener#QueryTerminated.stackTrace"),
+      ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.streaming.StreamingQueryListener#QueryTerminated.this")
+    )
   }
 
   def excludes(version: String) = version match {

http://git-wip-us.apache.org/repos/asf/spark/blob/7c51b99a/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d164ead..a39c93e 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -353,7 +353,7 @@ object SparkBuild extends PomBuild {
   val mimaProjects = allProjects.filterNot { x =>
     Seq(
       spark, hive, hiveThriftServer, catalyst, repl, networkCommon, networkShuffle, networkYarn,
-      unsafe, tags, sketch, mllibLocal, streamingKafka010
+      unsafe, tags
     ).contains(x)
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org