Posted to commits@spark.apache.org by zs...@apache.org on 2016/10/26 17:28:13 UTC

spark git commit: [SPARK-16304] LinkageError should not crash Spark executor

Repository: spark
Updated Branches:
  refs/heads/branch-2.0 b4a7b6551 -> 773fbfef1


[SPARK-16304] LinkageError should not crash Spark executor

## What changes were proposed in this pull request?
This patch updates the failure handling logic so that the Spark executor does not crash when it encounters a LinkageError.
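
For context, a hedged sketch (not part of the patch) of the behavioral change in `Utils.isFatalError`; since `Utils` is `private[spark]`, this assumes code compiled in the `org.apache.spark` package, such as a test:

```scala
import org.apache.spark.util.Utils

// Sketch only: with this patch a LinkageError is classified as non-fatal, so the
// executor fails just the running task instead of letting its uncaught-exception
// handling treat the error as fatal and shut down the whole JVM (the pre-patch behavior).
val fatal = Utils.isFatalError(new LinkageError("class version mismatch"))
assert(!fatal)
```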

## How was this patch tested?
Added an end-to-end test in FailureSuite.

Author: petermaxlee <pe...@gmail.com>

Closes #13982 from petermaxlee/SPARK-16304.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/773fbfef
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/773fbfef
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/773fbfef

Branch: refs/heads/branch-2.0
Commit: 773fbfef1929b64229fbf97a91c45cdb1ec1fb1f
Parents: b4a7b65
Author: petermaxlee <pe...@gmail.com>
Authored: Wed Jul 6 10:46:22 2016 -0700
Committer: Shixiong Zhu <sh...@databricks.com>
Committed: Wed Oct 26 10:27:54 2016 -0700

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/util/Utils.scala   | 6 +++++-
 core/src/test/scala/org/apache/spark/FailureSuite.scala | 9 +++++++++
 2 files changed, 14 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/773fbfef/core/src/main/scala/org/apache/spark/util/Utils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 1686edb..b9cf721 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1977,7 +1977,11 @@ private[spark] object Utils extends Logging {
   /** Returns true if the given exception was fatal. See docs for scala.util.control.NonFatal. */
   def isFatalError(e: Throwable): Boolean = {
     e match {
-      case NonFatal(_) | _: InterruptedException | _: NotImplementedError | _: ControlThrowable =>
+      case NonFatal(_) |
+           _: InterruptedException |
+           _: NotImplementedError |
+           _: ControlThrowable |
+           _: LinkageError =>
         false
       case _ =>
         true

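A short standalone illustration (not from the patch) of why the extra `_: LinkageError` alternative is needed: `scala.util.control.NonFatal` deliberately does not match `LinkageError`, so without the explicit case the match above would fall through and report the error as fatal.

```scala
import scala.util.control.NonFatal

// NonFatal matches ordinary exceptions but treats LinkageError, VirtualMachineError,
// InterruptedException, and ControlThrowable as fatal, so it does not match them.
def matchedByNonFatal(t: Throwable): Boolean = t match {
  case NonFatal(_) => true
  case _           => false
}

matchedByNonFatal(new RuntimeException("boom"))   // true
matchedByNonFatal(new LinkageError("bad class"))  // false -> hence the explicit case
```
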
http://git-wip-us.apache.org/repos/asf/spark/blob/773fbfef/core/src/test/scala/org/apache/spark/FailureSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FailureSuite.scala b/core/src/test/scala/org/apache/spark/FailureSuite.scala
index 132f636..d805c67 100644
--- a/core/src/test/scala/org/apache/spark/FailureSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -253,6 +253,15 @@ class FailureSuite extends SparkFunSuite with LocalSparkContext {
     rdd.count()
   }
 
+  test("SPARK-16304: Link error should not crash executor") {
+    sc = new SparkContext("local[1,2]", "test")
+    intercept[SparkException] {
+      sc.parallelize(1 to 2).foreach { i =>
+        throw new LinkageError()
+      }
+    }
+  }
+
   // TODO: Need to add tests with shuffle fetch failures.
 }
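
A note on the test setup: the master string "local[1,2]" runs Spark in local mode with one worker thread and maxTaskFailures = 2, so the LinkageError thrown in the closure fails the task on both attempts and the job aborts with a SparkException, which intercept[SparkException] catches, while the driver and executor JVM keep running.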
 

