Posted to commits@spark.apache.org by rx...@apache.org on 2014/01/16 08:47:43 UTC

[2/3] git commit: Updated unit test comment

Updated unit test comment


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/718a13c1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/718a13c1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/718a13c1

Branch: refs/heads/master
Commit: 718a13c179915767107bc20cd27d9480d069231c
Parents: a268d63
Author: Kay Ousterhout <ka...@gmail.com>
Authored: Wed Jan 15 23:46:14 2014 -0800
Committer: Kay Ousterhout <ka...@gmail.com>
Committed: Wed Jan 15 23:46:14 2014 -0800

----------------------------------------------------------------------
 core/src/test/scala/org/apache/spark/DistributedSuite.scala | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/718a13c1/core/src/test/scala/org/apache/spark/DistributedSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/DistributedSuite.scala b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
index 27c4b01..8de7a32 100644
--- a/core/src/test/scala/org/apache/spark/DistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
@@ -127,7 +127,9 @@ class DistributedSuite extends FunSuite with ShouldMatchers with BeforeAndAfter
 
   test("repeatedly failing task that crashes JVM") {
     // Ensures that if a task fails in a way that crashes the JVM, the job eventually fails rather
-    // than hanging.
+    // than hanging due to retrying the failed task infinitely many times (eventually the
+    // standalone scheduler will remove the application, causing the job to hang waiting to
+    // reconnect to the master).
     sc = new SparkContext(clusterUrl, "test")
     failAfter(Span(100000, Millis)) {
       val thrown = intercept[SparkException] {
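----------------------------------------------------------------------

The diff excerpt above cuts off inside the test body. As a rough, standalone sketch of the pattern the comment describes (ScalaTest's failAfter timeout wrapped around intercept, so a hanging job becomes a test failure rather than a stuck build), the following is illustrative only: the suite name, the local-cluster master string, the executor-killing job body, and the final assertion are assumptions, not the exact contents of DistributedSuite.

import org.apache.spark.{SparkContext, SparkException}
import org.scalatest.FunSuite
import org.scalatest.concurrent.Timeouts
import org.scalatest.time.{Millis, Span}

class JvmCrashSketchSuite extends FunSuite with Timeouts {
  test("repeatedly failing task that crashes JVM (sketch)") {
    // Assumed master string: two executors, one core each, 512 MB per executor.
    val sc = new SparkContext("local-cluster[2,1,512]", "test")
    try {
      // If the job hangs instead of failing, failAfter aborts the test after 100s.
      failAfter(Span(100000, Millis)) {
        val thrown = intercept[SparkException] {
          // Every task kills its executor JVM, so each attempt fails; once the
          // retry limit is hit the scheduler aborts the job with a SparkException.
          sc.parallelize(1 to 10, 2).foreach { _ => System.exit(0) }
        }
        assert(thrown.getMessage != null)
      }
    } finally {
      sc.stop()
    }
  }
}

The failAfter bound is what makes the scenario in the updated comment observable: if the standalone scheduler removes the application and the job hangs waiting to reconnect to the master, the timeout converts that hang into a test failure.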