Posted to commits@spark.apache.org by jo...@apache.org on 2016/02/18 01:17:35 UTC

spark git commit: [SPARK-13344][TEST] Fix harmless accumulator not found exceptions

Repository: spark
Updated Branches:
  refs/heads/master 97ee85daf -> 9451fed52


[SPARK-13344][TEST] Fix harmless accumulator not found exceptions

See [JIRA](https://issues.apache.org/jira/browse/SPARK-13344) for more detail. This was caused by #10835.

Author: Andrew Or <an...@databricks.com>

Closes #11222 from andrewor14/fix-test-accum-exceptions.
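[Editor's note] The change below moves accumulator cleanup out of SparkFunSuite.withFixture and into afterEach/afterAll hooks, always wrapping the clear in try/finally so the parent hook still runs if the cleanup throws. The following is a minimal, self-contained sketch of that pattern using a ScalaTest 2.x-era FunSuite (the same base class SparkFunSuite extends); `Registry` here is a hypothetical stand-in for Spark's internal Accumulators singleton, not actual Spark code.

    import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite}

    // Hypothetical stand-in for a process-wide registry such as Spark's
    // Accumulators object: entries accumulate across tests unless cleared.
    object Registry {
      private val entries = scala.collection.mutable.Map[Long, String]()
      def register(id: Long, value: String): Unit = entries(id) = value
      def size: Int = entries.size
      def clear(): Unit = entries.clear()
    }

    // Cleanup pattern from this commit: clear shared state in afterEach and
    // afterAll, with try/finally guaranteeing super's hook is still invoked.
    class RegistryCleanupSuite extends FunSuite
      with BeforeAndAfterEach
      with BeforeAndAfterAll {

      override def afterEach(): Unit = {
        try {
          Registry.clear()
        } finally {
          super.afterEach()
        }
      }

      override def afterAll(): Unit = {
        try {
          // Avoid leaking registry entries between suites.
          Registry.clear()
        } finally {
          super.afterAll()
        }
      }

      test("registry starts empty for each test") {
        assert(Registry.size === 0)
        Registry.register(1L, "a")
        assert(Registry.size === 1)
      }
    }

In the actual patch, the per-test clear in the two accumulator suites plus the suite-level clear in SparkFunSuite replace the unconditional clear that previously ran inside withFixture after every test.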


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9451fed5
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9451fed5
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9451fed5

Branch: refs/heads/master
Commit: 9451fed52cb8a00c706b582a0b51d8cd832f9350
Parents: 97ee85d
Author: Andrew Or <an...@databricks.com>
Authored: Wed Feb 17 16:17:20 2016 -0800
Committer: Josh Rosen <jo...@databricks.com>
Committed: Wed Feb 17 16:17:20 2016 -0800

----------------------------------------------------------------------
 .../scala/org/apache/spark/AccumulatorSuite.scala |  8 ++++++++
 .../apache/spark/InternalAccumulatorSuite.scala   |  8 ++++++++
 .../scala/org/apache/spark/SparkFunSuite.scala    | 18 ++++++++++++++----
 3 files changed, 30 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/9451fed5/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index 4d49fe5..8acd043 100644
--- a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -34,6 +34,14 @@ import org.apache.spark.serializer.JavaSerializer
 class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContext {
   import AccumulatorParam._
 
+  override def afterEach(): Unit = {
+    try {
+      Accumulators.clear()
+    } finally {
+      super.afterEach()
+    }
+  }
+
   implicit def setAccum[A]: AccumulableParam[mutable.Set[A], A] =
     new AccumulableParam[mutable.Set[A], A] {
       def addInPlace(t1: mutable.Set[A], t2: mutable.Set[A]) : mutable.Set[A] = {

http://git-wip-us.apache.org/repos/asf/spark/blob/9451fed5/core/src/test/scala/org/apache/spark/InternalAccumulatorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/InternalAccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/InternalAccumulatorSuite.scala
index c426bb7..4745506 100644
--- a/core/src/test/scala/org/apache/spark/InternalAccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/InternalAccumulatorSuite.scala
@@ -28,6 +28,14 @@ class InternalAccumulatorSuite extends SparkFunSuite with LocalSparkContext {
   import InternalAccumulator._
   import AccumulatorParam._
 
+  override def afterEach(): Unit = {
+    try {
+      Accumulators.clear()
+    } finally {
+      super.afterEach()
+    }
+  }
+
   test("get param") {
     assert(getParam(EXECUTOR_DESERIALIZE_TIME) === LongAccumulatorParam)
     assert(getParam(EXECUTOR_RUN_TIME) === LongAccumulatorParam)

http://git-wip-us.apache.org/repos/asf/spark/blob/9451fed5/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
index d3359c7..99366a3 100644
--- a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -18,14 +18,26 @@
 package org.apache.spark
 
 // scalastyle:off
-import org.scalatest.{FunSuite, Outcome}
+import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}
 
 /**
  * Base abstract class for all unit tests in Spark for handling common functionality.
  */
-private[spark] abstract class SparkFunSuite extends FunSuite with Logging {
+private[spark] abstract class SparkFunSuite
+  extends FunSuite
+  with BeforeAndAfterAll
+  with Logging {
 // scalastyle:on
 
+  protected override def afterAll(): Unit = {
+    try {
+      // Avoid leaking map entries in tests that use accumulators without SparkContext
+      Accumulators.clear()
+    } finally {
+      super.afterAll()
+    }
+  }
+
   /**
    * Log the suite name and the test name before and after each test.
    *
@@ -42,8 +54,6 @@ private[spark] abstract class SparkFunSuite extends FunSuite with Logging {
       test()
     } finally {
       logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
-      // Avoid leaking map entries in tests that use accumulators without SparkContext
-      Accumulators.clear()
     }
   }
 

