Posted to commits@spark.apache.org by sr...@apache.org on 2017/06/29 08:59:40 UTC

spark git commit: [SPARK-21240] Fix code style for constructing and stopping a SparkContext in UT.

Repository: spark
Updated Branches:
  refs/heads/master 70085e83d -> d106a74c5


[SPARK-21240] Fix code style for constructing and stopping a SparkContext in UT.

## What changes were proposed in this pull request?

Same as SPARK-20985.
Fix code style for constructing and stopping a `SparkContext` in tests. Ensure the context is stopped, so that other tests do not fail because only one `SparkContext` can exist at a time.
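
For reference, `LocalSparkContext.withSpark` implements the loan pattern: it lends the context to the test body and stops it in a `finally` block whether the body succeeds or throws. A minimal Scala sketch of that pattern (simplified for illustration; the helper's exact body in Spark's test sources may differ slightly):

    import org.apache.spark.SparkContext

    // Loan pattern: run the test body against the given context, then
    // always stop it, so that later tests can create their own
    // SparkContext (only one may be active per JVM).
    def withSpark[T](sc: SparkContext)(f: SparkContext => T): T = {
      try {
        f(sc)
      } finally {
        if (sc != null) {
          sc.stop()
        }
      }
    }

With this helper, the try/finally boilerplate in each test collapses to `withSpark(new SparkContext(conf)) { sc => ... }`, as the diffs below show.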

Author: jinxing <ji...@126.com>

Closes #18454 from jinxing64/SPARK-21240.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/d106a74c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/d106a74c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/d106a74c

Branch: refs/heads/master
Commit: d106a74c53f493c3c18741a9b19cb821dace4ba2
Parents: 70085e8
Author: jinxing <ji...@126.com>
Authored: Thu Jun 29 09:59:36 2017 +0100
Committer: Sean Owen <so...@cloudera.com>
Committed: Thu Jun 29 09:59:36 2017 +0100

----------------------------------------------------------------------
 .../test/scala/org/apache/spark/scheduler/MapStatusSuite.scala | 6 ++----
 .../org/apache/spark/sql/execution/ui/SQLListenerSuite.scala   | 6 ++----
 2 files changed, 4 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/d106a74c/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
index e612013..276169e 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
@@ -26,6 +26,7 @@ import org.roaringbitmap.RoaringBitmap
 
 import org.apache.spark.{SparkConf, SparkContext, SparkEnv, SparkFunSuite}
 import org.apache.spark.internal.config
+import org.apache.spark.LocalSparkContext._
 import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
 import org.apache.spark.storage.BlockManagerId
 
@@ -160,12 +161,9 @@ class MapStatusSuite extends SparkFunSuite {
       .set("spark.serializer", classOf[KryoSerializer].getName)
       .setMaster("local")
       .setAppName("SPARK-21133")
-    val sc = new SparkContext(conf)
-    try {
+    withSpark(new SparkContext(conf)) { sc =>
       val count = sc.parallelize(0 until 3000, 10).repartition(2001).collect().length
       assert(count === 3000)
-    } finally {
-      sc.stop()
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/d106a74c/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala
index e6cd41e..82eff5e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala
@@ -25,6 +25,7 @@ import org.mockito.Mockito.mock
 import org.apache.spark._
 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.internal.config
+import org.apache.spark.LocalSparkContext._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.scheduler._
 import org.apache.spark.sql.{DataFrame, SparkSession}
@@ -496,8 +497,7 @@ class SQLListenerMemoryLeakSuite extends SparkFunSuite {
         .setAppName("test")
         .set(config.MAX_TASK_FAILURES, 1) // Don't retry the tasks to run this test quickly
         .set("spark.sql.ui.retainedExecutions", "50") // Set it to 50 to run this test quickly
-      val sc = new SparkContext(conf)
-      try {
+      withSpark(new SparkContext(conf)) { sc =>
         SparkSession.sqlListener.set(null)
         val spark = new SparkSession(sc)
         import spark.implicits._
@@ -522,8 +522,6 @@ class SQLListenerMemoryLeakSuite extends SparkFunSuite {
         assert(spark.sharedState.listener.executionIdToData.size <= 100)
         assert(spark.sharedState.listener.jobIdToExecutionId.size <= 100)
         assert(spark.sharedState.listener.stageIdToStageMetrics.size <= 100)
-      } finally {
-        sc.stop()
       }
     }
   }

