You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by we...@apache.org on 2017/07/09 03:11:09 UTC
spark git commit: [SPARK-21345][SQL][TEST][TEST-MAVEN][BRANCH-2.1]
SparkSessionBuilderSuite should clean up stopped sessions.
Repository: spark
Updated Branches:
refs/heads/branch-2.1 7f7b63bb6 -> 5e2bfd5bc
[SPARK-21345][SQL][TEST][TEST-MAVEN][BRANCH-2.1] SparkSessionBuilderSuite should clean up stopped sessions.
## What changes were proposed in this pull request?
`SparkSessionBuilderSuite` should clean up stopped sessions. Otherwise, it leaves behind some stopped `SparkContext`s interfering with other test suites using `SharedSQLContext`.
Recently, the master branch fails consecutively.
- https://amplab.cs.berkeley.edu/jenkins/view/Spark%20QA%20Test%20(Dashboard)/
## How was this patch tested?
Pass the Jenkins with an updated suite.
Author: Dongjoon Hyun <do...@apache.org>
Closes #18572 from dongjoon-hyun/SPARK-21345-BRANCH-2.1.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5e2bfd5b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5e2bfd5b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5e2bfd5b
Branch: refs/heads/branch-2.1
Commit: 5e2bfd5bca14d604270e8bd3018f5771b83ea07f
Parents: 7f7b63b
Author: Dongjoon Hyun <do...@apache.org>
Authored: Sun Jul 9 11:11:02 2017 +0800
Committer: Wenchen Fan <we...@databricks.com>
Committed: Sun Jul 9 11:11:02 2017 +0800
----------------------------------------------------------------------
.../spark/sql/SparkSessionBuilderSuite.scala | 44 ++++++++------------
1 file changed, 18 insertions(+), 26 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/5e2bfd5b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
index 386d13d..1c6afa5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
@@ -17,49 +17,48 @@
package org.apache.spark.sql
+import org.scalatest.BeforeAndAfterEach
+
import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
/**
* Test cases for the builder pattern of [[SparkSession]].
*/
-class SparkSessionBuilderSuite extends SparkFunSuite {
+class SparkSessionBuilderSuite extends SparkFunSuite with BeforeAndAfterEach {
- private var initialSession: SparkSession = _
+ override def afterEach(): Unit = {
+ // This suite should not interfere with the other test suites.
+ SparkSession.getActiveSession.foreach(_.stop())
+ SparkSession.clearActiveSession()
+ SparkSession.getDefaultSession.foreach(_.stop())
+ SparkSession.clearDefaultSession()
+ }
- private lazy val sparkContext: SparkContext = {
- initialSession = SparkSession.builder()
+ test("create with config options and propagate them to SparkContext and SparkSession") {
+ val session = SparkSession.builder()
.master("local")
.config("spark.ui.enabled", value = false)
.config("some-config", "v2")
.getOrCreate()
- initialSession.sparkContext
- }
-
- test("create with config options and propagate them to SparkContext and SparkSession") {
- // Creating a new session with config - this works by just calling the lazy val
- sparkContext
- assert(initialSession.sparkContext.conf.get("some-config") == "v2")
- assert(initialSession.conf.get("some-config") == "v2")
- SparkSession.clearDefaultSession()
+ assert(session.sparkContext.conf.get("some-config") == "v2")
+ assert(session.conf.get("some-config") == "v2")
}
test("use global default session") {
- val session = SparkSession.builder().getOrCreate()
+ val session = SparkSession.builder().master("local").getOrCreate()
assert(SparkSession.builder().getOrCreate() == session)
- SparkSession.clearDefaultSession()
}
test("config options are propagated to existing SparkSession") {
- val session1 = SparkSession.builder().config("spark-config1", "a").getOrCreate()
+ val session1 = SparkSession.builder().master("local").config("spark-config1", "a").getOrCreate()
assert(session1.conf.get("spark-config1") == "a")
val session2 = SparkSession.builder().config("spark-config1", "b").getOrCreate()
assert(session1 == session2)
assert(session1.conf.get("spark-config1") == "b")
- SparkSession.clearDefaultSession()
}
test("use session from active thread session and propagate config options") {
- val defaultSession = SparkSession.builder().getOrCreate()
+ val defaultSession = SparkSession.builder().master("local").getOrCreate()
val activeSession = defaultSession.newSession()
SparkSession.setActiveSession(activeSession)
val session = SparkSession.builder().config("spark-config2", "a").getOrCreate()
@@ -70,16 +69,14 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
SparkSession.clearActiveSession()
assert(SparkSession.builder().getOrCreate() == defaultSession)
- SparkSession.clearDefaultSession()
}
test("create a new session if the default session has been stopped") {
- val defaultSession = SparkSession.builder().getOrCreate()
+ val defaultSession = SparkSession.builder().master("local").getOrCreate()
SparkSession.setDefaultSession(defaultSession)
defaultSession.stop()
val newSession = SparkSession.builder().master("local").getOrCreate()
assert(newSession != defaultSession)
- newSession.stop()
}
test("create a new session if the active thread session has been stopped") {
@@ -88,11 +85,9 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
activeSession.stop()
val newSession = SparkSession.builder().master("local").getOrCreate()
assert(newSession != activeSession)
- newSession.stop()
}
test("create SparkContext first then SparkSession") {
- sparkContext.stop()
val conf = new SparkConf().setAppName("test").setMaster("local").set("key1", "value1")
val sparkContext2 = new SparkContext(conf)
val session = SparkSession.builder().config("key2", "value2").getOrCreate()
@@ -101,14 +96,12 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
assert(session.sparkContext.conf.get("key1") == "value1")
assert(session.sparkContext.conf.get("key2") == "value2")
assert(session.sparkContext.conf.get("spark.app.name") == "test")
- session.stop()
}
test("SPARK-15887: hive-site.xml should be loaded") {
val session = SparkSession.builder().master("local").getOrCreate()
assert(session.sessionState.newHadoopConf().get("hive.in.test") == "true")
assert(session.sparkContext.hadoopConfiguration.get("hive.in.test") == "true")
- session.stop()
}
test("SPARK-15991: Set global Hadoop conf") {
@@ -120,7 +113,6 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
assert(session.sessionState.newHadoopConf().get(mySpecialKey) == mySpecialValue)
} finally {
session.sparkContext.hadoopConfiguration.unset(mySpecialKey)
- session.stop()
}
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org