Posted to commits@spark.apache.org by pw...@apache.org on 2014/01/02 06:29:58 UTC

[20/33] git commit: Added tests for SparkConf and fixed a bug

Added tests for SparkConf and fixed a bug

By default, Typesafe Config caches system properties the first time it is
invoked, ignoring later changes unless the caches are explicitly invalidated
(hence the ConfigFactory.invalidateCaches() call below).
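
For context, a minimal sketch of that caching behavior (assuming Typesafe
Config 1.x on the classpath; the property name "spark.test.demo" is
hypothetical):

    import com.typesafe.config.ConfigFactory

    object CacheDemo {
      def main(args: Array[String]): Unit = {
        // First access: Typesafe Config snapshots system properties into a cache.
        System.setProperty("spark.test.demo", "first")
        println(ConfigFactory.systemProperties().getString("spark.test.demo"))  // "first"

        // A later change is not picked up; the cached snapshot is reused.
        System.setProperty("spark.test.demo", "second")
        println(ConfigFactory.systemProperties().getString("spark.test.demo"))  // still "first"

        // Invalidating the caches forces a reload, which is what SparkConf now does.
        ConfigFactory.invalidateCaches()
        println(ConfigFactory.systemProperties().getString("spark.test.demo"))  // "second"
      }
    }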


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/11540b79
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/11540b79
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/11540b79

Branch: refs/heads/master
Commit: 11540b798d622f3883cb40b20cc30ea7d894790a
Parents: 1ee7f5a
Author: Matei Zaharia <ma...@databricks.com>
Authored: Sun Dec 29 18:44:06 2013 -0500
Committer: Matei Zaharia <ma...@databricks.com>
Committed: Sun Dec 29 18:44:06 2013 -0500

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/SparkConf.scala |   1 +
 core/src/test/resources/spark.conf              |   6 +
 .../scala/org/apache/spark/SparkConfSuite.scala | 110 +++++++++++++++++++
 3 files changed, 117 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/11540b79/core/src/main/scala/org/apache/spark/SparkConf.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 670c8b4..8cecaff 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -26,6 +26,7 @@ class SparkConf(loadDefaults: Boolean) extends Serializable with Cloneable {
   private val settings = new HashMap[String, String]()
 
   if (loadDefaults) {
+    ConfigFactory.invalidateCaches()
     val typesafeConfig = ConfigFactory.systemProperties()
       .withFallback(ConfigFactory.parseResources("spark.conf"))
     for (e <- typesafeConfig.entrySet().asScala if e.getKey.startsWith("spark.")) {
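
In the chain above, ConfigFactory.systemProperties() takes precedence over
values parsed from spark.conf, since withFallback only fills in keys missing
from the receiver. A minimal illustration of that precedence (key names
hypothetical):

    import com.typesafe.config.ConfigFactory

    object FallbackDemo {
      def main(args: Array[String]): Unit = {
        val primary  = ConfigFactory.parseString("spark.demo.key = fromSystem")
        val fallback = ConfigFactory.parseString(
          "spark.demo.key = fromFile\nspark.demo.other = x")
        val merged = primary.withFallback(fallback)
        // Where keys collide, the primary config wins; the fallback fills the rest.
        println(merged.getString("spark.demo.key"))    // "fromSystem"
        println(merged.getString("spark.demo.other"))  // "x"
      }
    }

This mirrors the "system properties override spark.conf" test added below.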

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/11540b79/core/src/test/resources/spark.conf
----------------------------------------------------------------------
diff --git a/core/src/test/resources/spark.conf b/core/src/test/resources/spark.conf
new file mode 100644
index 0000000..6c99bdc
--- /dev/null
+++ b/core/src/test/resources/spark.conf
@@ -0,0 +1,6 @@
+spark.test.intTestProperty = 1
+
+spark.test {
+  stringTestProperty = "hi"
+  listTestProperty = ["a", "b"]
+}
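
A note on the HOCON syntax above: the nested spark.test { ... } block flattens
to dotted keys, which is why the suite below reads spark.test.stringTestProperty.
A quick way to see the flattened view (a standalone sketch, not part of the
commit):

    import com.typesafe.config.ConfigFactory
    import scala.collection.JavaConverters._

    object HoconKeys {
      def main(args: Array[String]): Unit = {
        // Parse the test resource from the classpath and print its flattened keys.
        val config = ConfigFactory.parseResources("spark.conf")
        for (e <- config.entrySet().asScala) {
          // Prints e.g. spark.test.stringTestProperty = hi
          //         and spark.test.listTestProperty = [a, b]
          println(e.getKey + " = " + e.getValue.unwrapped)
        }
      }
    }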

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/11540b79/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
new file mode 100644
index 0000000..aaf0b80
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -0,0 +1,110 @@
+package org.apache.spark
+
+import org.scalatest.FunSuite
+
+class SparkConfSuite extends FunSuite with LocalSparkContext {
+  // This test uses the spark.conf in core/src/test/resources, which has a few test properties
+  test("loading from spark.conf") {
+    val conf = new SparkConf()
+    assert(conf.get("spark.test.intTestProperty") === "1")
+    assert(conf.get("spark.test.stringTestProperty") === "hi")
+    // NOTE: we don't use list properties yet, but when we do, we'll have to deal with this syntax
+    assert(conf.get("spark.test.listTestProperty") === "[a, b]")
+  }
+
+  // This test uses the spark.conf in core/src/test/resources, which has a few test properties
+  test("system properties override spark.conf") {
+    try {
+      System.setProperty("spark.test.intTestProperty", "2")
+      val conf = new SparkConf()
+      assert(conf.get("spark.test.intTestProperty") === "2")
+      assert(conf.get("spark.test.stringTestProperty") === "hi")
+    } finally {
+      System.clearProperty("spark.test.intTestProperty")
+    }
+  }
+
+  test("initializing without loading defaults") {
+    try {
+      System.setProperty("spark.test.intTestProperty", "2")
+      val conf = new SparkConf(false)
+      assert(!conf.contains("spark.test.intTestProperty"))
+      assert(!conf.contains("spark.test.stringTestProperty"))
+    } finally {
+      System.clearProperty("spark.test.intTestProperty")
+    }
+  }
+
+  test("named set methods") {
+    val conf = new SparkConf(false)
+
+    conf.setMaster("local[3]")
+    conf.setAppName("My app")
+    conf.setSparkHome("/path")
+    conf.setJars(Seq("a.jar", "b.jar"))
+    conf.setExecutorEnv("VAR1", "value1")
+    conf.setExecutorEnv(Seq(("VAR2", "value2"), ("VAR3", "value3")))
+
+    assert(conf.get("spark.master") === "local[3]")
+    assert(conf.get("spark.appName") === "My app")
+    assert(conf.get("spark.home") === "/path")
+    assert(conf.get("spark.jars") === "a.jar,b.jar")
+    assert(conf.get("spark.executorEnv.VAR1") === "value1")
+    assert(conf.get("spark.executorEnv.VAR2") === "value2")
+    assert(conf.get("spark.executorEnv.VAR3") === "value3")
+
+    // Test the Java-friendly versions of these too
+    conf.setJars(Array("c.jar", "d.jar"))
+    conf.setExecutorEnv(Array(("VAR4", "value4"), ("VAR5", "value5")))
+    assert(conf.get("spark.jars") === "c.jar,d.jar")
+    assert(conf.get("spark.executorEnv.VAR4") === "value4")
+    assert(conf.get("spark.executorEnv.VAR5") === "value5")
+  }
+
+  test("basic get and set") {
+    val conf = new SparkConf(false)
+    assert(conf.getAll.toSet === Set())
+    conf.set("k1", "v1")
+    conf.setAll(Seq(("k2", "v2"), ("k3", "v3")))
+    assert(conf.getAll.toSet === Set(("k1", "v1"), ("k2", "v2"), ("k3", "v3")))
+    conf.set("k1", "v4")
+    conf.setAll(Seq(("k2", "v5"), ("k3", "v6")))
+    assert(conf.getAll.toSet === Set(("k1", "v4"), ("k2", "v5"), ("k3", "v6")))
+    assert(conf.contains("k1"), "conf did not contain k1")
+    assert(!conf.contains("k4"), "conf contained k4")
+    assert(conf.get("k1") === "v4")
+    intercept[Exception] { conf.get("k4") }
+    assert(conf.getOrElse("k4", "not found") === "not found")
+    assert(conf.getOption("k1") === Some("v4"))
+    assert(conf.getOption("k4") === None)
+  }
+
+  test("creating SparkContext without master and app name") {
+    val conf = new SparkConf(false)
+    intercept[SparkException] { sc = new SparkContext(conf) }
+  }
+
+  test("creating SparkContext without master") {
+    val conf = new SparkConf(false).setAppName("My app")
+    intercept[SparkException] { sc = new SparkContext(conf) }
+  }
+
+  test("creating SparkContext without app name") {
+    val conf = new SparkConf(false).setMaster("local")
+    intercept[SparkException] { sc = new SparkContext(conf) }
+  }
+
+  test("creating SparkContext with both master and app name") {
+    val conf = new SparkConf(false).setMaster("local").setAppName("My app")
+    sc = new SparkContext(conf)
+    assert(sc.master === "local")
+    assert(sc.appName === "My app")
+  }
+
+  test("SparkContext property overriding") {
+    val conf = new SparkConf(false).setMaster("local").setAppName("My app")
+    sc = new SparkContext("local[2]", "My other app", conf)
+    assert(sc.master === "local[2]")
+    assert(sc.appName === "My other app")
+  }
+}