Posted to commits@spark.apache.org by va...@apache.org on 2016/08/25 23:11:47 UTC

spark git commit: [SPARK-17240][CORE] Make SparkConf serializable again.

Repository: spark
Updated Branches:
  refs/heads/master 3e4c7db4d -> 9b5a1d1d5


[SPARK-17240][CORE] Make SparkConf serializable again.

Make the config reader transient and initialize it lazily, so that
serialization works with both Java and Kryo (and, hopefully, any other
custom serializer).

Added a unit test to make sure SparkConf remains serializable and that
the reader works with both built-in serializers.

Author: Marcelo Vanzin <va...@cloudera.com>

Closes #14813 from vanzin/SPARK-17240.
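
For readers unfamiliar with the pattern: a @transient lazy val is skipped
during serialization and is transparently rebuilt, from the fields that did
survive, on first access in the deserialized copy. Below is a minimal,
self-contained sketch of that idea; Helper, Holder, and RoundTrip are
hypothetical stand-ins for ConfigReader and SparkConf, not Spark code.

    import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
    import java.util.concurrent.ConcurrentHashMap

    // Hypothetical stand-in for ConfigReader: deliberately NOT Serializable.
    class Helper(settings: ConcurrentHashMap[String, String]) {
      def get(key: String): Option[String] = Option(settings.get(key))
    }

    // Hypothetical stand-in for SparkConf.
    class Holder extends Serializable {
      private val settings = new ConcurrentHashMap[String, String]()

      // A plain `val` here would fail with NotSerializableException, and a
      // `@transient val` would come back as null. `@transient lazy val` skips
      // the field on write and re-runs the initializer on first access.
      @transient private lazy val helper: Helper = new Helper(settings)

      def set(key: String, value: String): Unit = settings.put(key, value)
      def get(key: String): Option[String] = helper.get(key)
    }

    object RoundTrip extends App {
      val holder = new Holder
      holder.set("k", "v")

      // Java-serialize, then deserialize, the holder.
      val bytes = new ByteArrayOutputStream()
      val out = new ObjectOutputStream(bytes)
      out.writeObject(holder)
      out.close()
      val copy = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
        .readObject().asInstanceOf[Holder]

      assert(copy.get("k") == Some("v"))  // helper was lazily re-created
    }

Kryo's default FieldSerializer likewise skips transient fields, which is why
the one annotation covers both of Spark's built-in serializers.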


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9b5a1d1d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9b5a1d1d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9b5a1d1d

Branch: refs/heads/master
Commit: 9b5a1d1d53bc4412de3cbc86dc819b0c213229a8
Parents: 3e4c7db
Author: Marcelo Vanzin <va...@cloudera.com>
Authored: Thu Aug 25 16:11:42 2016 -0700
Committer: Marcelo Vanzin <va...@cloudera.com>
Committed: Thu Aug 25 16:11:42 2016 -0700

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/SparkConf.scala | 11 ++++++----
 .../scala/org/apache/spark/SparkConfSuite.scala | 22 +++++++++++++++++++-
 2 files changed, 28 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/9b5a1d1d/core/src/main/scala/org/apache/spark/SparkConf.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 31b41d9..e85e5aa 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -56,10 +56,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
 
   private val settings = new ConcurrentHashMap[String, String]()
 
-  private val reader = new ConfigReader(new SparkConfigProvider(settings))
-  reader.bindEnv(new ConfigProvider {
-    override def get(key: String): Option[String] = Option(getenv(key))
-  })
+  @transient private lazy val reader: ConfigReader = {
+    val _reader = new ConfigReader(new SparkConfigProvider(settings))
+    _reader.bindEnv(new ConfigProvider {
+      override def get(key: String): Option[String] = Option(getenv(key))
+    })
+    _reader
+  }
 
   if (loadDefaults) {
     loadFromSystemProperties(false)

http://git-wip-us.apache.org/repos/asf/spark/blob/9b5a1d1d/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index 1f0f655..83906cf 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -26,8 +26,9 @@ import scala.util.{Random, Try}
 
 import com.esotericsoftware.kryo.Kryo
 
+import org.apache.spark.internal.config._
 import org.apache.spark.network.util.ByteUnit
-import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
+import org.apache.spark.serializer.{JavaSerializer, KryoRegistrator, KryoSerializer}
 import org.apache.spark.util.{ResetSystemProperties, RpcUtils}
 
 class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSystemProperties {
@@ -283,6 +284,25 @@ class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSyst
     assert(conf.contains("spark.io.compression.lz4.blockSize"))
     assert(conf.contains("spark.io.unknown") === false)
   }
+
+  val serializers = Map(
+    "java" -> new JavaSerializer(new SparkConf()),
+    "kryo" -> new KryoSerializer(new SparkConf()))
+
+  serializers.foreach { case (name, ser) =>
+    test(s"SPARK-17240: SparkConf should be serializable ($name)") {
+      val conf = new SparkConf()
+      conf.set(DRIVER_CLASS_PATH, "${" + DRIVER_JAVA_OPTIONS.key + "}")
+      conf.set(DRIVER_JAVA_OPTIONS, "test")
+
+      val serializer = ser.newInstance()
+      val bytes = serializer.serialize(conf)
+      val deser = serializer.deserialize[SparkConf](bytes)
+
+      assert(conf.get(DRIVER_CLASS_PATH) === deser.get(DRIVER_CLASS_PATH))
+    }
+  }
+
 }
 
 class Class1 {}

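The test above drives both serializers through Spark's own serializer API.
For reference, the same behavior can be reproduced with raw Kryo rather than
Spark's KryoSerializer wrapper, reusing the hypothetical Holder class from
the sketch earlier; KryoRoundTrip and the buffer size are assumptions, not
Spark code.

    import com.esotericsoftware.kryo.Kryo
    import com.esotericsoftware.kryo.io.{Input, Output}

    object KryoRoundTrip extends App {
      val kryo = new Kryo()
      kryo.setRegistrationRequired(false)  // let FieldSerializer handle Holder

      val holder = new Holder
      holder.set("k", "v")

      // Kryo's FieldSerializer skips transient fields by default, so the
      // lazy helper is rebuilt in the copy just as with Java serialization.
      val out = new Output(1 << 16)
      kryo.writeObject(out, holder)
      out.close()

      val copy = kryo.readObject(new Input(out.toBytes), classOf[Holder])
      assert(copy.get("k") == Some("v"))
    }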
