You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by va...@apache.org on 2016/03/10 20:18:14 UTC
spark git commit: [SPARK-13727][CORE] SparkConf.contains does not
consider deprecated keys
Repository: spark
Updated Branches:
refs/heads/master d24801ad2 -> 235f4ac6f
[SPARK-13727][CORE] SparkConf.contains does not consider deprecated keys
The contains() method does not return consistently with get() if the key is deprecated. For example,
import org.apache.spark.SparkConf
val conf = new SparkConf()
conf.set("spark.io.compression.lz4.block.size", "12345") // displays a deprecation warning message
conf.get("spark.io.compression.lz4.block.size") // returns 12345
conf.get("spark.io.compression.lz4.blockSize") // returns 12345
conf.contains("spark.io.compression.lz4.block.size") // returns true
conf.contains("spark.io.compression.lz4.blockSize") // returns false
This fix makes contains() and get() behave consistently for deprecated keys.
I've added a test case for this.
How was this patch tested? Unit tests should be sufficient.
Author: bomeng <bm...@us.ibm.com>
Closes #11568 from bomeng/SPARK-13727.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/235f4ac6
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/235f4ac6
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/235f4ac6
Branch: refs/heads/master
Commit: 235f4ac6fc05802a00889a3a0b39377711cbc7e3
Parents: d24801a
Author: bomeng <bm...@us.ibm.com>
Authored: Thu Mar 10 11:17:35 2016 -0800
Committer: Marcelo Vanzin <va...@cloudera.com>
Committed: Thu Mar 10 11:17:40 2016 -0800
----------------------------------------------------------------------
core/src/main/scala/org/apache/spark/SparkConf.scala | 5 ++++-
.../test/scala/org/apache/spark/SparkConfSuite.scala | 14 ++++++++++++++
2 files changed, 18 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/235f4ac6/core/src/main/scala/org/apache/spark/SparkConf.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 16423e7..f9c01f3 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -388,7 +388,10 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
def getAppId: String = get("spark.app.id")
/** Does the configuration contain a given parameter? */
- def contains(key: String): Boolean = settings.containsKey(key)
+ def contains(key: String): Boolean = {
+ settings.containsKey(key) ||
+ configsWithAlternatives.get(key).toSeq.flatten.exists { alt => contains(alt.key) }
+ }
/** Copy this object */
override def clone: SparkConf = {
http://git-wip-us.apache.org/repos/asf/spark/blob/235f4ac6/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index 79881f3..a883d1b 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -267,6 +267,20 @@ class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSyst
conf.set("spark.akka.lookupTimeout", "4")
assert(RpcUtils.lookupRpcTimeout(conf).duration === (4 seconds))
}
+
+ test("SPARK-13727") {
+ val conf = new SparkConf()
+ // set the conf in the deprecated way
+ conf.set("spark.io.compression.lz4.block.size", "12345")
+ // get the conf in the recommended way
+ assert(conf.get("spark.io.compression.lz4.blockSize") === "12345")
+ // we can still get the conf in the deprecated way
+ assert(conf.get("spark.io.compression.lz4.block.size") === "12345")
+ // the contains() also works as expected
+ assert(conf.contains("spark.io.compression.lz4.block.size"))
+ assert(conf.contains("spark.io.compression.lz4.blockSize"))
+ assert(conf.contains("spark.io.unknown") === false)
+ }
}
class Class1 {}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org