You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by jo...@apache.org on 2014/12/24 04:15:23 UTC
spark git commit: [SPARK-4881][Minor] Use SparkConf#getBoolean
instead of get().toBoolean
Repository: spark
Updated Branches:
refs/heads/master fd41eb957 -> 199e59aac
[SPARK-4881][Minor] Use SparkConf#getBoolean instead of get().toBoolean
It's really a minor issue.
In ApplicationMaster, there is code as follows.
val preserveFiles = sparkConf.get("spark.yarn.preserve.staging.files", "false").toBoolean
I think the code can be simplified as follows.
val preserveFiles = sparkConf.getBoolean("spark.yarn.preserve.staging.files", false)
Author: Kousuke Saruta <sa...@oss.nttdata.co.jp>
Closes #3733 from sarutak/SPARK-4881 and squashes the following commits:
1771430 [Kousuke Saruta] Modified the code like sparkConf.get(...).toBoolean to sparkConf.getBoolean(...)
c63daa0 [Kousuke Saruta] Simplified code
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/199e59aa
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/199e59aa
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/199e59aa
Branch: refs/heads/master
Commit: 199e59aacd540e17b31f38e0e32a3618870e9055
Parents: fd41eb9
Author: Kousuke Saruta <sa...@oss.nttdata.co.jp>
Authored: Tue Dec 23 19:14:34 2014 -0800
Committer: Josh Rosen <jo...@databricks.com>
Committed: Tue Dec 23 19:14:34 2014 -0800
----------------------------------------------------------------------
core/src/main/scala/org/apache/spark/SecurityManager.scala | 4 ++--
core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala | 2 +-
.../scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala | 2 +-
.../src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala | 2 +-
4 files changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/199e59aa/core/src/main/scala/org/apache/spark/SecurityManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SecurityManager.scala b/core/src/main/scala/org/apache/spark/SecurityManager.scala
index 49dae52..ec82d09 100644
--- a/core/src/main/scala/org/apache/spark/SecurityManager.scala
+++ b/core/src/main/scala/org/apache/spark/SecurityManager.scala
@@ -151,8 +151,8 @@ private[spark] class SecurityManager(sparkConf: SparkConf) extends Logging with
private val authOn = sparkConf.getBoolean("spark.authenticate", false)
// keep spark.ui.acls.enable for backwards compatibility with 1.0
- private var aclsOn = sparkConf.getOption("spark.acls.enable").getOrElse(
- sparkConf.get("spark.ui.acls.enable", "false")).toBoolean
+ private var aclsOn =
+ sparkConf.getBoolean("spark.acls.enable", sparkConf.getBoolean("spark.ui.acls.enable", false))
// admin acls should be set before view or modify acls
private var adminAcls: Set[String] =
http://git-wip-us.apache.org/repos/asf/spark/blob/199e59aa/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
index a157e36..0001c23 100644
--- a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
@@ -131,7 +131,7 @@ class HadoopRDD[K, V](
// used to build JobTracker ID
private val createTime = new Date()
- private val shouldCloneJobConf = sc.conf.get("spark.hadoop.cloneConf", "false").toBoolean
+ private val shouldCloneJobConf = sc.conf.getBoolean("spark.hadoop.cloneConf", false)
// Returns a JobConf that will be used on slaves to obtain input splits for Hadoop reads.
protected def getJobConf(): JobConf = {
http://git-wip-us.apache.org/repos/asf/spark/blob/199e59aa/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
----------------------------------------------------------------------
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index b2e4543..9c77dff 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -311,7 +311,7 @@ private[spark] class ApplicationMaster(args: ApplicationMasterArguments,
private def cleanupStagingDir(fs: FileSystem) {
var stagingDirPath: Path = null
try {
- val preserveFiles = sparkConf.get("spark.yarn.preserve.staging.files", "false").toBoolean
+ val preserveFiles = sparkConf.getBoolean("spark.yarn.preserve.staging.files", false)
if (!preserveFiles) {
stagingDirPath = new Path(System.getenv("SPARK_YARN_STAGING_DIR"))
if (stagingDirPath == null) {
http://git-wip-us.apache.org/repos/asf/spark/blob/199e59aa/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
----------------------------------------------------------------------
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
index 5f0c67f..eb97a7b 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
@@ -693,7 +693,7 @@ private[spark] object ClientBase extends Logging {
addClasspathEntry(Environment.PWD.$(), env)
// Normally the users app.jar is last in case conflicts with spark jars
- if (sparkConf.get("spark.yarn.user.classpath.first", "false").toBoolean) {
+ if (sparkConf.getBoolean("spark.yarn.user.classpath.first", false)) {
addUserClasspath(args, sparkConf, env)
addFileToClasspath(sparkJar(sparkConf), SPARK_JAR, env)
populateHadoopClasspath(conf, env)
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org