You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by pw...@apache.org on 2014/05/17 05:25:20 UTC
git commit: SPARK-1864 Look in spark conf instead of system properties when propagating configuration to executors.
Repository: spark
Updated Branches:
refs/heads/master fed6303f2 -> a80a6a139
SPARK-1864 Look in spark conf instead of system properties when propagating configuration to executors.
Author: Michael Armbrust <mi...@databricks.com>
Closes #808 from marmbrus/confClasspath and squashes the following commits:
4c31d57 [Michael Armbrust] Look in spark conf instead of system properties when propagating configuration to executors.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a80a6a13
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a80a6a13
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a80a6a13
Branch: refs/heads/master
Commit: a80a6a139e729ee3f81ec4f0028e084d2d9f7e82
Parents: fed6303
Author: Michael Armbrust <mi...@databricks.com>
Authored: Fri May 16 20:25:10 2014 -0700
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Fri May 16 20:25:10 2014 -0700
----------------------------------------------------------------------
.../scheduler/cluster/SparkDeploySchedulerBackend.scala | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/a80a6a13/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
index 933f6e0..9768670 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
@@ -46,12 +46,13 @@ private[spark] class SparkDeploySchedulerBackend(
CoarseGrainedSchedulerBackend.ACTOR_NAME)
val args = Seq(driverUrl, "{{EXECUTOR_ID}}", "{{HOSTNAME}}", "{{CORES}}", "{{WORKER_URL}}")
val extraJavaOpts = sc.conf.getOption("spark.executor.extraJavaOptions")
- val classPathEntries = sys.props.get("spark.executor.extraClassPath").toSeq.flatMap { cp =>
- cp.split(java.io.File.pathSeparator)
- }
- val libraryPathEntries = sys.props.get("spark.executor.extraLibraryPath").toSeq.flatMap { cp =>
+ val classPathEntries = sc.conf.getOption("spark.executor.extraClassPath").toSeq.flatMap { cp =>
cp.split(java.io.File.pathSeparator)
}
+ val libraryPathEntries =
+ sc.conf.getOption("spark.executor.extraLibraryPath").toSeq.flatMap { cp =>
+ cp.split(java.io.File.pathSeparator)
+ }
val command = Command(
"org.apache.spark.executor.CoarseGrainedExecutorBackend", args, sc.executorEnvs,