You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2019/03/24 23:46:59 UTC
[spark] branch branch-2.4 updated: Revert "Revert
"[SPARK-26606][CORE] Handle driver options properly when submitting to
standalone cluster mode via legacy Client""
This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch branch-2.4
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-2.4 by this push:
new 0faf828 Revert "Revert "[SPARK-26606][CORE] Handle driver options properly when submitting to standalone cluster mode via legacy Client""
0faf828 is described below
commit 0faf828b767e0c2eea8771411db5eaee917b4613
Author: Jungtaek Lim (HeartSaVioR) <ka...@gmail.com>
AuthorDate: Sun Mar 24 16:46:36 2019 -0700
Revert "Revert "[SPARK-26606][CORE] Handle driver options properly when submitting to standalone cluster mode via legacy Client""
This reverts commit 3fc626d874d0201ada8387a7e5806672c79cd6b3.
Closes #24192 from HeartSaVioR/WIP-testing-SPARK-26606-in-branch-2.4.
Authored-by: Jungtaek Lim (HeartSaVioR) <ka...@gmail.com>
Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
core/src/main/scala/org/apache/spark/deploy/Client.scala | 11 ++++++++---
1 file changed, 8 insertions(+), 3 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/Client.scala b/core/src/main/scala/org/apache/spark/deploy/Client.scala
index d514509..708910b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/Client.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/Client.scala
@@ -60,6 +60,10 @@ private class ClientEndpoint(
private val lostMasters = new HashSet[RpcAddress]
private var activeMasterEndpoint: RpcEndpointRef = null
+ private def getProperty(key: String, conf: SparkConf): Option[String] = {
+ sys.props.get(key).orElse(conf.getOption(key))
+ }
+
override def onStart(): Unit = {
driverArgs.cmd match {
case "launch" =>
@@ -69,18 +73,19 @@ private class ClientEndpoint(
val mainClass = "org.apache.spark.deploy.worker.DriverWrapper"
val classPathConf = "spark.driver.extraClassPath"
- val classPathEntries = sys.props.get(classPathConf).toSeq.flatMap { cp =>
+ val classPathEntries = getProperty(classPathConf, conf).toSeq.flatMap { cp =>
cp.split(java.io.File.pathSeparator)
}
val libraryPathConf = "spark.driver.extraLibraryPath"
- val libraryPathEntries = sys.props.get(libraryPathConf).toSeq.flatMap { cp =>
+ val libraryPathEntries = getProperty(libraryPathConf, conf).toSeq.flatMap { cp =>
cp.split(java.io.File.pathSeparator)
}
val extraJavaOptsConf = "spark.driver.extraJavaOptions"
- val extraJavaOpts = sys.props.get(extraJavaOptsConf)
+ val extraJavaOpts = getProperty(extraJavaOptsConf, conf)
.map(Utils.splitCommandString).getOrElse(Seq.empty)
+
val sparkJavaOpts = Utils.sparkJavaOpts(conf)
val javaOpts = sparkJavaOpts ++ extraJavaOpts
val command = new Command(mainClass,
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org