You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by va...@apache.org on 2019/03/05 21:06:03 UTC
[spark] branch master updated: [SPARK-27015][MESOS] properly escape
mesos scheduler arguments
This is an automated email from the ASF dual-hosted git repository.
vanzin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 0ba1954 [SPARK-27015][MESOS] properly escape mesos scheduler arguments
0ba1954 is described below
commit 0ba19543d22bd447f1a40179401e4b9dfa00fdd2
Author: mwlon <ml...@hmc.edu>
AuthorDate: Tue Mar 5 13:05:37 2019 -0800
[SPARK-27015][MESOS] properly escape mesos scheduler arguments
## What changes were proposed in this pull request?
Escape arguments for submissions sent to a Mesos dispatcher; analogous change to https://issues.apache.org/jira/browse/SPARK-24380 for confs.
Since this changes behavior that some users are undoubtedly already working around, probably best to only merge into master.
## How was this patch tested?
Added a new unit test, covering some existing behavior as well.
Closes #23967 from mwlon/SPARK-27015.
Authored-by: mwlon <ml...@hmc.edu>
Signed-off-by: Marcelo Vanzin <va...@cloudera.com>
---
.../cluster/mesos/MesosClusterScheduler.scala | 4 +--
.../cluster/mesos/MesosClusterSchedulerSuite.scala | 37 ++++++++++++++++++++++
2 files changed, 39 insertions(+), 2 deletions(-)
diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala
index a527783..3ffccb0 100644
--- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala
+++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala
@@ -460,7 +460,7 @@ private[spark] class MesosClusterScheduler(
containerInfo
}
- private def getDriverCommandValue(desc: MesosDriverDescription): String = {
+ private[mesos] def getDriverCommandValue(desc: MesosDriverDescription): String = {
val dockerDefined = desc.conf.contains(config.EXECUTOR_DOCKER_IMAGE)
val executorUri = getDriverExecutorURI(desc)
// Gets the path to run spark-submit, and the path to the Mesos sandbox.
@@ -500,7 +500,7 @@ private[spark] class MesosClusterScheduler(
}
}
- val appArguments = desc.command.arguments.mkString(" ")
+ val appArguments = desc.command.arguments.map(shellEscape).mkString(" ")
s"$executable $cmdOptions $primaryResource $appArguments"
}
diff --git a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
index 81b5250..536f5a2 100644
--- a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
+++ b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
@@ -486,6 +486,43 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext wi
Utils.verifyFileBasedValueSecrets(launchedTasks)
}
+ test("assembles a valid driver command, escaping all confs and args") {
+ setScheduler()
+
+ val mem = 1000
+ val cpu = 1
+ val driverDesc = new MesosDriverDescription(
+ "d1",
+ "jar",
+ mem,
+ cpu,
+ true,
+ new Command(
+ "Main",
+ Seq("--a=$2", "--b", "x y z"),
+ Map(),
+ Seq(),
+ Seq(),
+ Seq()),
+ Map("spark.app.name" -> "app name",
+ config.EXECUTOR_URI.key -> "s3a://bucket/spark-version.tgz",
+ "another.conf" -> "\\value"),
+ "s1",
+ new Date())
+
+ val expectedCmd = "cd spark-version*; " +
+ "bin/spark-submit --name \"app name\" --master mesos://mesos://localhost:5050 " +
+ "--driver-cores 1.0 --driver-memory 1000M --class Main --py-files " +
+ "--conf spark.executor.uri=s3a://bucket/spark-version.tgz " +
+ "--conf \"another.conf=\\\\value\" " +
+ "--conf \"spark.app.name=app name\" " +
+ "../jar " +
+ "\"--a=\\$2\" " +
+ "--b \"x y z\""
+
+ assert(scheduler.getDriverCommandValue(driverDesc) == expectedCmd)
+ }
+
private def launchDriverTask(addlSparkConfVars: Map[String, String]): List[TaskInfo] = {
setScheduler()
val mem = 1000
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org