You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by an...@apache.org on 2014/09/30 08:36:37 UTC

git commit: [SPARK-3734] DriverRunner should not read JAVA_HOME from submitter's environment

Repository: spark
Updated Branches:
  refs/heads/master de700d317 -> b167a8c7e


[SPARK-3734] DriverRunner should not read JAVA_HOME from submitter's environment

When using spark-submit in `cluster` mode to submit a job to a Spark Standalone
cluster, if the JAVA_HOME environment variable was set on the submitting
machine then DriverRunner would attempt to use the submitter's JAVA_HOME to
launch the driver process (instead of the worker's JAVA_HOME), causing the
driver to fail unless the submitter and worker had the same Java location.

This commit fixes the problem by reading JAVA_HOME from sys.env instead of
command.environment, so the worker's own environment is always used.

Author: Josh Rosen <jo...@apache.org>

Closes #2586 from JoshRosen/SPARK-3734 and squashes the following commits:

e9513d9 [Josh Rosen] [SPARK-3734] DriverRunner should not read JAVA_HOME from submitter's environment.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b167a8c7
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b167a8c7
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b167a8c7

Branch: refs/heads/master
Commit: b167a8c7e75d9e816784bd655bce1feb6c447210
Parents: de700d3
Author: Josh Rosen <jo...@apache.org>
Authored: Mon Sep 29 23:36:10 2014 -0700
Committer: Andrew Or <an...@gmail.com>
Committed: Mon Sep 29 23:36:10 2014 -0700

----------------------------------------------------------------------
 .../scala/org/apache/spark/deploy/worker/CommandUtils.scala     | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/b167a8c7/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
index 12e98fd..2e9be2a 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
@@ -30,7 +30,7 @@ import org.apache.spark.util.Utils
 private[spark]
 object CommandUtils extends Logging {
   def buildCommandSeq(command: Command, memory: Int, sparkHome: String): Seq[String] = {
-    val runner = getEnv("JAVA_HOME", command).map(_ + "/bin/java").getOrElse("java")
+    val runner = sys.env.get("JAVA_HOME").map(_ + "/bin/java").getOrElse("java")
 
     // SPARK-698: do not call the run.cmd script, as process.destroy()
     // fails to kill a process tree on Windows
@@ -38,9 +38,6 @@ object CommandUtils extends Logging {
       command.arguments
   }
 
-  private def getEnv(key: String, command: Command): Option[String] =
-    command.environment.get(key).orElse(Option(System.getenv(key)))
-
   /**
    * Attention: this must always be aligned with the environment variables in the run scripts and
    * the way the JAVA_OPTS are assembled there.


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org