Posted to commits@spark.apache.org by an...@apache.org on 2015/12/19 01:21:03 UTC

spark git commit: [SPARK-12345][CORE] Do not send SPARK_HOME through Spark submit REST interface

Repository: spark
Updated Branches:
  refs/heads/master 007a32f90 -> ba9332edd


[SPARK-12345][CORE] Do not send SPARK_HOME through Spark submit REST interface

The SPARK_HOME environment variable is usually an invalid location on the remote
machine executing the job. It is picked up by the Mesos support in cluster mode
and, most of the time, causes the job to fail.
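
Below is a minimal, self-contained sketch of the filtering rule described above.
The object name and the sample environment values are illustrative only and are
not part of the patch:

// Sketch of the rule: forward SPARK_* and MESOS_* variables to the cluster,
// but never SPARK_ENV_LOADED and, after this change, never SPARK_HOME.
object EnvFilterSketch {

  def filterSystemEnvironment(env: Map[String, String]): Map[String, String] = {
    env.filter { case (k, _) =>
      (k.startsWith("SPARK_") && k != "SPARK_ENV_LOADED" && k != "SPARK_HOME") ||
        k.startsWith("MESOS_")
    }
  }

  def main(args: Array[String]): Unit = {
    val submitterEnv = Map(
      "SPARK_HOME"                -> "/opt/spark-local-install", // dropped: usually wrong remotely
      "SPARK_ENV_LOADED"          -> "1",                        // dropped: internal marker
      "SPARK_EXECUTOR_MEMORY"     -> "2g",                       // kept
      "MESOS_NATIVE_JAVA_LIBRARY" -> "/usr/lib/libmesos.so",     // kept
      "JAVA_HOME"                 -> "/usr/lib/jvm/java-8"       // dropped: not SPARK_*/MESOS_*
    )
    filterSystemEnvironment(submitterEnv).foreach { case (k, v) => println(s"$k=$v") }
    // prints only SPARK_EXECUTOR_MEMORY=2g and MESOS_NATIVE_JAVA_LIBRARY=/usr/lib/libmesos.so
  }
}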

Fixes SPARK-12345

Author: Luc Bourlier <lu...@typesafe.com>

Closes #10329 from skyluc/issue/SPARK_HOME.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ba9332ed
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ba9332ed
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ba9332ed

Branch: refs/heads/master
Commit: ba9332edd889730c906404041bc83b1643d80961
Parents: 007a32f
Author: Luc Bourlier <lu...@typesafe.com>
Authored: Fri Dec 18 16:21:01 2015 -0800
Committer: Andrew Or <an...@databricks.com>
Committed: Fri Dec 18 16:21:01 2015 -0800

----------------------------------------------------------------------
 .../org/apache/spark/deploy/rest/RestSubmissionClient.scala    | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/ba9332ed/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala
index f0dd667..0744c64 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala
@@ -428,8 +428,10 @@ private[spark] object RestSubmissionClient {
    * Filter non-spark environment variables from any environment.
    */
   private[rest] def filterSystemEnvironment(env: Map[String, String]): Map[String, String] = {
-    env.filter { case (k, _) =>
-      (k.startsWith("SPARK_") && k != "SPARK_ENV_LOADED") || k.startsWith("MESOS_")
+    env.filterKeys { k =>
+      // SPARK_HOME is filtered out because it is usually wrong on the remote machine (SPARK-12345)
+      (k.startsWith("SPARK_") && k != "SPARK_ENV_LOADED" && k != "SPARK_HOME") ||
+        k.startsWith("MESOS_")
     }
   }
 }
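
To make the behaviour change concrete, here is a small stand-alone before/after
check of the rule in this diff. The object and helper names are illustrative and
this is not an actual Spark test:

object Spark12345Check {

  // Rule prior to this patch: SPARK_HOME passed through like any other SPARK_* variable.
  def oldFilter(env: Map[String, String]): Map[String, String] =
    env.filter { case (k, _) =>
      (k.startsWith("SPARK_") && k != "SPARK_ENV_LOADED") || k.startsWith("MESOS_")
    }

  // Rule after this patch: SPARK_HOME is excluded as well.
  def newFilter(env: Map[String, String]): Map[String, String] =
    env.filter { case (k, _) =>
      (k.startsWith("SPARK_") && k != "SPARK_ENV_LOADED" && k != "SPARK_HOME") ||
        k.startsWith("MESOS_")
    }

  def main(args: Array[String]): Unit = {
    val env = Map("SPARK_HOME" -> "/local/only/path", "SPARK_LOCAL_IP" -> "10.0.0.5")
    assert(oldFilter(env).contains("SPARK_HOME"))      // old: forwarded, breaking Mesos cluster mode
    assert(!newFilter(env).contains("SPARK_HOME"))     // new: no longer forwarded
    assert(newFilter(env).contains("SPARK_LOCAL_IP"))  // other SPARK_* variables still forwarded
    println("SPARK-12345 behaviour change verified")
  }
}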

