Posted to commits@spark.apache.org by pw...@apache.org on 2014/04/29 21:54:11 UTC

git commit: SPARK-1588. Restore SPARK_YARN_USER_ENV and SPARK_JAVA_OPTS for YARN.

Repository: spark
Updated Branches:
  refs/heads/master 7d1505841 -> bf8d0aa27


SPARK-1588.  Restore SPARK_YARN_USER_ENV and SPARK_JAVA_OPTS for YARN.

Author: Sandy Ryza <sa...@cloudera.com>

Closes #586 from sryza/sandy-spark-1588 and squashes the following commits:

35eb38e [Sandy Ryza] Scalify
b361684 [Sandy Ryza] SPARK-1588.  Restore SPARK_YARN_USER_ENV and SPARK_JAVA_OPTS for YARN.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/bf8d0aa2
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/bf8d0aa2
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/bf8d0aa2

Branch: refs/heads/master
Commit: bf8d0aa27844b1e58f131d49a3f668d1614ca6e7
Parents: 7d15058
Author: Sandy Ryza <sa...@cloudera.com>
Authored: Tue Apr 29 12:54:02 2014 -0700
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Tue Apr 29 12:54:02 2014 -0700

----------------------------------------------------------------------
 .../org/apache/spark/deploy/yarn/ClientBase.scala   | 16 +++++++++++++---
 .../spark/deploy/yarn/ExecutorRunnableUtil.scala    |  4 ++--
 2 files changed, 15 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/bf8d0aa2/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
----------------------------------------------------------------------
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
index f2be821..27a518c 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
@@ -263,9 +263,13 @@ trait ClientBase extends Logging {
     distCacheMgr.setDistFilesEnv(env)
     distCacheMgr.setDistArchivesEnv(env)
 
-    // Allow users to specify some environment variables.
-    YarnSparkHadoopUtil.setEnvFromInputString(env, System.getenv("SPARK_YARN_USER_ENV"),
-      File.pathSeparator)
+    sys.env.get("SPARK_YARN_USER_ENV").foreach { userEnvs =>
+      // Allow users to specify some environment variables.
+      YarnSparkHadoopUtil.setEnvFromInputString(env, userEnvs, File.pathSeparator)
+
+      // Pass SPARK_YARN_USER_ENV itself to the AM so it can use it to set up executor environments.
+      env("SPARK_YARN_USER_ENV") = userEnvs
+    }
 
     env
   }
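
For context on the hunk above: SPARK_YARN_USER_ENV carries user-supplied KEY=VALUE pairs joined by the platform path separator, and the patch both expands them into the client's launch environment and forwards the raw string so the ApplicationMaster can repeat the expansion for executor containers. A minimal sketch of that pattern follows; the parsing helper here is a simplified stand-in for YarnSparkHadoopUtil.setEnvFromInputString, whose exact parsing rules are not reproduced.

    import java.io.File
    import scala.collection.mutable

    object UserEnvPropagationSketch {
      // Simplified stand-in for YarnSparkHadoopUtil.setEnvFromInputString: splits the
      // input on the given separator and on the first '=' of each KEY=VALUE pair.
      def setEnvFromInputString(env: mutable.Map[String, String],
                                input: String,
                                separator: String): Unit = {
        input.split(separator).filter(_.contains("=")).foreach { pair =>
          val Array(key, value) = pair.split("=", 2)
          env(key.trim) = value.trim
        }
      }

      def main(args: Array[String]): Unit = {
        val env = mutable.HashMap[String, String]()
        // Same shape as the patched setupLaunchEnv: only act when the variable is set,
        // and forward the raw string so the AM can do the same for executors.
        sys.env.get("SPARK_YARN_USER_ENV").foreach { userEnvs =>
          setEnvFromInputString(env, userEnvs, File.pathSeparator)
          env("SPARK_YARN_USER_ENV") = userEnvs
        }
        println(env)
      }
    }
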
@@ -322,6 +326,12 @@ trait ClientBase extends Logging {
       JAVA_OPTS += "-XX:CMSIncrementalDutyCycle=10"
     }
 
+    // SPARK_JAVA_OPTS is deprecated, but for backwards compatibility:
+    sys.env.get("SPARK_JAVA_OPTS").foreach { opts =>
+      sparkConf.set("spark.executor.extraJavaOptions", opts)
+      sparkConf.set("spark.driver.extraJavaOptions", opts)
+    }
+
     // TODO: it might be nicer to pass these as an internal environment variable rather than
     // as Java options, due to complications with string parsing of nested quotes.
     if (args.amClass == classOf[ExecutorLauncher].getName) {
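
The second hunk is the backwards-compatibility shim for the deprecated SPARK_JAVA_OPTS: when the variable is set, the client copies it into both spark.executor.extraJavaOptions and spark.driver.extraJavaOptions before launching on YARN. A rough standalone illustration of that shim, assuming spark-core is on the classpath (SparkConf(false) just avoids pulling in system properties):

    import org.apache.spark.SparkConf

    object SparkJavaOptsCompatSketch {
      def main(args: Array[String]): Unit = {
        // loadDefaults = false: start from an empty conf so only the shim's effect shows
        val sparkConf = new SparkConf(false)

        // Mirror of the patched ClientBase logic: propagate the deprecated env var
        // into the per-role extraJavaOptions keys if, and only if, it is set.
        sys.env.get("SPARK_JAVA_OPTS").foreach { opts =>
          sparkConf.set("spark.executor.extraJavaOptions", opts)
          sparkConf.set("spark.driver.extraJavaOptions", opts)
        }

        println(sparkConf.getOption("spark.executor.extraJavaOptions"))
      }
    }
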

http://git-wip-us.apache.org/repos/asf/spark/blob/bf8d0aa2/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
----------------------------------------------------------------------
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
index 7d07f6f..96f8aa9 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
@@ -71,8 +71,8 @@ trait ExecutorRunnableUtil extends Logging {
     /*
         else {
           // If no java_opts specified, default to using -XX:+CMSIncrementalMode
-          // It might be possible that other modes/config is being done in SPARK_JAVA_OPTS, so we dont
-          // want to mess with it.
+          // It might be possible that other modes/config is being done in spark.executor.extraJavaOptions,
+          // so we dont want to mess with it.
           // In our expts, using (default) throughput collector has severe perf ramnifications in
           // multi-tennent machines
           // The options are based on
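
Since SPARK_JAVA_OPTS is only kept for backwards compatibility, the forward-looking way to pass JVM flags on YARN is to set the extraJavaOptions keys directly on the SparkConf (or in spark-defaults.conf). A short hedged example; the flag values are purely illustrative:

    import org.apache.spark.SparkConf

    object ExtraJavaOptionsExample {
      def main(args: Array[String]): Unit = {
        // Preferred replacement for SPARK_JAVA_OPTS: per-role JVM options set on the
        // conf that ships with the application. Values here are illustrative only.
        val conf = new SparkConf()
          .setAppName("yarn-extra-java-options-example")
          .set("spark.executor.extraJavaOptions", "-XX:+UseConcMarkSweepGC")
          .set("spark.driver.extraJavaOptions", "-Dlog4j.debug=true")

        println(conf.get("spark.executor.extraJavaOptions"))
      }
    }
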