Posted to commits@spark.apache.org by pw...@apache.org on 2014/03/02 01:21:31 UTC

git commit: [SPARK-1150] fix repo location in create script

Repository: spark
Updated Branches:
  refs/heads/master 556c56689 -> 9aa095711


[SPARK-1150] fix repo location in create script

https://spark-project.atlassian.net/browse/SPARK-1150

fix the repo location in the create_release script

Author: Mark Grover <ma...@apache.org>

Closes #48 from CodingCat/script_fixes and squashes the following commits:

01f4bf7 [Mark Grover] Fixing some nitpicks
d2244d4 [Mark Grover] SPARK-676: Abbreviation in SPARK_MEM but not in SPARK_WORKER_MEMORY


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9aa09571
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9aa09571
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9aa09571

Branch: refs/heads/master
Commit: 9aa095711858ce8670e51488f66a3d7c1a821c30
Parents: 556c566
Author: Mark Grover <ma...@apache.org>
Authored: Sat Mar 1 16:21:22 2014 -0800
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Sat Mar 1 16:21:22 2014 -0800

----------------------------------------------------------------------
 conf/spark-env.sh.template                                |  2 +-
 .../org/apache/spark/deploy/worker/WorkerArguments.scala  | 10 ++++++++--
 docs/spark-standalone.md                                  |  4 ++--
 3 files changed, 11 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/9aa09571/conf/spark-env.sh.template
----------------------------------------------------------------------
diff --git a/conf/spark-env.sh.template b/conf/spark-env.sh.template
index 6432a56..619fc27 100755
--- a/conf/spark-env.sh.template
+++ b/conf/spark-env.sh.template
@@ -15,7 +15,7 @@
 # - SPARK_MASTER_IP, to bind the master to a different IP address or hostname
 # - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports
 # - SPARK_WORKER_CORES, to set the number of cores to use on this machine
-# - SPARK_WORKER_MEMORY, to set how much memory to use (e.g. 1000m, 2g)
+# - SPARK_WORKER_MEM, to set how much memory to use (e.g. 1000m, 2g)
 # - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT
 # - SPARK_WORKER_INSTANCES, to set the number of worker processes per node
 # - SPARK_WORKER_DIR, to set the working directory of worker processes
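
For context, the memory strings accepted above (e.g. 1000m, 2g) are parsed by
Utils.memoryStringToMb. A minimal Scala sketch of that kind of parser, written
for illustration only and not taken from Spark's source:

    // Illustrative parser for memory strings like "1000m" or "2g".
    // A simplified sketch, not Spark's actual Utils.memoryStringToMb.
    object MemoryString {
      def toMb(str: String): Int = {
        val lower = str.toLowerCase
        if (lower.endsWith("g")) lower.dropRight(1).toInt * 1024                  // GB -> MB
        else if (lower.endsWith("m")) lower.dropRight(1).toInt                    // already MB
        else if (lower.endsWith("k")) (lower.dropRight(1).toLong / 1024).toInt    // KB -> MB
        else lower.toInt                             // assume a bare number is already in MB
      }

      def main(args: Array[String]): Unit = {
        println(MemoryString.toMb("1000m")) // 1000
        println(MemoryString.toMb("2g"))    // 2048
      }
    }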

http://git-wip-us.apache.org/repos/asf/spark/blob/9aa09571/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
index d35d5be..52c4419 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
@@ -18,13 +18,15 @@
 package org.apache.spark.deploy.worker
 
 import java.lang.management.ManagementFactory
+import org.apache.spark.Logging
 
 import org.apache.spark.util.{IntParam, MemoryParam, Utils}
 
 /**
  * Command-line parser for the worker.
  */
-private[spark] class WorkerArguments(args: Array[String]) {
+private[spark] class WorkerArguments(args: Array[String]) extends Logging {
+  initLogging()
   var host = Utils.localHostName()
   var port = 0
   var webUiPort = 8081
@@ -40,9 +42,13 @@ private[spark] class WorkerArguments(args: Array[String]) {
   if (System.getenv("SPARK_WORKER_CORES") != null) {
     cores = System.getenv("SPARK_WORKER_CORES").toInt
   }
-  if (System.getenv("SPARK_WORKER_MEMORY") != null) {
+  if (System.getenv("SPARK_WORKER_MEM") != null) {
+    memory = Utils.memoryStringToMb(System.getenv("SPARK_WORKER_MEM"))
+  } else if (System.getenv("SPARK_WORKER_MEMORY") != null) {
+    logWarning("SPARK_WORKER_MEMORY is deprecated. Please use SPARK_WORKER_MEM instead")
     memory = Utils.memoryStringToMb(System.getenv("SPARK_WORKER_MEMORY"))
   }
+
   if (System.getenv("SPARK_WORKER_WEBUI_PORT") != null) {
     webUiPort = System.getenv("SPARK_WORKER_WEBUI_PORT").toInt
   }
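
The change above follows a standard deprecation-fallback pattern: consult the
new variable first, and only fall back to the old one, with a warning, when the
new one is unset. A self-contained sketch of the same pattern, reusing the
illustrative MemoryString.toMb parser from earlier; the real code uses the
Logging trait and Utils.memoryStringToMb instead of Console.err:

    // Sketch of the env-var deprecation fallback used in WorkerArguments above.
    // Not the actual Spark code: the warning goes through the Logging trait there.
    object WorkerMemoryEnv {
      def workerMemoryMb(defaultMb: Int): Int =
        sys.env.get("SPARK_WORKER_MEM") match {
          case Some(v) => MemoryString.toMb(v)       // new variable wins
          case None =>
            sys.env.get("SPARK_WORKER_MEMORY") match {
              case Some(v) =>                        // old variable still works...
                Console.err.println(
                  "SPARK_WORKER_MEMORY is deprecated. Please use SPARK_WORKER_MEM instead")
                MemoryString.toMb(v)                 // ...but warns on use
              case None => defaultMb                 // neither set: keep the default
            }
        }

      def main(args: Array[String]): Unit =
        println(s"worker memory: ${workerMemoryMb(defaultMb = 1024)} MB")
    }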

http://git-wip-us.apache.org/repos/asf/spark/blob/9aa09571/docs/spark-standalone.md
----------------------------------------------------------------------
diff --git a/docs/spark-standalone.md b/docs/spark-standalone.md
index 51fb3a4..a2dec86 100644
--- a/docs/spark-standalone.md
+++ b/docs/spark-standalone.md
@@ -104,8 +104,8 @@ You can optionally configure the cluster further by setting environment variable
     <td>Total number of cores to allow Spark applications to use on the machine (default: all available cores).</td>
   </tr>
   <tr>
-    <td><code>SPARK_WORKER_MEMORY</code></td>
-    <td>Total amount of memory to allow Spark applications to use on the machine, e.g. <code>1000m</code>, <code>2g</code> (default: total memory minus 1 GB); note that each application's <i>individual</i> memory is configured using its <code>spark.executor.memory</code> property.</td>
+    <td><code>SPARK_WORKER_MEM</code></td>
+    <td>Total amount of memory to allow Spark applications to use on the machine, e.g. <code>1000m</code>, <code>2g</code> (default: total memory minus 1 GB); note that each application's <i>individual</i> memory is configured using its <code>spark.executor.memory</code> property. The old variable <code>SPARK_WORKER_MEMORY</code> has been deprecated.</td>
   </tr>
   <tr>
     <td><code>SPARK_WORKER_WEBUI_PORT</code></td>
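
The rows above separate two knobs: SPARK_WORKER_MEM caps the total memory a
worker machine offers, while each application requests its own share via the
spark.executor.memory property. A minimal sketch of the per-application side
using the public SparkConf API; the app name and master URL below are
placeholders, not values from this commit:

    // Setting the per-application executor memory discussed in the docs above.
    // The master URL and app name are placeholders for illustration.
    import org.apache.spark.{SparkConf, SparkContext}

    object ExecutorMemoryExample {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
          .setAppName("executor-memory-example")    // placeholder app name
          .setMaster("spark://master-host:7077")    // placeholder standalone master
          .set("spark.executor.memory", "2g")       // this app's share per executor
        val sc = new SparkContext(conf)
        try {
          println(sc.parallelize(1 to 10).count())  // trivial job to exercise the config
        } finally {
          sc.stop()
        }
      }
    }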