You are viewing a plain-text version of this content; the canonical (HTML) link was present in the original page but is not preserved in this extraction.
Posted to commits@spark.apache.org by pw...@apache.org on 2014/01/10 03:38:36 UTC

[31/37] git commit: Fixes

Fixes


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/c78b381e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/c78b381e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/c78b381e

Branch: refs/heads/master
Commit: c78b381e91c9902a1510a2ed4ec5c898b51adfe8
Parents: d0533f7
Author: Patrick Wendell <pw...@gmail.com>
Authored: Tue Jan 7 23:56:04 2014 -0800
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Wed Jan 8 00:09:12 2014 -0800

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/deploy/Client.scala        | 2 +-
 .../scala/org/apache/spark/deploy/worker/ExecutorRunner.scala   | 2 +-
 docs/spark-standalone.md                                        | 5 +++--
 3 files changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/c78b381e/core/src/main/scala/org/apache/spark/deploy/Client.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/Client.scala b/core/src/main/scala/org/apache/spark/deploy/Client.scala
index 0475bb1..43b9b1c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/Client.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/Client.scala
@@ -55,7 +55,7 @@ class DriverActor(master: String, response: Promise[(Boolean, String)]) extends
 /**
  * Executable utility for starting and terminating drivers inside of a standalone cluster.
  */
-object DriverClient {
+object Client {
 
   def main(args: Array[String]) {
     val driverArgs = new ClientArguments(args)

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/c78b381e/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
index a9cb998..18885d7 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
@@ -100,7 +100,7 @@ private[spark] class ExecutorRunner(
 
   def getCommandSeq = {
     val command = Command(appDesc.command.mainClass,
-      appDesc.command.arguments.map(substituteVariables), appDesc.command.environment)
+      appDesc.command.arguments.map(substituteVariables) ++ Seq(appId), appDesc.command.environment)
     CommandUtils.buildCommandSeq(command, memory, sparkHome.getAbsolutePath)
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/c78b381e/docs/spark-standalone.md
----------------------------------------------------------------------
diff --git a/docs/spark-standalone.md b/docs/spark-standalone.md
index 7da6474..ecd642c 100644
--- a/docs/spark-standalone.md
+++ b/docs/spark-standalone.md
@@ -151,19 +151,20 @@ You can also pass an option `-c <numCores>` to control the number of cores that
 You may also run your application entirely inside of the cluster by submitting your application driver using the submission client. The syntax for submitting applications is as follows:
 
 
-    ./spark-class org.apache.spark.deploy.client.DriverClient launch 
+    ./spark-class org.apache.spark.deploy.Client launch 
        [client-options] \
        <cluster-url> <application-jar-url> <main-class> \
        [application-options]
 
     cluster-url: The URL of the master node.
-    application-jar-url: Path to a bundled jar including your application and all dependencies. Currently, the URL must be visible from inside of your cluster, for instance, in an HDFS directory. 
+    application-jar-url: Path to a bundled jar including your application and all dependencies. Currently, the URL must be globally visible inside of your cluster, for instance, an `hdfs://` path or a `file://` path that is present on all nodes. 
     main-class: The entry point for your application.
 
     Client Options:
       --memory <count> (amount of memory, in MB, allocated for your driver program)
       --cores <count> (number of cores allocated for your driver program)
       --supervise (whether to automatically restart your driver on application or node failure)
+      --verbose (prints increased logging output)
 
 Keep in mind that your driver program will be executed on a remote worker machine. You can control the execution environment in the following ways: