Posted to commits@spark.apache.org by pw...@apache.org on 2014/01/10 03:38:22 UTC

[17/37] git commit: Some notes and TODO about dependencies

Some notes and TODO about dependencies


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/c8c8b42a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/c8c8b42a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/c8c8b42a

Branch: refs/heads/master
Commit: c8c8b42a6fde0d59217b264bb2439751696c467f
Parents: 55c8bb7
Author: Patrick Wendell <pw...@gmail.com>
Authored: Fri Dec 27 15:13:11 2013 -0800
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Fri Dec 27 15:13:11 2013 -0800

----------------------------------------------------------------------
 .../apache/spark/deploy/client/DriverClientArguments.scala   | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/c8c8b42a/core/src/main/scala/org/apache/spark/deploy/client/DriverClientArguments.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/client/DriverClientArguments.scala b/core/src/main/scala/org/apache/spark/deploy/client/DriverClientArguments.scala
index 28bc549..0c84cc9 100644
--- a/core/src/main/scala/org/apache/spark/deploy/client/DriverClientArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/DriverClientArguments.scala
@@ -88,6 +88,12 @@ private[spark] class DriverClientArguments(args: Array[String]) {
    * Print usage and exit JVM with the given exit code.
    */
   def printUsageAndExit(exitCode: Int) {
+    // TODO: Document the submission approach here. It is:
+    //      1) Create an uber jar with your application and dependencies (excluding Spark)
+    //      2) You'll need to add this jar using addJar(X) inside of your spark context
+
+    // TODO: It wouldn't be too hard to allow users to submit their app and dependency jars
+    //       separately, similar to the YARN client.
     System.err.println(
       "usage: DriverClient [options] launch <active-master> <jar-url> <main-class> " +
         "[driver options]\n" +
@@ -95,7 +101,7 @@ private[spark] class DriverClientArguments(args: Array[String]) {
       "Options:\n" +
       "  -c CORES, --cores CORES                Number of cores to request \n" +
       "  -m MEMORY, --memory MEMORY             Megabytes of memory to request\n" +
-      "  -j JAVA_OPT, --java-option JAVA_OPT    Java option to pass to driver\n" +
+      "  -o JAVA_OPT, --java-option JAVA_OPT    JVM option to pass to driver\n" +
       "  -e K=V, --environment-variable K=V     Environment variable to pass to driver\n")
     System.exit(exitCode)
   }
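----------------------------------------------------------------------

A minimal sketch of the submission approach noted in the TODO above, for
illustration only: the master URL, application name, and jar path are
assumptions, not part of this commit. The idea is to build an uber jar
containing the application and its dependencies (excluding Spark itself),
then register that jar on the SparkContext so executors can load the
application classes.

    import org.apache.spark.SparkContext

    object MyApp {
      def main(args: Array[String]) {
        // Standalone master URL is assumed here purely for the example.
        val sc = new SparkContext("spark://master:7077", "MyApp")

        // Uber jar built with a tool such as sbt-assembly; path is hypothetical.
        sc.addJar("/path/to/my-app-assembly.jar")

        // ... application logic using sc ...

        sc.stop()
      }
    }

The driver itself would then be launched through the client shown above,
i.e. "DriverClient [options] launch <active-master> <jar-url> <main-class>",
with JVM options passed via the renamed -o/--java-option flag.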