Posted to commits@spark.apache.org by pw...@apache.org on 2014/01/03 20:25:05 UTC

[14/21] git commit: Merge branch 'scripts-reorg' of github.com:shane-huang/incubator-spark into spark-915-segregate-scripts

Merge branch 'scripts-reorg' of github.com:shane-huang/incubator-spark into spark-915-segregate-scripts

Conflicts:
	bin/spark-shell
	core/pom.xml
	core/src/main/scala/org/apache/spark/SparkContext.scala
	core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
	core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
	core/src/test/scala/org/apache/spark/DriverSuite.scala
	python/run-tests
	sbin/compute-classpath.sh
	sbin/spark-class
	sbin/stop-slaves.sh


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/980afd28
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/980afd28
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/980afd28

Branch: refs/heads/master
Commit: 980afd280a331103ce7391adaf484dd497218741
Parents: 3713f81 52ccf4f
Author: Prashant Sharma <pr...@imaginea.com>
Authored: Thu Jan 2 17:55:21 2014 +0530
Committer: Prashant Sharma <pr...@imaginea.com>
Committed: Thu Jan 2 17:55:21 2014 +0530

----------------------------------------------------------------------
 assembly/src/main/assembly/assembly.xml         |   11 +-
 bin/compute-classpath.cmd                       |   69 --
 bin/compute-classpath.sh                        |   75 --
 bin/pyspark                                     |   70 ++
 bin/pyspark.cmd                                 |   23 +
 bin/pyspark2.cmd                                |   55 +
 bin/run-example                                 |   91 ++
 bin/run-example.cmd                             |   23 +
 bin/run-example2.cmd                            |   61 ++
 bin/slaves.sh                                   |   91 --
 bin/spark-config.sh                             |   36 -
 bin/spark-daemon.sh                             |  183 ----
 bin/spark-daemons.sh                            |   35 -
 bin/spark-shell                                 |  102 ++
 bin/spark-shell.cmd                             |   23 +
 bin/start-all.sh                                |   34 -
 bin/start-master.sh                             |   52 -
 bin/start-slave.sh                              |   35 -
 bin/start-slaves.sh                             |   48 -
 bin/stop-all.sh                                 |   32 -
 bin/stop-master.sh                              |   27 -
 bin/stop-slaves.sh                              |   35 -
 .../spark/deploy/worker/ExecutorRunner.scala    |    2 +-
 .../mesos/CoarseMesosSchedulerBackend.scala     |    4 +-
 .../cluster/mesos/MesosSchedulerBackend.scala   |    4 +-
 .../apache/spark/ui/UIWorkloadGenerator.scala   |    2 +-
 .../scala/org/apache/spark/DriverSuite.scala    |    2 +-
 data/kmeans_data.txt                            |    6 +
 data/lr_data.txt                                | 1000 ++++++++++++++++++
 data/pagerank_data.txt                          |    6 +
 docs/running-on-yarn.md                         |    4 +-
 docs/spark-standalone.md                        |   14 +-
 kmeans_data.txt                                 |    6 -
 lr_data.txt                                     | 1000 ------------------
 make-distribution.sh                            |    5 +-
 pagerank_data.txt                               |    6 -
 pyspark                                         |   70 --
 pyspark.cmd                                     |   23 -
 pyspark2.cmd                                    |   55 -
 python/pyspark/java_gateway.py                  |    2 +-
 python/run-tests                                |    2 +-
 repl-bin/src/deb/bin/run                        |    3 +-
 repl/pom.xml                                    |    1 -
 run-example                                     |   91 --
 run-example.cmd                                 |   23 -
 run-example2.cmd                                |   61 --
 sbin/compute-classpath.cmd                      |   69 ++
 sbin/compute-classpath.sh                       |   75 ++
 sbin/slaves.sh                                  |   91 ++
 sbin/spark-class                                |  154 +++
 sbin/spark-class.cmd                            |   23 +
 sbin/spark-class2.cmd                           |   85 ++
 sbin/spark-config.sh                            |   36 +
 sbin/spark-daemon.sh                            |  183 ++++
 sbin/spark-daemons.sh                           |   35 +
 sbin/spark-executor                             |   23 +
 sbin/start-all.sh                               |   34 +
 sbin/start-master.sh                            |   52 +
 sbin/start-slave.sh                             |   35 +
 sbin/start-slaves.sh                            |   48 +
 sbin/stop-all.sh                                |   32 +
 sbin/stop-master.sh                             |   27 +
 sbin/stop-slaves.sh                             |   35 +
 spark-class                                     |  154 ---
 spark-class.cmd                                 |   23 -
 spark-class2.cmd                                |   85 --
 spark-executor                                  |   22 -
 spark-shell                                     |  102 --
 spark-shell.cmd                                 |   22 -
 69 files changed, 2521 insertions(+), 2527 deletions(-)
----------------------------------------------------------------------
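
The reorganization above (SPARK-915) moves the user-facing launch scripts from the repository root into bin/, the cluster-administration and internal scripts into sbin/, and the sample data files into data/. A minimal sketch of how the entry points shift, assuming an assembly built with sbt/sbt assembly and using the SparkPi example class only as an illustration:

    # before: launch scripts lived at the repository root
    ./spark-shell
    ./pyspark
    ./run-example org.apache.spark.examples.SparkPi local

    # after: user-facing scripts under bin/, daemon and internal scripts under sbin/
    ./bin/spark-shell
    ./bin/pyspark
    ./bin/run-example org.apache.spark.examples.SparkPi local
    ./sbin/start-master.sh
    ./sbin/compute-classpath.sh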


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/bin/pyspark
----------------------------------------------------------------------
diff --cc bin/pyspark
index 0000000,392a92b..d6810f4
mode 000000,100755..100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@@ -1,0 -1,66 +1,70 @@@
+ #!/usr/bin/env bash
+ 
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements.  See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License.  You may obtain a copy of the License at
+ #
+ #    http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+ 
+ # Figure out where the Scala framework is installed
+ FWDIR="$(cd `dirname $0`/..; pwd)"
+ 
+ # Export this as SPARK_HOME
+ export SPARK_HOME="$FWDIR"
+ 
 -SCALA_VERSION=2.9.3
++SCALA_VERSION=2.10
+ 
+ # Exit if the user hasn't compiled Spark
+ if [ ! -f "$FWDIR/RELEASE" ]; then
+   # Exit if the user hasn't compiled Spark
+   ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar >& /dev/null
+   if [[ $? != 0 ]]; then
+     echo "Failed to find Spark assembly in $FWDIR/assembly/target" >&2
+     echo "You need to build Spark with sbt/sbt assembly before running this program" >&2
+     exit 1
+   fi
+ fi
+ 
+ # Load environment variables from conf/spark-env.sh, if it exists
+ if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
+   . $FWDIR/conf/spark-env.sh
+ fi
+ 
+ # Figure out which Python executable to use
+ if [ -z "$PYSPARK_PYTHON" ] ; then
+   PYSPARK_PYTHON="python"
+ fi
+ export PYSPARK_PYTHON
+ 
+ # Add the PySpark classes to the Python path:
+ export PYTHONPATH=$SPARK_HOME/python/:$PYTHONPATH
+ 
+ # Load the PySpark shell.py script when ./pyspark is used interactively:
+ export OLD_PYTHONSTARTUP=$PYTHONSTARTUP
+ export PYTHONSTARTUP=$FWDIR/python/pyspark/shell.py
+ 
+ if [ -n "$IPYTHON_OPTS" ]; then
+   IPYTHON=1
+ fi
+ 
+ if [[ "$IPYTHON" = "1" ]] ; then
 -  IPYTHON_OPTS=${IPYTHON_OPTS:--i}
 -  exec ipython "$IPYTHON_OPTS" -c "%run $PYTHONSTARTUP"
++  # IPython <1.0.0 doesn't honor PYTHONSTARTUP, while 1.0.0+ does. 
++  # Hence we clear PYTHONSTARTUP and use the -c "%run $IPYTHONSTARTUP" command which works on all versions
++  # We also force interactive mode with "-i"
++  IPYTHONSTARTUP=$PYTHONSTARTUP
++  PYTHONSTARTUP=
++  exec ipython "$IPYTHON_OPTS" -i -c "%run $IPYTHONSTARTUP"
+ else
+   exec "$PYSPARK_PYTHON" "$@"
+ fi
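
The IPython branch above clears PYTHONSTARTUP and replays the shell bootstrap through "%run", so it behaves the same on IPython releases before and after 1.0.0. A hedged usage sketch (the option passed via IPYTHON_OPTS is only an illustration):

    # plain Python REPL with the PySpark shell bootstrap
    ./bin/pyspark

    # IPython front-end; setting IPYTHON_OPTS alone also flips IPYTHON to 1
    IPYTHON=1 ./bin/pyspark
    IPYTHON_OPTS="--pylab" ./bin/pyspark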

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/bin/pyspark2.cmd
----------------------------------------------------------------------
diff --cc bin/pyspark2.cmd
index 0000000,bb8e624..9579109
mode 000000,100644..100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@@ -1,0 -1,55 +1,55 @@@
+ @echo off
+ 
+ rem
+ rem Licensed to the Apache Software Foundation (ASF) under one or more
+ rem contributor license agreements.  See the NOTICE file distributed with
+ rem this work for additional information regarding copyright ownership.
+ rem The ASF licenses this file to You under the Apache License, Version 2.0
+ rem (the "License"); you may not use this file except in compliance with
+ rem the License.  You may obtain a copy of the License at
+ rem
+ rem    http://www.apache.org/licenses/LICENSE-2.0
+ rem
+ rem Unless required by applicable law or agreed to in writing, software
+ rem distributed under the License is distributed on an "AS IS" BASIS,
+ rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ rem See the License for the specific language governing permissions and
+ rem limitations under the License.
+ rem
+ 
 -set SCALA_VERSION=2.9.3
++set SCALA_VERSION=2.10
+ 
+ rem Figure out where the Spark framework is installed
+ set FWDIR=%~dp0..\
+ 
+ rem Export this as SPARK_HOME
+ set SPARK_HOME=%FWDIR%
+ 
+ rem Test whether the user has built Spark
+ if exist "%FWDIR%RELEASE" goto skip_build_test
+ set FOUND_JAR=0
+ for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
+   set FOUND_JAR=1
+ )
+ if "%FOUND_JAR%"=="0" (
+   echo Failed to find Spark assembly JAR.
+   echo You need to build Spark with sbt\sbt assembly before running this program.
+   goto exit
+ )
+ :skip_build_test
+ 
+ rem Load environment variables from conf\spark-env.cmd, if it exists
+ if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
+ 
+ rem Figure out which Python to use.
+ if "x%PYSPARK_PYTHON%"=="x" set PYSPARK_PYTHON=python
+ 
+ set PYTHONPATH=%FWDIR%python;%PYTHONPATH%
+ 
+ set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
+ set PYTHONSTARTUP=%FWDIR%python\pyspark\shell.py
+ 
+ echo Running %PYSPARK_PYTHON% with PYTHONPATH=%PYTHONPATH%
+ 
+ "%PYSPARK_PYTHON%" %*
+ :exit

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/bin/run-example
----------------------------------------------------------------------
diff --cc bin/run-example
index 0000000,ded08a8..f2699c3
mode 000000,100755..100755
--- a/bin/run-example
+++ b/bin/run-example
@@@ -1,0 -1,81 +1,91 @@@
+ #!/usr/bin/env bash
+ 
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements.  See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License.  You may obtain a copy of the License at
+ #
+ #    http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+ 
 -SCALA_VERSION=2.9.3
++cygwin=false
++case "`uname`" in
++    CYGWIN*) cygwin=true;;
++esac
++
++SCALA_VERSION=2.10
+ 
+ # Figure out where the Scala framework is installed
+ FWDIR="$(cd `dirname $0`/..; pwd)"
+ 
+ # Export this as SPARK_HOME
+ export SPARK_HOME="$FWDIR"
+ 
+ # Load environment variables from conf/spark-env.sh, if it exists
+ if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
+   . $FWDIR/conf/spark-env.sh
+ fi
+ 
+ if [ -z "$1" ]; then
+   echo "Usage: run-example <example-class> [<args>]" >&2
+   exit 1
+ fi
+ 
+ # Figure out the JAR file that our examples were packaged into. This includes a bit of a hack
+ # to avoid the -sources and -doc packages that are built by publish-local.
+ EXAMPLES_DIR="$FWDIR"/examples
+ SPARK_EXAMPLES_JAR=""
+ if [ -e "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
+   # Use the JAR from the SBT build
+   export SPARK_EXAMPLES_JAR=`ls "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
+ fi
+ if [ -e "$EXAMPLES_DIR"/target/spark-examples*[0-9Tg].jar ]; then
+   # Use the JAR from the Maven build
+   # TODO: this also needs to become an assembly!
+   export SPARK_EXAMPLES_JAR=`ls "$EXAMPLES_DIR"/target/spark-examples*[0-9Tg].jar`
+ fi
+ if [[ -z $SPARK_EXAMPLES_JAR ]]; then
+   echo "Failed to find Spark examples assembly in $FWDIR/examples/target" >&2
+   echo "You need to build Spark with sbt/sbt assembly before running this program" >&2
+   exit 1
+ fi
+ 
+ # Since the examples JAR ideally shouldn't include spark-core (that dependency should be
+ # "provided"), also add our standard Spark classpath, built using compute-classpath.sh.
+ CLASSPATH=`$FWDIR/sbin/compute-classpath.sh`
+ CLASSPATH="$SPARK_EXAMPLES_JAR:$CLASSPATH"
+ 
++if $cygwin; then
++    CLASSPATH=`cygpath -wp $CLASSPATH`
++    export SPARK_EXAMPLES_JAR=`cygpath -w $SPARK_EXAMPLES_JAR`
++fi
++
+ # Find java binary
+ if [ -n "${JAVA_HOME}" ]; then
+   RUNNER="${JAVA_HOME}/bin/java"
+ else
+   if [ `command -v java` ]; then
+     RUNNER="java"
+   else
+     echo "JAVA_HOME is not set" >&2
+     exit 1
+   fi
+ fi
+ 
+ if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
+   echo -n "Spark Command: "
+   echo "$RUNNER" -cp "$CLASSPATH" "$@"
+   echo "========================================"
+   echo
+ fi
+ 
+ exec "$RUNNER" -cp "$CLASSPATH" "$@"

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/bin/run-example2.cmd
----------------------------------------------------------------------
diff --cc bin/run-example2.cmd
index 0000000,e250a92..6861334
mode 000000,100644..100644
--- a/bin/run-example2.cmd
+++ b/bin/run-example2.cmd
@@@ -1,0 -1,61 +1,61 @@@
+ @echo off
+ 
+ rem
+ rem Licensed to the Apache Software Foundation (ASF) under one or more
+ rem contributor license agreements.  See the NOTICE file distributed with
+ rem this work for additional information regarding copyright ownership.
+ rem The ASF licenses this file to You under the Apache License, Version 2.0
+ rem (the "License"); you may not use this file except in compliance with
+ rem the License.  You may obtain a copy of the License at
+ rem
+ rem    http://www.apache.org/licenses/LICENSE-2.0
+ rem
+ rem Unless required by applicable law or agreed to in writing, software
+ rem distributed under the License is distributed on an "AS IS" BASIS,
+ rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ rem See the License for the specific language governing permissions and
+ rem limitations under the License.
+ rem
+ 
 -set SCALA_VERSION=2.9.3
++set SCALA_VERSION=2.10
+ 
+ rem Figure out where the Spark framework is installed
+ set FWDIR=%~dp0..\
+ 
+ rem Export this as SPARK_HOME
+ set SPARK_HOME=%FWDIR%
+ 
+ rem Load environment variables from conf\spark-env.cmd, if it exists
+ if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
+ 
+ rem Test that an argument was given
+ if not "x%1"=="x" goto arg_given
+   echo Usage: run-example ^<example-class^> [^<args^>]
+   goto exit
+ :arg_given
+ 
+ set EXAMPLES_DIR=%FWDIR%examples
+ 
+ rem Figure out the JAR file that our examples were packaged into.
+ set SPARK_EXAMPLES_JAR=
+ for %%d in ("%EXAMPLES_DIR%\target\scala-%SCALA_VERSION%\spark-examples*assembly*.jar") do (
+   set SPARK_EXAMPLES_JAR=%%d
+ )
+ if "x%SPARK_EXAMPLES_JAR%"=="x" (
+   echo Failed to find Spark examples assembly JAR.
+   echo You need to build Spark with sbt\sbt assembly before running this program.
+   goto exit
+ )
+ 
+ rem Compute Spark classpath using external script
+ set DONT_PRINT_CLASSPATH=1
+ call "%FWDIR%sbin\compute-classpath.cmd"
+ set DONT_PRINT_CLASSPATH=0
+ set CLASSPATH=%SPARK_EXAMPLES_JAR%;%CLASSPATH%
+ 
+ rem Figure out where java is.
+ set RUNNER=java
+ if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
+ 
+ "%RUNNER%" -cp "%CLASSPATH%" %JAVA_OPTS% %*
+ :exit

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/bin/spark-shell
----------------------------------------------------------------------
diff --cc bin/spark-shell
index 0000000,6717fe7..bc7386d
mode 000000,100755..100755
--- a/bin/spark-shell
+++ b/bin/spark-shell
@@@ -1,0 -1,87 +1,102 @@@
+ #!/usr/bin/env bash
+ 
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements.  See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License.  You may obtain a copy of the License at
+ #
+ #    http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+ 
+ #
+ # Shell script for starting the Spark Shell REPL
+ # Note that it will set MASTER to spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}
+ # if those two env vars are set in spark-env.sh but MASTER is not.
+ # Options:
+ #    -c <cores>    Set the number of cores for REPL to use
 -#
++
++cygwin=false
++case "`uname`" in
++    CYGWIN*) cygwin=true;;
++esac
+ 
+ # Enter posix mode for bash
+ set -o posix
+ 
+ FWDIR="$(cd `dirname $0`/..; pwd)"
+ 
+ for o in "$@"; do
+   if [ "$1" = "-c" -o "$1" = "--cores" ]; then
+     shift
+     if [ -n "$1" ]; then
+       OPTIONS="-Dspark.cores.max=$1"
+       shift
+     fi
+   fi
+ done
+ 
+ # Set MASTER from spark-env if possible
+ if [ -z "$MASTER" ]; then
+   if [ -e "$FWDIR/conf/spark-env.sh" ]; then
+     . "$FWDIR/conf/spark-env.sh"
+   fi
+   if [[ "x" != "x$SPARK_MASTER_IP" && "y" != "y$SPARK_MASTER_PORT" ]]; then
+     MASTER="spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}"
+     export MASTER
+   fi
+ fi
+ 
+ # Copy restore-TTY-on-exit functions from Scala script so spark-shell exits properly even in
+ # binary distribution of Spark where Scala is not installed
+ exit_status=127
+ saved_stty=""
+ 
+ # restore stty settings (echo in particular)
+ function restoreSttySettings() {
+   stty $saved_stty
+   saved_stty=""
+ }
+ 
+ function onExit() {
+   if [[ "$saved_stty" != "" ]]; then
+     restoreSttySettings
+   fi
+   exit $exit_status
+ }
+ 
+ # to reenable echo if we are interrupted before completing.
+ trap onExit INT
+ 
+ # save terminal settings
+ saved_stty=$(stty -g 2>/dev/null)
+ # clear on error so we don't later try to restore them
+ if [[ ! $? ]]; then
+   saved_stty=""
+ fi
+ 
 -$FWDIR/sbin/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
++if $cygwin; then
++    # Workaround for issue involving JLine and Cygwin
++    # (see http://sourceforge.net/p/jline/bugs/40/).
++    # If you're using the Mintty terminal emulator in Cygwin, may need to set the
++    # "Backspace sends ^H" setting in "Keys" section of the Mintty options
++    # (see https://github.com/sbt/sbt/issues/562).
++    stty -icanon min 1 -echo > /dev/null 2>&1
++    $FWDIR/sbin/spark-class -Djline.terminal=unix $OPTIONS org.apache.spark.repl.Main "$@"
++    stty icanon echo > /dev/null 2>&1
++else
++    $FWDIR/sbin/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
++fi
+ 
+ # record the exit status lest it be overwritten:
+ # then reenable echo and propagate the code.
+ exit_status=$?
+ onExit
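
spark-shell now dispatches through sbin/spark-class, adds the JLine workaround under Cygwin, and derives MASTER from SPARK_MASTER_IP/SPARK_MASTER_PORT in spark-env.sh when MASTER is unset. A sketch of the two common ways to point it at a cluster (the master URL is a placeholder):

    # cap the cores the REPL may take, per the -c/--cores option above
    ./bin/spark-shell -c 4

    # or supply the master explicitly instead of relying on spark-env.sh
    MASTER=spark://master-host:7077 ./bin/spark-shell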

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
----------------------------------------------------------------------
diff --cc core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
index 0881152,15b3397..0494ca8
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
@@@ -121,23 -119,23 +121,23 @@@ private[spark] class CoarseMesosSchedul
      }
      val command = CommandInfo.newBuilder()
        .setEnvironment(environment)
 -    val driverUrl = "akka://spark@%s:%s/user/%s".format(
 -      System.getProperty("spark.driver.host"),
 -      System.getProperty("spark.driver.port"),
 -      StandaloneSchedulerBackend.ACTOR_NAME)
 -    val uri = System.getProperty("spark.executor.uri")
 +    val driverUrl = "akka.tcp://spark@%s:%s/user/%s".format(
 +      conf.get("spark.driver.host"),
 +      conf.get("spark.driver.port"),
 +      CoarseGrainedSchedulerBackend.ACTOR_NAME)
 +    val uri = conf.get("spark.executor.uri", null)
      if (uri == null) {
-       val runScript = new File(sparkHome, "spark-class").getCanonicalPath
+       val runScript = new File(sparkHome, "./sbin/spark-class").getCanonicalPath
        command.setValue(
 -        "\"%s\" org.apache.spark.executor.StandaloneExecutorBackend %s %s %s %d".format(
 +        "\"%s\" org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %d".format(
            runScript, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
      } else {
        // Grab everything to the first '.'. We'll use that and '*' to
        // glob the directory "correctly".
        val basename = uri.split('/').last.split('.').head
        command.setValue(
-         "cd %s*; ./spark-class org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %d"
 -        "cd %s*; ./sbin/spark-class org.apache.spark.executor.StandaloneExecutorBackend %s %s %s %d".format(
 -          basename, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
++        "cd %s*; ./sbin/spark-class org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %d"
 +          .format(basename, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
        command.addUris(CommandInfo.URI.newBuilder().setValue(uri))
      }
      return command.build()

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
----------------------------------------------------------------------
diff --cc core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
index bb278fb,7e9c05c..ae8d527
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
@@@ -100,9 -101,9 +100,9 @@@ private[spark] class MesosSchedulerBack
      }
      val command = CommandInfo.newBuilder()
        .setEnvironment(environment)
 -    val uri = System.getProperty("spark.executor.uri")
 +    val uri = sc.conf.get("spark.executor.uri", null)
      if (uri == null) {
-       command.setValue(new File(sparkHome, "spark-executor").getCanonicalPath)
+       command.setValue(new File(sparkHome, "/sbin/spark-executor").getCanonicalPath)
      } else {
        // Grab everything to the first '.'. We'll use that and '*' to
        // glob the directory "correctly".

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
----------------------------------------------------------------------
diff --cc core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
index 58d47a2,b4128b7..f207627
--- a/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
@@@ -36,17 -35,17 +36,17 @@@ private[spark] object UIWorkloadGenerat
  
    def main(args: Array[String]) {
      if (args.length < 2) {
-       println("usage: ./spark-class org.apache.spark.ui.UIWorkloadGenerator [master] [FIFO|FAIR]")
 -      println("usage: ./sbin/spark-class spark.ui.UIWorkloadGenerator [master] [FIFO|FAIR]")
++      println("usage: ./sbin/spark-class org.apache.spark.ui.UIWorkloadGenerator [master] [FIFO|FAIR]")
        System.exit(1)
      }
 -    val master = args(0)
 -    val schedulingMode = SchedulingMode.withName(args(1))
 -    val appName = "Spark UI Tester"
  
 +    val conf = new SparkConf().setMaster(args(0)).setAppName("Spark UI tester")
 +
 +    val schedulingMode = SchedulingMode.withName(args(1))
      if (schedulingMode == SchedulingMode.FAIR) {
 -      System.setProperty("spark.scheduler.mode", "FAIR")
 +      conf.set("spark.scheduler.mode", "FAIR")
      }
 -    val sc = new SparkContext(master, appName)
 +    val sc = new SparkContext(conf)
  
      def setProperties(s: String) = {
        if(schedulingMode == SchedulingMode.FAIR) {

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/core/src/test/scala/org/apache/spark/DriverSuite.scala
----------------------------------------------------------------------
diff --cc core/src/test/scala/org/apache/spark/DriverSuite.scala
index 6d1695e,5e5ece0..c37fd9a
--- a/core/src/test/scala/org/apache/spark/DriverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DriverSuite.scala
@@@ -34,8 -34,8 +34,8 @@@ class DriverSuite extends FunSuite wit
      // Regression test for SPARK-530: "Spark driver process doesn't exit after finishing"
      val masters = Table(("master"), ("local"), ("local-cluster[2,1,512]"))
      forAll(masters) { (master: String) =>
 -      failAfter(30 seconds) {
 +      failAfter(60 seconds) {
-         Utils.execute(Seq("./spark-class", "org.apache.spark.DriverWithoutCleanup", master),
+         Utils.execute(Seq("./sbin/spark-class", "org.apache.spark.DriverWithoutCleanup", master),
            new File(System.getenv("SPARK_HOME")))
        }
      }

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/docs/running-on-yarn.md
----------------------------------------------------------------------
diff --cc docs/running-on-yarn.md
index aa75ca4,767eb5c..00cad99
--- a/docs/running-on-yarn.md
+++ b/docs/running-on-yarn.md
@@@ -48,13 -40,9 +48,13 @@@ System Properties
  Ensure that HADOOP_CONF_DIR or YARN_CONF_DIR points to the directory which contains the (client side) configuration files for the hadoop cluster.
  This would be used to connect to the cluster, write to the dfs and submit jobs to the resource manager.
  
 +There are two scheduler mode that can be used to launch spark application on YARN.
 +
 +## Launch spark application by YARN Client with yarn-standalone mode.
 +
  The command to launch the YARN Client is as follows:
  
-     SPARK_JAR=<SPARK_ASSEMBLY_JAR_FILE> ./spark-class org.apache.spark.deploy.yarn.Client \
+     SPARK_JAR=<SPARK_ASSEMBLY_JAR_FILE> ./sbin/spark-class org.apache.spark.deploy.yarn.Client \
        --jar <YOUR_APP_JAR_FILE> \
        --class <APP_MAIN_CLASS> \
        --args <APP_MAIN_ARGUMENTS> \

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/docs/spark-standalone.md
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/make-distribution.sh
----------------------------------------------------------------------
diff --cc make-distribution.sh
index 32bbdb9,13843bb..82638a2
--- a/make-distribution.sh
+++ b/make-distribution.sh
@@@ -95,13 -95,10 +95,10 @@@ cp $FWDIR/assembly/target/scala*/*assem
  
  # Copy other things
  mkdir "$DISTDIR"/conf
 -cp "$FWDIR/conf/*.template" "$DISTDIR"/conf
 +cp "$FWDIR"/conf/*.template "$DISTDIR"/conf
  cp -r "$FWDIR/bin" "$DISTDIR"
  cp -r "$FWDIR/python" "$DISTDIR"
- cp "$FWDIR/spark-class" "$DISTDIR"
- cp "$FWDIR/spark-shell" "$DISTDIR"
- cp "$FWDIR/spark-executor" "$DISTDIR"
- cp "$FWDIR/pyspark" "$DISTDIR"
+ cp -r "$FWDIR/sbin" "$DISTDIR"
  
  
  if [ "$MAKE_TGZ" == "true" ]; then

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/python/pyspark/java_gateway.py
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/python/run-tests
----------------------------------------------------------------------
diff --cc python/run-tests
index 4b71fff,8a08ae3..feba97c
--- a/python/run-tests
+++ b/python/run-tests
@@@ -29,7 -29,7 +29,7 @@@ FAILED=
  rm -f unit-tests.log
  
  function run_test() {
-     SPARK_TESTING=0 $FWDIR/pyspark $1 2>&1 | tee -a unit-tests.log
 -    $FWDIR/bin/pyspark $1 2>&1 | tee -a unit-tests.log
++    SPARK_TESTING=0 $FWDIR/bin/pyspark $1 2>&1 | tee -a unit-tests.log
      FAILED=$((PIPESTATUS[0]||$FAILED))
  }
  

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/repl-bin/src/deb/bin/run
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/repl/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/sbin/compute-classpath.cmd
----------------------------------------------------------------------
diff --cc sbin/compute-classpath.cmd
index 0000000,e0b8a8e..4f60bff
mode 000000,100644..100644
--- a/sbin/compute-classpath.cmd
+++ b/sbin/compute-classpath.cmd
@@@ -1,0 -1,69 +1,69 @@@
+ @echo off
+ 
+ rem
+ rem Licensed to the Apache Software Foundation (ASF) under one or more
+ rem contributor license agreements.  See the NOTICE file distributed with
+ rem this work for additional information regarding copyright ownership.
+ rem The ASF licenses this file to You under the Apache License, Version 2.0
+ rem (the "License"); you may not use this file except in compliance with
+ rem the License.  You may obtain a copy of the License at
+ rem
+ rem    http://www.apache.org/licenses/LICENSE-2.0
+ rem
+ rem Unless required by applicable law or agreed to in writing, software
+ rem distributed under the License is distributed on an "AS IS" BASIS,
+ rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ rem See the License for the specific language governing permissions and
+ rem limitations under the License.
+ rem
+ 
+ rem This script computes Spark's classpath and prints it to stdout; it's used by both the "run"
+ rem script and the ExecutorRunner in standalone cluster mode.
+ 
 -set SCALA_VERSION=2.9.3
++set SCALA_VERSION=2.10
+ 
+ rem Figure out where the Spark framework is installed
+ set FWDIR=%~dp0..\
+ 
+ rem Load environment variables from conf\spark-env.cmd, if it exists
+ if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
+ 
+ rem Build up classpath
+ set CLASSPATH=%FWDIR%conf
+ if exist "%FWDIR%RELEASE" (
+   for %%d in ("%FWDIR%jars\spark-assembly*.jar") do (
+     set ASSEMBLY_JAR=%%d
+   )
+ ) else (
+   for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
+     set ASSEMBLY_JAR=%%d
+   )
+ )
+ set CLASSPATH=%CLASSPATH%;%ASSEMBLY_JAR%
+ 
+ if "x%SPARK_TESTING%"=="x1" (
+   rem Add test clases to path
+   set CLASSPATH=%CLASSPATH%;%FWDIR%core\target\scala-%SCALA_VERSION%\test-classes
+   set CLASSPATH=%CLASSPATH%;%FWDIR%repl\target\scala-%SCALA_VERSION%\test-classes
+   set CLASSPATH=%CLASSPATH%;%FWDIR%mllib\target\scala-%SCALA_VERSION%\test-classes
+   set CLASSPATH=%CLASSPATH%;%FWDIR%bagel\target\scala-%SCALA_VERSION%\test-classes
+   set CLASSPATH=%CLASSPATH%;%FWDIR%streaming\target\scala-%SCALA_VERSION%\test-classes
+ )
+ 
+ rem Add hadoop conf dir - else FileSystem.*, etc fail
+ rem Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts
+ rem the configurtion files.
+ if "x%HADOOP_CONF_DIR%"=="x" goto no_hadoop_conf_dir
+   set CLASSPATH=%CLASSPATH%;%HADOOP_CONF_DIR%
+ :no_hadoop_conf_dir
+ 
+ if "x%YARN_CONF_DIR%"=="x" goto no_yarn_conf_dir
+   set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%
+ :no_yarn_conf_dir
+ 
+ rem A bit of a hack to allow calling this script within run2.cmd without seeing output
+ if "%DONT_PRINT_CLASSPATH%"=="1" goto exit
+ 
+ echo %CLASSPATH%
+ 
+ :exit

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/sbin/compute-classpath.sh
----------------------------------------------------------------------
diff --cc sbin/compute-classpath.sh
index 0000000,cfe5fe7..0c82310
mode 000000,100755..100755
--- a/sbin/compute-classpath.sh
+++ b/sbin/compute-classpath.sh
@@@ -1,0 -1,61 +1,75 @@@
+ #!/usr/bin/env bash
+ 
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements.  See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License.  You may obtain a copy of the License at
+ #
+ #    http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+ 
+ # This script computes Spark's classpath and prints it to stdout; it's used by both the "run"
+ # script and the ExecutorRunner in standalone cluster mode.
+ 
 -SCALA_VERSION=2.9.3
++SCALA_VERSION=2.10
+ 
+ # Figure out where Spark is installed
+ FWDIR="$(cd `dirname $0`/..; pwd)"
+ 
+ # Load environment variables from conf/spark-env.sh, if it exists
+ if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
+   . $FWDIR/conf/spark-env.sh
+ fi
+ 
+ # Build up classpath
 -CLASSPATH="$FWDIR/conf"
 -if [ -f "$FWDIR/RELEASE" ]; then
 -  ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark-assembly*.jar`
++CLASSPATH="$SPARK_CLASSPATH:$FWDIR/conf"
++
++# First check if we have a dependencies jar. If so, include binary classes with the deps jar
++if [ -f "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*-deps.jar ]; then
++  CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes"
++  CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/classes"
++  CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/classes"
++  CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/classes"
++  CLASSPATH="$CLASSPATH:$FWDIR/streaming/target/scala-$SCALA_VERSION/classes"
++
++  DEPS_ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*-deps.jar`
++  CLASSPATH="$CLASSPATH:$DEPS_ASSEMBLY_JAR"
+ else
 -  ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar`
++  # Else use spark-assembly jar from either RELEASE or assembly directory
++  if [ -f "$FWDIR/RELEASE" ]; then
++    ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark-assembly*.jar`
++  else
++    ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar`
++  fi
++  CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
+ fi
 -CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
+ 
+ # Add test classes if we're running from SBT or Maven with SPARK_TESTING set to 1
+ if [[ $SPARK_TESTING == 1 ]]; then
+   CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/test-classes"
+   CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/test-classes"
+   CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/test-classes"
+   CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/test-classes"
+   CLASSPATH="$CLASSPATH:$FWDIR/streaming/target/scala-$SCALA_VERSION/test-classes"
+ fi
+ 
+ # Add hadoop conf dir if given -- otherwise FileSystem.*, etc fail !
+ # Note, this assumes that there is either a HADOOP_CONF_DIR or YARN_CONF_DIR which hosts
+ # the configurtion files.
+ if [ "x" != "x$HADOOP_CONF_DIR" ]; then
+   CLASSPATH="$CLASSPATH:$HADOOP_CONF_DIR"
+ fi
+ if [ "x" != "x$YARN_CONF_DIR" ]; then
+   CLASSPATH="$CLASSPATH:$YARN_CONF_DIR"
+ fi
+ 
+ echo "$CLASSPATH"
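
compute-classpath.sh now prefers the per-module class directories plus the *-deps assembly when one is present, and falls back to the full spark-assembly jar otherwise; SPARK_CLASSPATH is prepended in both cases. The launcher scripts in this commit capture its output directly; a quick way to inspect the result:

    # capture the classpath the way run-example and spark-class do
    CLASSPATH=`./sbin/compute-classpath.sh`

    # print one entry per line for readability
    echo "$CLASSPATH" | tr ':' '\n'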

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/sbin/slaves.sh
----------------------------------------------------------------------
diff --cc sbin/slaves.sh
index 0000000,68408bc..a5bc218
mode 000000,100755..100755
--- a/sbin/slaves.sh
+++ b/sbin/slaves.sh
@@@ -1,0 -1,74 +1,91 @@@
+ #!/usr/bin/env bash
+ 
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements.  See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License.  You may obtain a copy of the License at
+ #
+ #    http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+ 
+ # Run a shell command on all slave hosts.
+ #
+ # Environment Variables
+ #
+ #   SPARK_SLAVES    File naming remote hosts.
+ #     Default is ${SPARK_CONF_DIR}/slaves.
+ #   SPARK_CONF_DIR  Alternate conf dir. Default is ${SPARK_HOME}/conf.
+ #   SPARK_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
+ #   SPARK_SSH_OPTS Options passed to ssh when running remote commands.
+ ##
+ 
 -usage="Usage: slaves.sh [--config confdir] command..."
++usage="Usage: slaves.sh [--config <conf-dir>] command..."
+ 
+ # if no args specified, show usage
+ if [ $# -le 0 ]; then
+   echo $usage
+   exit 1
+ fi
+ 
+ sbin=`dirname "$0"`
+ sbin=`cd "$sbin"; pwd`
+ 
+ . "$sbin/spark-config.sh"
+ 
+ # If the slaves file is specified in the command line,
+ # then it takes precedence over the definition in
+ # spark-env.sh. Save it here.
+ HOSTLIST=$SPARK_SLAVES
+ 
++# Check if --config is passed as an argument. It is an optional parameter.
++# Exit if the argument is not a directory.
++if [ "$1" == "--config" ]
++then
++  shift
++  conf_dir=$1
++  if [ ! -d "$conf_dir" ]
++  then
++    echo "ERROR : $conf_dir is not a directory"
++    echo $usage
++    exit 1
++  else
++    export SPARK_CONF_DIR=$conf_dir
++  fi
++  shift
++fi
++
+ if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
+   . "${SPARK_CONF_DIR}/spark-env.sh"
+ fi
+ 
+ if [ "$HOSTLIST" = "" ]; then
+   if [ "$SPARK_SLAVES" = "" ]; then
+     export HOSTLIST="${SPARK_CONF_DIR}/slaves"
+   else
+     export HOSTLIST="${SPARK_SLAVES}"
+   fi
+ fi
+ 
+ # By default disable strict host key checking
+ if [ "$SPARK_SSH_OPTS" = "" ]; then
+   SPARK_SSH_OPTS="-o StrictHostKeyChecking=no"
+ fi
+ 
+ for slave in `cat "$HOSTLIST"|sed  "s/#.*$//;/^$/d"`; do
+  ssh $SPARK_SSH_OPTS $slave $"${@// /\\ }" \
+    2>&1 | sed "s/^/$slave: /" &
+  if [ "$SPARK_SLAVE_SLEEP" != "" ]; then
+    sleep $SPARK_SLAVE_SLEEP
+  fi
+ done
+ 
+ wait
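
slaves.sh runs the given command over ssh on every host listed in the slaves file (SPARK_SLAVES, defaulting to conf/slaves), and now accepts an optional --config directory that must exist. A sketch, with the command and paths purely illustrative:

    # run a command on every host in conf/slaves
    ./sbin/slaves.sh uptime

    # same, with an alternate configuration directory
    ./sbin/slaves.sh --config /path/to/alt-conf uptime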

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/sbin/spark-class
----------------------------------------------------------------------
diff --cc sbin/spark-class
index 0000000,3bdc29e..4e440d8
mode 000000,100755..100755
--- a/sbin/spark-class
+++ b/sbin/spark-class
@@@ -1,0 -1,117 +1,154 @@@
+ #!/usr/bin/env bash
+ 
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements.  See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License.  You may obtain a copy of the License at
+ #
+ #    http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+ 
 -SCALA_VERSION=2.9.3
++cygwin=false
++case "`uname`" in
++    CYGWIN*) cygwin=true;;
++esac
++
++SCALA_VERSION=2.10
+ 
+ # Figure out where the Scala framework is installed
+ FWDIR="$(cd `dirname $0`/..; pwd)"
+ 
+ # Export this as SPARK_HOME
+ export SPARK_HOME="$FWDIR"
+ 
+ # Load environment variables from conf/spark-env.sh, if it exists
+ if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
+   . $FWDIR/conf/spark-env.sh
+ fi
+ 
+ if [ -z "$1" ]; then
+   echo "Usage: spark-class <class> [<args>]" >&2
+   exit 1
+ fi
+ 
+ # If this is a standalone cluster daemon, reset SPARK_JAVA_OPTS and SPARK_MEM to reasonable
+ # values for that; it doesn't need a lot
+ if [ "$1" = "org.apache.spark.deploy.master.Master" -o "$1" = "org.apache.spark.deploy.worker.Worker" ]; then
+   SPARK_MEM=${SPARK_DAEMON_MEMORY:-512m}
+   SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.akka.logLifecycleEvents=true"
+   # Do not overwrite SPARK_JAVA_OPTS environment variable in this script
+   OUR_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS"   # Empty by default
+ else
+   OUR_JAVA_OPTS="$SPARK_JAVA_OPTS"
+ fi
+ 
+ 
+ # Add java opts for master, worker, executor. The opts maybe null
+ case "$1" in
+   'org.apache.spark.deploy.master.Master')
+     OUR_JAVA_OPTS="$OUR_JAVA_OPTS $SPARK_MASTER_OPTS"
+     ;;
+   'org.apache.spark.deploy.worker.Worker')
+     OUR_JAVA_OPTS="$OUR_JAVA_OPTS $SPARK_WORKER_OPTS"
+     ;;
 -  'org.apache.spark.executor.StandaloneExecutorBackend')
++  'org.apache.spark.executor.CoarseGrainedExecutorBackend')
+     OUR_JAVA_OPTS="$OUR_JAVA_OPTS $SPARK_EXECUTOR_OPTS"
+     ;;
+   'org.apache.spark.executor.MesosExecutorBackend')
+     OUR_JAVA_OPTS="$OUR_JAVA_OPTS $SPARK_EXECUTOR_OPTS"
+     ;;
+   'org.apache.spark.repl.Main')
+     OUR_JAVA_OPTS="$OUR_JAVA_OPTS $SPARK_REPL_OPTS"
+     ;;
+ esac
+ 
+ # Find the java binary
+ if [ -n "${JAVA_HOME}" ]; then
+   RUNNER="${JAVA_HOME}/bin/java"
+ else
+   if [ `command -v java` ]; then
+     RUNNER="java"
+   else
+     echo "JAVA_HOME is not set" >&2
+     exit 1
+   fi
+ fi
+ 
+ # Set SPARK_MEM if it isn't already set since we also use it for this process
+ SPARK_MEM=${SPARK_MEM:-512m}
+ export SPARK_MEM
+ 
+ # Set JAVA_OPTS to be able to load native libraries and to set heap size
+ JAVA_OPTS="$OUR_JAVA_OPTS"
+ JAVA_OPTS="$JAVA_OPTS -Djava.library.path=$SPARK_LIBRARY_PATH"
+ JAVA_OPTS="$JAVA_OPTS -Xms$SPARK_MEM -Xmx$SPARK_MEM"
+ # Load extra JAVA_OPTS from conf/java-opts, if it exists
+ if [ -e "$FWDIR/conf/java-opts" ] ; then
+   JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`"
+ fi
+ export JAVA_OPTS
+ # Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in ExecutorRunner.scala!
+ 
+ if [ ! -f "$FWDIR/RELEASE" ]; then
+   # Exit if the user hasn't compiled Spark
 -  ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar >& /dev/null
 -  if [[ $? != 0 ]]; then
 -    echo "Failed to find Spark assembly in $FWDIR/assembly/target" >&2
 -    echo "You need to build Spark with sbt/sbt assembly before running this program" >&2
++  num_jars=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep "spark-assembly.*hadoop.*.jar" | wc -l)
++  jars_list=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep "spark-assembly.*hadoop.*.jar")
++  if [ "$num_jars" -eq "0" ]; then
++    echo "Failed to find Spark assembly in $FWDIR/assembly/target/scala-$SCALA_VERSION/" >&2
++    echo "You need to build Spark with 'sbt/sbt assembly' before running this program." >&2
++    exit 1
++  fi
++  if [ "$num_jars" -gt "1" ]; then
++    echo "Found multiple Spark assembly jars in $FWDIR/assembly/target/scala-$SCALA_VERSION:" >&2
++    echo "$jars_list"
++    echo "Please remove all but one jar."
+     exit 1
+   fi
+ fi
+ 
++TOOLS_DIR="$FWDIR"/tools
++SPARK_TOOLS_JAR=""
++if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
++  # Use the JAR from the SBT build
++  export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
++fi
++if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
++  # Use the JAR from the Maven build
++  # TODO: this also needs to become an assembly!
++  export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`
++fi
++
+ # Compute classpath using external script
+ CLASSPATH=`$FWDIR/sbin/compute-classpath.sh`
++
++if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
++  CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
++fi
++
++if $cygwin; then
++  CLASSPATH=`cygpath -wp $CLASSPATH`
++  if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
++    export SPARK_TOOLS_JAR=`cygpath -w $SPARK_TOOLS_JAR`
++  fi
++fi
+ export CLASSPATH
+ 
+ if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
+   echo -n "Spark Command: "
+   echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
+   echo "========================================"
+   echo
+ fi
+ 
+ exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
++
++
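
spark-class remains the generic JVM launcher: it picks per-daemon java options, verifies that exactly one assembly jar exists, optionally appends the tools jar for the JavaAPICompletenessChecker, and applies the Cygwin classpath conversion. An invocation sketch borrowing the class and arguments from the UIWorkloadGenerator usage string updated in this commit (the master string is illustrative):

    # print the full launch command before executing it
    SPARK_PRINT_LAUNCH_COMMAND=1 ./sbin/spark-class org.apache.spark.ui.UIWorkloadGenerator local[2] FIFO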

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/sbin/spark-class2.cmd
----------------------------------------------------------------------
diff --cc sbin/spark-class2.cmd
index 0000000,5e00bd3..460e661
mode 000000,100644..100644
--- a/sbin/spark-class2.cmd
+++ b/sbin/spark-class2.cmd
@@@ -1,0 -1,78 +1,85 @@@
+ @echo off
+ 
+ rem
+ rem Licensed to the Apache Software Foundation (ASF) under one or more
+ rem contributor license agreements.  See the NOTICE file distributed with
+ rem this work for additional information regarding copyright ownership.
+ rem The ASF licenses this file to You under the Apache License, Version 2.0
+ rem (the "License"); you may not use this file except in compliance with
+ rem the License.  You may obtain a copy of the License at
+ rem
+ rem    http://www.apache.org/licenses/LICENSE-2.0
+ rem
+ rem Unless required by applicable law or agreed to in writing, software
+ rem distributed under the License is distributed on an "AS IS" BASIS,
+ rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ rem See the License for the specific language governing permissions and
+ rem limitations under the License.
+ rem
+ 
 -set SCALA_VERSION=2.9.3
++set SCALA_VERSION=2.10
+ 
+ rem Figure out where the Spark framework is installed
+ set FWDIR=%~dp0..\
+ 
+ rem Export this as SPARK_HOME
+ set SPARK_HOME=%FWDIR%
+ 
+ rem Load environment variables from conf\spark-env.cmd, if it exists
+ if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
+ 
+ rem Test that an argument was given
+ if not "x%1"=="x" goto arg_given
+   echo Usage: spark-class ^<class^> [^<args^>]
+   goto exit
+ :arg_given
+ 
+ set RUNNING_DAEMON=0
+ if "%1"=="spark.deploy.master.Master" set RUNNING_DAEMON=1
+ if "%1"=="spark.deploy.worker.Worker" set RUNNING_DAEMON=1
+ if "x%SPARK_DAEMON_MEMORY%" == "x" set SPARK_DAEMON_MEMORY=512m
+ set SPARK_DAEMON_JAVA_OPTS=%SPARK_DAEMON_JAVA_OPTS% -Dspark.akka.logLifecycleEvents=true
+ if "%RUNNING_DAEMON%"=="1" set SPARK_MEM=%SPARK_DAEMON_MEMORY%
+ rem Do not overwrite SPARK_JAVA_OPTS environment variable in this script
+ if "%RUNNING_DAEMON%"=="0" set OUR_JAVA_OPTS=%SPARK_JAVA_OPTS%
+ if "%RUNNING_DAEMON%"=="1" set OUR_JAVA_OPTS=%SPARK_DAEMON_JAVA_OPTS%
+ 
+ rem Figure out how much memory to use per executor and set it as an environment
+ rem variable so that our process sees it and can report it to Mesos
+ if "x%SPARK_MEM%"=="x" set SPARK_MEM=512m
+ 
+ rem Set JAVA_OPTS to be able to load native libraries and to set heap size
+ set JAVA_OPTS=%OUR_JAVA_OPTS% -Djava.library.path=%SPARK_LIBRARY_PATH% -Xms%SPARK_MEM% -Xmx%SPARK_MEM%
+ rem Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in ExecutorRunner.scala!
+ 
+ rem Test whether the user has built Spark
+ if exist "%FWDIR%RELEASE" goto skip_build_test
+ set FOUND_JAR=0
+ for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
+   set FOUND_JAR=1
+ )
+ if "%FOUND_JAR%"=="0" (
+   echo Failed to find Spark assembly JAR.
+   echo You need to build Spark with sbt\sbt assembly before running this program.
+   goto exit
+ )
+ :skip_build_test
+ 
++set TOOLS_DIR=%FWDIR%tools
++set SPARK_TOOLS_JAR=
++for %%d in ("%TOOLS_DIR%\target\scala-%SCALA_VERSION%\spark-tools*assembly*.jar") do (
++  set SPARK_TOOLS_JAR=%%d
++)
++
+ rem Compute classpath using external script
+ set DONT_PRINT_CLASSPATH=1
+ call "%FWDIR%sbin\compute-classpath.cmd"
+ set DONT_PRINT_CLASSPATH=0
++set CLASSPATH=%CLASSPATH%;%SPARK_TOOLS_JAR%
+ 
+ rem Figure out where java is.
+ set RUNNER=java
+ if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
+ 
+ "%RUNNER%" -cp "%CLASSPATH%" %JAVA_OPTS% %*
+ :exit

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/sbin/spark-daemon.sh
----------------------------------------------------------------------
diff --cc sbin/spark-daemon.sh
index 0000000,ae82349..ca6b893
mode 000000,100755..100755
--- a/sbin/spark-daemon.sh
+++ b/sbin/spark-daemon.sh
@@@ -1,0 -1,164 +1,183 @@@
+ #!/usr/bin/env bash
+ 
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements.  See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License.  You may obtain a copy of the License at
+ #
+ #    http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+ 
+ # Runs a Spark command as a daemon.
+ #
+ # Environment Variables
+ #
+ #   SPARK_CONF_DIR  Alternate conf dir. Default is ${SPARK_PREFIX}/conf.
+ #   SPARK_LOG_DIR   Where log files are stored.  PWD by default.
+ #   SPARK_MASTER    host:path where spark code should be rsync'd from
+ #   SPARK_PID_DIR   The pid files are stored. /tmp by default.
+ #   SPARK_IDENT_STRING   A string representing this instance of spark. $USER by default
+ #   SPARK_NICENESS The scheduling priority for daemons. Defaults to 0.
+ ##
+ 
 -usage="Usage: spark-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] (start|stop) <spark-command> <spark-instance-number> <args...>"
++usage="Usage: spark-daemon.sh [--config <conf-dir>] (start|stop) <spark-command> <spark-instance-number> <args...>"
+ 
+ # if no args specified, show usage
+ if [ $# -le 1 ]; then
+   echo $usage
+   exit 1
+ fi
+ 
+ sbin=`dirname "$0"`
+ sbin=`cd "$sbin"; pwd`
+ 
+ . "$sbin/spark-config.sh"
+ 
+ # get arguments
++
++# Check if --config is passed as an argument. It is an optional parameter.
++# Exit if the argument is not a directory.
++
++if [ "$1" == "--config" ]
++then
++  shift
++  conf_dir=$1
++  if [ ! -d "$conf_dir" ]
++  then
++    echo "ERROR : $conf_dir is not a directory"
++    echo $usage
++    exit 1
++  else
++    export SPARK_CONF_DIR=$conf_dir
++  fi
++  shift
++fi
++
+ startStop=$1
+ shift
+ command=$1
+ shift
+ instance=$1
+ shift
+ 
+ spark_rotate_log ()
+ {
+     log=$1;
+     num=5;
+     if [ -n "$2" ]; then
+ 	num=$2
+     fi
+     if [ -f "$log" ]; then # rotate logs
+ 	while [ $num -gt 1 ]; do
+ 	    prev=`expr $num - 1`
+ 	    [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
+ 	    num=$prev
+ 	done
+ 	mv "$log" "$log.$num";
+     fi
+ }
+ 
+ if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
+   . "${SPARK_CONF_DIR}/spark-env.sh"
+ fi
+ 
+ if [ "$SPARK_IDENT_STRING" = "" ]; then
+   export SPARK_IDENT_STRING="$USER"
+ fi
+ 
+ 
+ export SPARK_PRINT_LAUNCH_COMMAND="1"
+ 
+ # get log directory
+ if [ "$SPARK_LOG_DIR" = "" ]; then
+   export SPARK_LOG_DIR="$SPARK_HOME/logs"
+ fi
+ mkdir -p "$SPARK_LOG_DIR"
+ touch $SPARK_LOG_DIR/.spark_test > /dev/null 2>&1
+ TEST_LOG_DIR=$?
+ if [ "${TEST_LOG_DIR}" = "0" ]; then
+   rm -f $SPARK_LOG_DIR/.spark_test
+ else
+   chown $SPARK_IDENT_STRING $SPARK_LOG_DIR
+ fi
+ 
+ if [ "$SPARK_PID_DIR" = "" ]; then
+   SPARK_PID_DIR=/tmp
+ fi
+ 
+ # some variables
+ export SPARK_LOGFILE=spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.log
+ export SPARK_ROOT_LOGGER="INFO,DRFA"
+ log=$SPARK_LOG_DIR/spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.out
+ pid=$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid
+ 
+ # Set default scheduling priority
+ if [ "$SPARK_NICENESS" = "" ]; then
+     export SPARK_NICENESS=0
+ fi
+ 
+ 
+ case $startStop in
+ 
+   (start)
+ 
+     mkdir -p "$SPARK_PID_DIR"
+ 
+     if [ -f $pid ]; then
+       if kill -0 `cat $pid` > /dev/null 2>&1; then
+         echo $command running as process `cat $pid`.  Stop it first.
+         exit 1
+       fi
+     fi
+ 
+     if [ "$SPARK_MASTER" != "" ]; then
+       echo rsync from $SPARK_MASTER
+       rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $SPARK_MASTER/ "$SPARK_HOME"
+     fi
+ 
+     spark_rotate_log "$log"
+     echo starting $command, logging to $log
+     cd "$SPARK_PREFIX"
+     nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/sbin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
+     newpid=$!
+     echo $newpid > $pid
+     sleep 2
+     # Check if the process has died; in that case we'll tail the log so the user can see
+     if ! kill -0 $newpid >/dev/null 2>&1; then
+       echo "failed to launch $command:"
+       tail -2 "$log" | sed 's/^/  /'
+       echo "full log in $log"
+     fi
+     ;;
+ 
+   (stop)
+ 
+     if [ -f $pid ]; then
+       if kill -0 `cat $pid` > /dev/null 2>&1; then
+         echo stopping $command
+         kill `cat $pid`
+       else
+         echo no $command to stop
+       fi
+     else
+       echo no $command to stop
+     fi
+     ;;
+ 
+   (*)
+     echo $usage
+     exit 1
+     ;;
+ 
+ esac
+ 
+ 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/sbin/spark-daemons.sh
----------------------------------------------------------------------
diff --cc sbin/spark-daemons.sh
index 0000000,d91254b..5d9f2bb
mode 000000,100755..100755
--- a/sbin/spark-daemons.sh
+++ b/sbin/spark-daemons.sh
@@@ -1,0 -1,35 +1,35 @@@
+ #!/usr/bin/env bash
+ 
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements.  See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License.  You may obtain a copy of the License at
+ #
+ #    http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+ 
+ # Run a Spark command on all slave hosts.
+ 
 -usage="Usage: spark-daemons.sh [--config confdir] [--hosts hostlistfile] [start|stop] command instance-number args..."
++usage="Usage: spark-daemons.sh [--config <conf-dir>] [start|stop] command instance-number args..."
+ 
+ # if no args specified, show usage
+ if [ $# -le 1 ]; then
+   echo $usage
+   exit 1
+ fi
+ 
+ sbin=`dirname "$0"`
+ sbin=`cd "$sbin"; pwd`
+ 
+ . "$sbin/spark-config.sh"
+ 
+ exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/spark-daemon.sh" "$@"

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/980afd28/sbin/stop-slaves.sh
----------------------------------------------------------------------
diff --cc sbin/stop-slaves.sh
index 0000000,63802e6..c6b0b6a
mode 000000,100755..100755
--- a/sbin/stop-slaves.sh
+++ b/sbin/stop-slaves.sh
@@@ -1,0 -1,37 +1,35 @@@
+ #!/usr/bin/env bash
+ 
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements.  See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License.  You may obtain a copy of the License at
+ #
+ #    http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+ 
 -# Starts the master on the machine this script is executed on.
 -
 -sbin=`dirname "$0"`
 -sbin=`cd "$sbin"; pwd`
++bin=`dirname "$0"`
++bin=`cd "$sbin"; pwd`
+ 
+ . "$sbin/spark-config.sh"
+ 
+ if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
+   . "${SPARK_CONF_DIR}/spark-env.sh"
+ fi
+ 
+ if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
+   "$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
+ else
+   for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
+     "$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
+   done
+ fi