Posted to commits@spark.apache.org by sr...@apache.org on 2016/02/10 10:54:29 UTC

spark git commit: [SPARK-11518][DEPLOY, WINDOWS] Handle spaces in Windows command scripts

Repository: spark
Updated Branches:
  refs/heads/master 9269036d8 -> 2ba9b6a2d


[SPARK-11518][DEPLOY, WINDOWS] Handle spaces in Windows command scripts

Author: Jon Maurer <tr...@gmail.com>
Author: Jonathan Maurer <jm...@Jonathans-MacBook-Pro.local>

Closes #10789 from tritab/cmd_updates.
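
For context on the failure mode: when SPARK_HOME resolves to a path with
spaces (for example C:\Program Files\spark), an unquoted expansion is split
into multiple tokens by the cmd parser, so the script path never resolves.
A minimal sketch of the problem and the fix, with an illustrative install
path and class name:

    set SPARK_HOME=C:\Program Files\spark
    rem unquoted: cmd parses "C:\Program" as the command to run and treats
    rem "Files\spark\bin\spark-class.cmd" as a stray argument
    cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.example.Main
    rem quoted: the full path is a single token and resolves correctly
    cmd /V /E /C "%SPARK_HOME%\bin\spark-class.cmd" org.example.Main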


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2ba9b6a2
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2ba9b6a2
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2ba9b6a2

Branch: refs/heads/master
Commit: 2ba9b6a2dfff8eb06b6f93024f5140e784b8be49
Parents: 9269036
Author: Jon Maurer <tr...@gmail.com>
Authored: Wed Feb 10 09:54:22 2016 +0000
Committer: Sean Owen <so...@cloudera.com>
Committed: Wed Feb 10 09:54:22 2016 +0000

----------------------------------------------------------------------
 bin/beeline.cmd        |  2 +-
 bin/load-spark-env.cmd |  6 +++---
 bin/pyspark.cmd        |  2 +-
 bin/pyspark2.cmd       |  4 ++--
 bin/run-example.cmd    |  2 +-
 bin/run-example2.cmd   | 15 ++++++---------
 bin/spark-class.cmd    |  2 +-
 bin/spark-class2.cmd   | 10 +++++-----
 bin/spark-shell.cmd    |  2 +-
 bin/spark-shell2.cmd   |  2 +-
 bin/spark-submit.cmd   |  2 +-
 bin/spark-submit2.cmd  |  2 +-
 bin/sparkR.cmd         |  2 +-
 bin/sparkR2.cmd        |  4 ++--
 14 files changed, 27 insertions(+), 30 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/beeline.cmd
----------------------------------------------------------------------
diff --git a/bin/beeline.cmd b/bin/beeline.cmd
index 8293f31..8ddaa41 100644
--- a/bin/beeline.cmd
+++ b/bin/beeline.cmd
@@ -18,4 +18,4 @@ rem limitations under the License.
 rem
 
 set SPARK_HOME=%~dp0..
-cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.hive.beeline.BeeLine %*
+cmd /V /E /C "%SPARK_HOME%\bin\spark-class.cmd" org.apache.hive.beeline.BeeLine %*
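
A note on the flags used by all of these wrappers: /C runs the given command
and exits, /E enables command extensions, and /V enables delayed environment
variable expansion, so !VAR! references inside a parenthesized block see
assignments made within that block. A small sketch of the difference,
assuming it runs under cmd /V:ON:

    set X=old
    (set X=new & echo %X% !X!)
    rem prints "old new": %X% expands when the line is parsed, !X! when it runs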

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/load-spark-env.cmd
----------------------------------------------------------------------
diff --git a/bin/load-spark-env.cmd b/bin/load-spark-env.cmd
index 59080ed..0977025 100644
--- a/bin/load-spark-env.cmd
+++ b/bin/load-spark-env.cmd
@@ -27,7 +27,7 @@ if [%SPARK_ENV_LOADED%] == [] (
   if not [%SPARK_CONF_DIR%] == [] (
     set user_conf_dir=%SPARK_CONF_DIR%
   ) else (
-    set user_conf_dir=%~dp0..\conf
+    set user_conf_dir=..\conf
   )
 
   call :LoadSparkEnv
@@ -35,8 +35,8 @@ if [%SPARK_ENV_LOADED%] == [] (
 
 rem Setting SPARK_SCALA_VERSION if not already set.
 
-set ASSEMBLY_DIR2=%SPARK_HOME%/assembly/target/scala-2.11
-set ASSEMBLY_DIR1=%SPARK_HOME%/assembly/target/scala-2.10
+set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-2.11"
+set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-2.10"
 
 if [%SPARK_SCALA_VERSION%] == [] (
 
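One subtlety of the new ASSEMBLY_DIR1/ASSEMBLY_DIR2 assignments: in cmd,
quotes written on the right-hand side of set become part of the stored value,
so each later expansion of the variable is already quoted. A quick
illustration with a hypothetical path:

    set JAR="C:\Program Files\spark\lib\example.jar"
    if exist %JAR% echo found
    rem the test expands to: if exist "C:\Program Files\spark\lib\example.jar" echo found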

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/pyspark.cmd
----------------------------------------------------------------------
diff --git a/bin/pyspark.cmd b/bin/pyspark.cmd
index 7c26fbb..72d046a 100644
--- a/bin/pyspark.cmd
+++ b/bin/pyspark.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running PySpark. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %~dp0pyspark2.cmd %*
+cmd /V /E /C "%~dp0pyspark2.cmd" %*
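
%~dp0 expands to the drive and directory of the script currently executing
(parameter 0), including a trailing backslash, so "%~dp0pyspark2.cmd"
resolves the sibling script regardless of the caller's working directory;
the added quotes protect that path when it contains spaces. For example,
assuming an install under C:\Program Files\spark:

    rem inside C:\Program Files\spark\bin\pyspark.cmd:
    echo %~dp0
    rem prints: C:\Program Files\spark\bin\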

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/pyspark2.cmd
----------------------------------------------------------------------
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index 51d6d15..21fe281 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -20,7 +20,7 @@ rem
 rem Figure out where the Spark framework is installed
 set SPARK_HOME=%~dp0..
 
-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
 set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]
 
 rem Figure out which Python to use.
@@ -35,4 +35,4 @@ set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.9.1-src.zip;%PYTHONPATH%
 set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
 set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py
 
-call %SPARK_HOME%\bin\spark-submit2.cmd pyspark-shell-main --name "PySparkShell" %*
+call "%SPARK_HOME%\bin\spark-submit2.cmd" pyspark-shell-main --name "PySparkShell" %*

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/run-example.cmd
----------------------------------------------------------------------
diff --git a/bin/run-example.cmd b/bin/run-example.cmd
index 5b2d048..64f6bc3 100644
--- a/bin/run-example.cmd
+++ b/bin/run-example.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running a Spark example. To avoid polluting
 rem the environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %~dp0run-example2.cmd %*
+cmd /V /E /C "%~dp0run-example2.cmd" %*

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/run-example2.cmd
----------------------------------------------------------------------
diff --git a/bin/run-example2.cmd b/bin/run-example2.cmd
index c3e0221..fada435 100644
--- a/bin/run-example2.cmd
+++ b/bin/run-example2.cmd
@@ -20,12 +20,9 @@ rem
 set SCALA_VERSION=2.10
 
 rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0..\
+set SPARK_HOME=%~dp0..
 
-rem Export this as SPARK_HOME
-set SPARK_HOME=%FWDIR%
-
-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
 
 rem Test that an argument was given
 if not "x%1"=="x" goto arg_given
@@ -36,12 +33,12 @@ if not "x%1"=="x" goto arg_given
   goto exit
 :arg_given
 
-set EXAMPLES_DIR=%FWDIR%examples
+set EXAMPLES_DIR=%SPARK_HOME%\examples
 
 rem Figure out the JAR file that our examples were packaged into.
 set SPARK_EXAMPLES_JAR=
-if exist "%FWDIR%RELEASE" (
-  for %%d in ("%FWDIR%lib\spark-examples*.jar") do (
+if exist "%SPARK_HOME%\RELEASE" (
+  for %%d in ("%SPARK_HOME%\lib\spark-examples*.jar") do (
     set SPARK_EXAMPLES_JAR=%%d
   )
 ) else (
@@ -80,7 +77,7 @@ if "%~1" neq "" (
 )
 if defined ARGS set ARGS=%ARGS:~1%
 
-call "%FWDIR%bin\spark-submit.cmd" ^
+call "%SPARK_HOME%\bin\spark-submit.cmd" ^
   --master %EXAMPLE_MASTER% ^
   --class %EXAMPLE_CLASS% ^
   "%SPARK_EXAMPLES_JAR%" %ARGS%

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/spark-class.cmd
----------------------------------------------------------------------
diff --git a/bin/spark-class.cmd b/bin/spark-class.cmd
index 19850db..3bf3d20 100644
--- a/bin/spark-class.cmd
+++ b/bin/spark-class.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running a Spark class. To avoid polluting
 rem the environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %~dp0spark-class2.cmd %*
+cmd /V /E /C "%~dp0spark-class2.cmd" %*

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/spark-class2.cmd
----------------------------------------------------------------------
diff --git a/bin/spark-class2.cmd b/bin/spark-class2.cmd
index db09fa2..c4fadb8 100644
--- a/bin/spark-class2.cmd
+++ b/bin/spark-class2.cmd
@@ -20,7 +20,7 @@ rem
 rem Figure out where the Spark framework is installed
 set SPARK_HOME=%~dp0..
 
-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
 
 rem Test that an argument was given
 if "x%1"=="x" (
@@ -32,9 +32,9 @@ rem Find assembly jar
 set SPARK_ASSEMBLY_JAR=0
 
 if exist "%SPARK_HOME%\RELEASE" (
-  set ASSEMBLY_DIR=%SPARK_HOME%\lib
+  set ASSEMBLY_DIR="%SPARK_HOME%\lib"
 ) else (
-  set ASSEMBLY_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%
+  set ASSEMBLY_DIR="%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%"
 )
 
 for %%d in (%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar) do (
@@ -50,7 +50,7 @@ set LAUNCH_CLASSPATH=%SPARK_ASSEMBLY_JAR%
 
 rem Add the launcher build dir to the classpath if requested.
 if not "x%SPARK_PREPEND_CLASSES%"=="x" (
-  set LAUNCH_CLASSPATH=%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCH_CLASSPATH%
+  set LAUNCH_CLASSPATH="%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCH_CLASSPATH%"
 )
 
 set _SPARK_ASSEMBLY=%SPARK_ASSEMBLY_JAR%
@@ -62,7 +62,7 @@ if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
 rem The launcher library prints the command to be executed in a single line suitable for being
 rem executed by the batch interpreter. So read all the output of the launcher into a variable.
 set LAUNCHER_OUTPUT=%temp%\spark-class-launcher-output-%RANDOM%.txt
-"%RUNNER%" -cp %LAUNCH_CLASSPATH% org.apache.spark.launcher.Main %* > %LAUNCHER_OUTPUT%
+"%RUNNER%" -cp "%LAUNCH_CLASSPATH%" org.apache.spark.launcher.Main %* > %LAUNCHER_OUTPUT%
 for /f "tokens=*" %%i in (%LAUNCHER_OUTPUT%) do (
   set SPARK_CMD=%%i
 )
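
The capture pattern above round-trips the launcher's output through a temp
file: org.apache.spark.launcher.Main prints the fully built command on a
single line, and for /f with "tokens=*" reads that line back into a variable.
A simplified standalone sketch of the same pattern, with placeholder file and
command names:

    set OUT=%temp%\launcher-output-%RANDOM%.txt
    echo java -cp example.jar org.example.Main > %OUT%
    for /f "tokens=*" %%i in (%OUT%) do set RUN_CMD=%%i
    del %OUT%
    %RUN_CMD%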

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/spark-shell.cmd
----------------------------------------------------------------------
diff --git a/bin/spark-shell.cmd b/bin/spark-shell.cmd
index 8f90ba5..991423d 100644
--- a/bin/spark-shell.cmd
+++ b/bin/spark-shell.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running Spark shell. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %~dp0spark-shell2.cmd %*
+cmd /V /E /C "%~dp0spark-shell2.cmd" %*

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/spark-shell2.cmd
----------------------------------------------------------------------
diff --git a/bin/spark-shell2.cmd b/bin/spark-shell2.cmd
index b9b0f51..7b5d396 100644
--- a/bin/spark-shell2.cmd
+++ b/bin/spark-shell2.cmd
@@ -32,4 +32,4 @@ if "x%SPARK_SUBMIT_OPTS%"=="x" (
 set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true"
 
 :run_shell
-%SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main --name "Spark shell" %*
+"%SPARK_HOME%\bin\spark-submit2.cmd" --class org.apache.spark.repl.Main --name "Spark shell" %*

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/spark-submit.cmd
----------------------------------------------------------------------
diff --git a/bin/spark-submit.cmd b/bin/spark-submit.cmd
index 8f3b84c..f121b62 100644
--- a/bin/spark-submit.cmd
+++ b/bin/spark-submit.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running Spark submit. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %~dp0spark-submit2.cmd %*
+cmd /V /E /C "%~dp0spark-submit2.cmd" %*

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/spark-submit2.cmd
----------------------------------------------------------------------
diff --git a/bin/spark-submit2.cmd b/bin/spark-submit2.cmd
index 651376e..49e350f 100644
--- a/bin/spark-submit2.cmd
+++ b/bin/spark-submit2.cmd
@@ -24,4 +24,4 @@ rem disable randomized hash for string in Python 3.3+
 set PYTHONHASHSEED=0
 
 set CLASS=org.apache.spark.deploy.SparkSubmit
-%~dp0spark-class2.cmd %CLASS% %*
+"%~dp0spark-class2.cmd" %CLASS% %*

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/sparkR.cmd
----------------------------------------------------------------------
diff --git a/bin/sparkR.cmd b/bin/sparkR.cmd
index d7b6018..1e5ea6a 100644
--- a/bin/sparkR.cmd
+++ b/bin/sparkR.cmd
@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running SparkR. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %~dp0sparkR2.cmd %*
+cmd /V /E /C "%~dp0sparkR2.cmd" %*

http://git-wip-us.apache.org/repos/asf/spark/blob/2ba9b6a2/bin/sparkR2.cmd
----------------------------------------------------------------------
diff --git a/bin/sparkR2.cmd b/bin/sparkR2.cmd
index e47f22c..459b780 100644
--- a/bin/sparkR2.cmd
+++ b/bin/sparkR2.cmd
@@ -20,7 +20,7 @@ rem
 rem Figure out where the Spark framework is installed
 set SPARK_HOME=%~dp0..
 
-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
 
 
-call %SPARK_HOME%\bin\spark-submit2.cmd sparkr-shell-main %*
+call "%SPARK_HOME%\bin\spark-submit2.cmd" sparkr-shell-main %*

