[SPARK-11518][DEPLOY, WINDOWS] Handle spaces in Windows command scripts

Author: Jon Maurer <tritab@gmail.com>
Author: Jonathan Maurer <jmaurer@Jonathans-MacBook-Pro.local>

Closes #10789 from tritab/cmd_updates.
Authored by Jon Maurer on 2016-02-10 09:54:22 +00:00, committed by Sean Owen.
parent 9269036d8c
commit 2ba9b6a2df
14 changed files with 27 additions and 30 deletions
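
For context, a minimal sketch (not part of the diff) of the failure this change addresses, assuming a hypothetical SPARK_HOME such as C:\Program Files\spark that contains a space:

rem Illustration only: a hypothetical Spark install path containing a space.
set SPARK_HOME=C:\Program Files\spark

rem Before the fix: cmd splits the unquoted path at the space and tries to run
rem "C:\Program", so the script typically fails before Spark is ever launched.
cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.hive.beeline.BeeLine %*

rem After the fix: the quoted path is passed as a single token and resolves correctly.
cmd /V /E /C "%SPARK_HOME%\bin\spark-class.cmd" org.apache.hive.beeline.BeeLine %*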

bin/beeline.cmd

@@ -18,4 +18,4 @@ rem limitations under the License.
 rem
 set SPARK_HOME=%~dp0..
-cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.hive.beeline.BeeLine %*
+cmd /V /E /C "%SPARK_HOME%\bin\spark-class.cmd" org.apache.hive.beeline.BeeLine %*

bin/load-spark-env.cmd

@@ -27,7 +27,7 @@ if [%SPARK_ENV_LOADED%] == [] (
 if not [%SPARK_CONF_DIR%] == [] (
 set user_conf_dir=%SPARK_CONF_DIR%
 ) else (
-set user_conf_dir=%~dp0..\conf
+set user_conf_dir=..\conf
 )
 call :LoadSparkEnv
@@ -35,8 +35,8 @@ if [%SPARK_ENV_LOADED%] == [] (
 rem Setting SPARK_SCALA_VERSION if not already set.
-set ASSEMBLY_DIR2=%SPARK_HOME%/assembly/target/scala-2.11
-set ASSEMBLY_DIR1=%SPARK_HOME%/assembly/target/scala-2.10
+set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-2.11"
+set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-2.10"
 if [%SPARK_SCALA_VERSION%] == [] (

bin/pyspark.cmd

@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running PySpark. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.
-cmd /V /E /C %~dp0pyspark2.cmd %*
+cmd /V /E /C "%~dp0pyspark2.cmd" %*

bin/pyspark2.cmd

@@ -20,7 +20,7 @@ rem
 rem Figure out where the Spark framework is installed
 set SPARK_HOME=%~dp0..
-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
 set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]
 rem Figure out which Python to use.
@@ -35,4 +35,4 @@ set PYTHONPATH=%SPARK_HOME%\python\lib\py4j-0.9.1-src.zip;%PYTHONPATH%
 set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
 set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py
-call %SPARK_HOME%\bin\spark-submit2.cmd pyspark-shell-main --name "PySparkShell" %*
+call "%SPARK_HOME%\bin\spark-submit2.cmd" pyspark-shell-main --name "PySparkShell" %*

bin/run-example.cmd

@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running a Spark example. To avoid polluting
 rem the environment, it just launches a new cmd to do the real work.
-cmd /V /E /C %~dp0run-example2.cmd %*
+cmd /V /E /C "%~dp0run-example2.cmd" %*

bin/run-example2.cmd

@@ -20,12 +20,9 @@ rem
 set SCALA_VERSION=2.10
 rem Figure out where the Spark framework is installed
-set FWDIR=%~dp0..\
-rem Export this as SPARK_HOME
-set SPARK_HOME=%FWDIR%
-call %SPARK_HOME%\bin\load-spark-env.cmd
+set SPARK_HOME=%~dp0..
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
 rem Test that an argument was given
 if not "x%1"=="x" goto arg_given
@@ -36,12 +33,12 @@ if not "x%1"=="x" goto arg_given
 goto exit
 :arg_given
-set EXAMPLES_DIR=%FWDIR%examples
+set EXAMPLES_DIR=%SPARK_HOME%\examples
 rem Figure out the JAR file that our examples were packaged into.
 set SPARK_EXAMPLES_JAR=
-if exist "%FWDIR%RELEASE" (
-for %%d in ("%FWDIR%lib\spark-examples*.jar") do (
+if exist "%SPARK_HOME%\RELEASE" (
+for %%d in ("%SPARK_HOME%\lib\spark-examples*.jar") do (
 set SPARK_EXAMPLES_JAR=%%d
 )
 ) else (
@@ -80,7 +77,7 @@ if "%~1" neq "" (
 )
 if defined ARGS set ARGS=%ARGS:~1%
-call "%FWDIR%bin\spark-submit.cmd" ^
+call "%SPARK_HOME%\bin\spark-submit.cmd" ^
 --master %EXAMPLE_MASTER% ^
 --class %EXAMPLE_CLASS% ^
 "%SPARK_EXAMPLES_JAR%" %ARGS%

bin/spark-class.cmd

@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running a Spark class. To avoid polluting
 rem the environment, it just launches a new cmd to do the real work.
-cmd /V /E /C %~dp0spark-class2.cmd %*
+cmd /V /E /C "%~dp0spark-class2.cmd" %*

bin/spark-class2.cmd

@@ -20,7 +20,7 @@ rem
 rem Figure out where the Spark framework is installed
 set SPARK_HOME=%~dp0..
-call %SPARK_HOME%\bin\load-spark-env.cmd
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
 rem Test that an argument was given
 if "x%1"=="x" (
@@ -32,9 +32,9 @@ rem Find assembly jar
 set SPARK_ASSEMBLY_JAR=0
 if exist "%SPARK_HOME%\RELEASE" (
-set ASSEMBLY_DIR=%SPARK_HOME%\lib
+set ASSEMBLY_DIR="%SPARK_HOME%\lib"
 ) else (
-set ASSEMBLY_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%
+set ASSEMBLY_DIR="%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%"
 )
 for %%d in (%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar) do (
@@ -50,7 +50,7 @@ set LAUNCH_CLASSPATH=%SPARK_ASSEMBLY_JAR%
 rem Add the launcher build dir to the classpath if requested.
 if not "x%SPARK_PREPEND_CLASSES%"=="x" (
-set LAUNCH_CLASSPATH=%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCH_CLASSPATH%
+set LAUNCH_CLASSPATH="%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCH_CLASSPATH%"
 )
 set _SPARK_ASSEMBLY=%SPARK_ASSEMBLY_JAR%
@@ -62,7 +62,7 @@ if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
 rem The launcher library prints the command to be executed in a single line suitable for being
 rem executed by the batch interpreter. So read all the output of the launcher into a variable.
 set LAUNCHER_OUTPUT=%temp%\spark-class-launcher-output-%RANDOM%.txt
-"%RUNNER%" -cp %LAUNCH_CLASSPATH% org.apache.spark.launcher.Main %* > %LAUNCHER_OUTPUT%
+"%RUNNER%" -cp "%LAUNCH_CLASSPATH%" org.apache.spark.launcher.Main %* > %LAUNCHER_OUTPUT%
 for /f "tokens=*" %%i in (%LAUNCHER_OUTPUT%) do (
 set SPARK_CMD=%%i
 )

bin/spark-shell.cmd

@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running Spark shell. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.
-cmd /V /E /C %~dp0spark-shell2.cmd %*
+cmd /V /E /C "%~dp0spark-shell2.cmd" %*

bin/spark-shell2.cmd

@@ -32,4 +32,4 @@ if "x%SPARK_SUBMIT_OPTS%"=="x" (
 set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true"
 :run_shell
-%SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main --name "Spark shell" %*
+"%SPARK_HOME%\bin\spark-submit2.cmd" --class org.apache.spark.repl.Main --name "Spark shell" %*

bin/spark-submit.cmd

@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running Spark submit. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.
-cmd /V /E /C %~dp0spark-submit2.cmd %*
+cmd /V /E /C spark-submit2.cmd %*

bin/spark-submit2.cmd

@@ -24,4 +24,4 @@ rem disable randomized hash for string in Python 3.3+
 set PYTHONHASHSEED=0
 set CLASS=org.apache.spark.deploy.SparkSubmit
-%~dp0spark-class2.cmd %CLASS% %*
+"%~dp0spark-class2.cmd" %CLASS% %*

bin/sparkR.cmd

@@ -20,4 +20,4 @@ rem
 rem This is the entry point for running SparkR. To avoid polluting the
 rem environment, it just launches a new cmd to do the real work.
-cmd /V /E /C %~dp0sparkR2.cmd %*
+cmd /V /E /C "%~dp0sparkR2.cmd" %*

bin/sparkR2.cmd

@@ -20,7 +20,7 @@ rem
 rem Figure out where the Spark framework is installed
 set SPARK_HOME=%~dp0..
-call %SPARK_HOME%\bin\load-spark-env.cmd
-call %SPARK_HOME%\bin\spark-submit2.cmd sparkr-shell-main %*
+call "%SPARK_HOME%\bin\load-spark-env.cmd"
+call "%SPARK_HOME%\bin\spark-submit2.cmd" sparkr-shell-main %*