[SPARK-32839][WINDOWS] Make Spark scripts work with spaces in paths on Windows

### What changes were proposed in this pull request?

If you install Spark under a path that contains whitespace, it does not work on Windows, for example as below:

```
>>> SparkSession.builder.getOrCreate()
Presence of build for multiple Scala versions detected (C:\...\assembly\target\scala-2.13 and C:\...\assembly\target\scala-2.12).
Remove one of them or, set SPARK_SCALA_VERSION=2.13 in spark-env.cmd.
Visit https://spark.apache.org/docs/latest/configuration.html#environment-variables for more details about setting environment variables in spark-env.cmd.
Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd.
```

This PR fixes the whitespace handling in the Windows scripts so that Spark works under any path on Windows.
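
The failure is easy to reproduce in a plain batch script. A minimal sketch of the failure mode, using a hypothetical install path rather than anything from this PR:

```
rem Hypothetical install path containing a space.
set SPARK_HOME=C:\Program Files\Spark

rem Unquoted: expands to  if exist C:\Program Files\Spark\jars echo found
rem cmd tests only "C:\Program" and treats the rest as the command to run.
if exist %SPARK_HOME%\jars echo found

rem Quoted: the whole path stays a single token, so the check behaves.
if exist "%SPARK_HOME%\jars" echo found
```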

### Why are the changes needed?

So that Spark works on Windows even when it is installed under a path that contains whitespace.

### Does this PR introduce _any_ user-facing change?

Yes, users will be able to install and run Spark under paths that contain whitespace.

### How was this patch tested?

Manually tested.

Closes #29706 from HyukjinKwon/window-space-path.

Authored-by: HyukjinKwon <gurwls223@apache.org>
Signed-off-by: HyukjinKwon <gurwls223@apache.org>

bin/find-spark-home.cmd

@@ -55,6 +55,6 @@ if "x%SPARK_HOME%"=="x" (
     set SPARK_HOME=%~dp0..
   ) else (
     rem We are pip installed, use the Python script to resolve a reasonable SPARK_HOME
-    for /f "delims=" %%i in ('%PYTHON_RUNNER% %FIND_SPARK_HOME_PYTHON_SCRIPT%') do set SPARK_HOME=%%i
+    for /f "delims=" %%i in ('%PYTHON_RUNNER% "%FIND_SPARK_HOME_PYTHON_SCRIPT%"') do set SPARK_HOME=%%i
   )
 )
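
The quoting matters here because `for /f ... in ('command')` re-runs the command through a new cmd instance, which splits arguments on spaces. A hedged sketch with hypothetical variable values:

```
rem Hypothetical values for illustration only.
set PYTHON_RUNNER=python
set SCRIPT=C:\Program Files\Spark\find_spark_home.py

rem Unquoted: python receives two arguments,
rem "C:\Program" and "Files\Spark\find_spark_home.py", and fails.
for /f "delims=" %%i in ('%PYTHON_RUNNER% %SCRIPT%') do set SPARK_HOME=%%i

rem Quoted: the path reaches python as a single argument.
for /f "delims=" %%i in ('%PYTHON_RUNNER% "%SCRIPT%"') do set SPARK_HOME=%%i
```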

bin/load-spark-env.cmd

@@ -24,7 +24,7 @@ rem conf\ subdirectory.
 if not defined SPARK_ENV_LOADED (
   set SPARK_ENV_LOADED=1
-  if [%SPARK_CONF_DIR%] == [] (
+  if not defined SPARK_CONF_DIR (
     set SPARK_CONF_DIR=%~dp0..\conf
   )
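
The `if [%SPARK_CONF_DIR%] == []` idiom breaks as soon as the variable expands to a value with spaces, because the expansion happens before the `if` line is parsed; `if not defined` never expands the value at all. A small sketch with a hypothetical value:

```
set SPARK_CONF_DIR=C:\Spark Conf

rem Expands to  if [C:\Spark Conf] == [] ...  -- the space splits the
rem left-hand token and cmd aborts with "Conf] was unexpected at this time."
if [%SPARK_CONF_DIR%] == [] echo not set

rem No expansion takes place, so the value cannot break the parser.
if not defined SPARK_CONF_DIR echo not set
```
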
@@ -36,8 +36,8 @@ rem Setting SPARK_SCALA_VERSION if not already set.
 set SCALA_VERSION_1=2.13
 set SCALA_VERSION_2=2.12
-set ASSEMBLY_DIR1=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%
-set ASSEMBLY_DIR2=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%
+set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%"
+set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%"
 set ENV_VARIABLE_DOC=https://spark.apache.org/docs/latest/configuration.html#environment-variables
 if not defined SPARK_SCALA_VERSION (
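
Here the quotes are baked into the stored value, presumably so that later unquoted references such as `if exist %ASSEMBLY_DIR1%` still expand to a single quoted token. A sketch under that assumption:

```
rem Hypothetical install path.
set SPARK_HOME=C:\Program Files\Spark
set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-2.13"

rem Expands to  if exist "C:\Program Files\Spark\assembly\target\scala-2.13" ...
if exist %ASSEMBLY_DIR1% echo found
```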

bin/spark-class2.cmd (Normal file → Executable file)

@@ -30,12 +30,12 @@ if "x%1"=="x" (
 rem Find Spark jars.
 if exist "%SPARK_HOME%\jars" (
-  set SPARK_JARS_DIR="%SPARK_HOME%\jars"
+  set SPARK_JARS_DIR=%SPARK_HOME%\jars
 ) else (
-  set SPARK_JARS_DIR="%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%\jars"
+  set SPARK_JARS_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%\jars
 )
-if not exist "%SPARK_JARS_DIR%"\ (
+if not exist "%SPARK_JARS_DIR%" (
   echo Failed to find Spark jars directory.
   echo You need to build Spark before running this program.
   exit /b 1
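
spark-class2.cmd takes the opposite convention: store the value bare and quote it at each use site. A sketch with a hypothetical path:

```
rem Hypothetical install path.
set SPARK_HOME=C:\Program Files\Spark
set SPARK_JARS_DIR=%SPARK_HOME%\jars

rem Quote at the point of use. Had the value kept its own quotes, quoting
rem again here would expand to ""C:\..."" and the test would misfire.
if not exist "%SPARK_JARS_DIR%" (
  echo Failed to find Spark jars directory at "%SPARK_JARS_DIR%".
)
```

The two conventions are opposites, but the common thread of the fix is that every expansion of a path variable ends up quoted exactly once.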