742fcff350
### What changes were proposed in this pull request? If you install Spark under a path that contains whitespace, it does not work on Windows, for example as below: ``` >>> SparkSession.builder.getOrCreate() Presence of build for multiple Scala versions detected (C:\...\assembly\target\scala-2.13 and C:\...\assembly\target\scala-2.12). Remove one of them or, set SPARK_SCALA_VERSION=2.13 in spark-env.cmd. Visit https://spark.apache.org/docs/latest/configuration.html#environment-variables for more details about setting environment variables in spark-env.cmd. Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd. ``` This PR fixes the whitespace handling to support any path on Windows. ### Why are the changes needed? To support Spark working with whitespace in paths on Windows. ### Does this PR introduce _any_ user-facing change? Yes, users will be able to install and run Spark under paths that contain whitespace. ### How was this patch tested? Manually tested. Closes #29706 from HyukjinKwon/window-space-path. Authored-by: HyukjinKwon <gurwls223@apache.org> Signed-off-by: HyukjinKwon <gurwls223@apache.org>
63 lines
2.2 KiB
Batchfile
63 lines
2.2 KiB
Batchfile
@echo off
|
|
|
|
rem
|
|
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
|
rem contributor license agreements. See the NOTICE file distributed with
|
|
rem this work for additional information regarding copyright ownership.
|
|
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
|
rem (the "License"); you may not use this file except in compliance with
|
|
rem the License. You may obtain a copy of the License at
|
|
rem
|
|
rem http://www.apache.org/licenses/LICENSE-2.0
|
|
rem
|
|
rem Unless required by applicable law or agreed to in writing, software
|
|
rem distributed under the License is distributed on an "AS IS" BASIS,
|
|
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
rem See the License for the specific language governing permissions and
|
|
rem limitations under the License.
|
|
rem
|
|
|
|
rem This script loads spark-env.cmd if it exists, and ensures it is only loaded once.
|
|
rem spark-env.cmd is loaded from SPARK_CONF_DIR if set, or within the current directory's
|
|
rem conf\ subdirectory.
|
|
|
|
rem Load spark-env.cmd at most once per cmd.exe session: the guard variable
rem survives across nested "call"s, so re-entrant invocations are no-ops.
if not defined SPARK_ENV_LOADED (
  set SPARK_ENV_LOADED=1

  rem Default SPARK_CONF_DIR to the conf\ directory one level above this
  rem script. %~dp0 expands to this script's own directory, with a trailing
  rem backslash, regardless of the caller's current directory.
  if not defined SPARK_CONF_DIR (
    set SPARK_CONF_DIR=%~dp0..\conf
  )

  rem The value assigned above IS visible inside :LoadSparkEnv even without
  rem delayed expansion, because "call" re-parses the subroutine's lines
  rem (parse-time %VAR% expansion only bites within this same (...) block).
  call :LoadSparkEnv
)
|
|
|
|
rem Set SPARK_SCALA_VERSION if the user/environment has not already set it,
rem by probing which assembly build directories exist under SPARK_HOME.

set SCALA_VERSION_1=2.13
set SCALA_VERSION_2=2.12

rem NOTE: the surrounding double quotes are deliberately part of the stored
rem value, so the unquoted "if exist %ASSEMBLY_DIR1%" tests below stay a
rem single token even when SPARK_HOME contains whitespace.
set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%"
set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%"
set ENV_VARIABLE_DOC=https://spark.apache.org/docs/latest/configuration.html#environment-variables

if not defined SPARK_SCALA_VERSION (
  rem Builds for both Scala versions are present: refuse to guess, tell the
  rem user how to disambiguate, and fail.
  if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
    echo Presence of build for multiple Scala versions detected ^(%ASSEMBLY_DIR1% and %ASSEMBLY_DIR2%^).
    echo Remove one of them or, set SPARK_SCALA_VERSION=%SCALA_VERSION_1% in spark-env.cmd.
    echo Visit %ENV_VARIABLE_DOC% for more details about setting environment variables in spark-env.cmd.
    echo Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd.
    rem Use "exit /b 1" so only this script exits with errorlevel 1. A plain
    rem "exit 1" terminates the whole cmd.exe instance, which closes the
    rem user's interactive console when the script chain was started there.
    exit /b 1
  )
  rem Exactly one (or neither) build exists: prefer the newer Scala line.
  if exist %ASSEMBLY_DIR1% (
    set SPARK_SCALA_VERSION=%SCALA_VERSION_1%
  ) else (
    set SPARK_SCALA_VERSION=%SCALA_VERSION_2%
  )
)
exit /b 0
|
|
|
|
rem Subroutine: source the user's spark-env.cmd from SPARK_CONF_DIR, if one
rem exists. Quoted throughout so SPARK_CONF_DIR may contain whitespace.
:LoadSparkEnv
if exist "%SPARK_CONF_DIR%\spark-env.cmd" call "%SPARK_CONF_DIR%\spark-env.cmd"
exit /b
|