7437720952
### What changes were proposed in this pull request? Fix regression bug in load-spark-env.cmd with Spark 3.0.0 ### Why are the changes needed? cmd doesn't support setting and reading an env var twice within one parenthesized block. So `set SPARK_ENV_CMD=%SPARK_CONF_DIR%\%SPARK_ENV_CMD%` doesn't take effect, which caused a regression. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? Manually tested. 1. Create a spark-env.cmd under conf folder. Inside this, `echo spark-env.cmd` 2. Run old load-spark-env.cmd; nothing is printed in the output 3. Run fixed load-spark-env.cmd; `spark-env.cmd` shows in the output. Closes #29044 from warrenzhu25/32227. Lead-authored-by: Warren Zhu <zhonzh@microsoft.com> Co-authored-by: Warren Zhu <warren.zhu25@gmail.com> Signed-off-by: HyukjinKwon <gurwls223@apache.org>
63 lines
2.2 KiB
Batchfile
63 lines
2.2 KiB
Batchfile
@echo off
|
|
|
|
rem
|
|
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
|
rem contributor license agreements. See the NOTICE file distributed with
|
|
rem this work for additional information regarding copyright ownership.
|
|
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
|
rem (the "License"); you may not use this file except in compliance with
|
|
rem the License. You may obtain a copy of the License at
|
|
rem
|
|
rem http://www.apache.org/licenses/LICENSE-2.0
|
|
rem
|
|
rem Unless required by applicable law or agreed to in writing, software
|
|
rem distributed under the License is distributed on an "AS IS" BASIS,
|
|
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
rem See the License for the specific language governing permissions and
|
|
rem limitations under the License.
|
|
rem
|
|
|
|
rem This script loads spark-env.cmd if it exists, and ensures it is only loaded once.
|
|
rem spark-env.cmd is loaded from SPARK_CONF_DIR if set, or within the current directory's
|
|
rem conf\ subdirectory.
|
|
|
|
rem Load spark-env.cmd at most once per shell session. SPARK_ENV_LOADED acts as
rem the guard flag so nested invocations of Spark scripts do not re-source it.
if not defined SPARK_ENV_LOADED (
  set SPARK_ENV_LOADED=1

  rem Default SPARK_CONF_DIR to the conf\ directory next to this script.
  rem The comparison is quoted so a SPARK_CONF_DIR containing spaces
  rem (e.g. under "C:\Program Files") does not break the `if` parse;
  rem the unset and empty cases behave exactly as before.
  if "%SPARK_CONF_DIR%" == "" (
    set SPARK_CONF_DIR=%~dp0..\conf
  )

  rem IMPORTANT: spark-env.cmd must be loaded via a subroutine rather than
  rem inline here. cmd expands %SPARK_CONF_DIR% in this whole parenthesized
  rem block at parse time, i.e. BEFORE the `set SPARK_CONF_DIR=...` above has
  rem run. The `call :LoadSparkEnv` forces re-parsing, so the subroutine sees
  rem the updated value (this is the regression fix from SPARK-32227).
  call :LoadSparkEnv
)
|
|
|
|
rem Determine SPARK_SCALA_VERSION (if not already set) by probing which
rem assembly build directories exist under SPARK_HOME. If builds for both
rem Scala versions are present the choice is ambiguous, so abort with a hint.

set SCALA_VERSION_1=2.13
set SCALA_VERSION_2=2.12

set ASSEMBLY_DIR1=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%
set ASSEMBLY_DIR2=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%
set ENV_VARIABLE_DOC=https://spark.apache.org/docs/latest/configuration.html#environment-variables

if not defined SPARK_SCALA_VERSION (
  rem Paths are quoted so `if exist` works when SPARK_HOME contains spaces.
  if exist "%ASSEMBLY_DIR2%" if exist "%ASSEMBLY_DIR1%" (
    echo Presence of build for multiple Scala versions detected ^(%ASSEMBLY_DIR1% and %ASSEMBLY_DIR2%^).
    echo Remove one of them or, set SPARK_SCALA_VERSION=%SCALA_VERSION_1% in spark-env.cmd.
    echo Visit %ENV_VARIABLE_DOC% for more details about setting environment variables in spark-env.cmd.
    echo Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd.
    rem `exit 1` (not `exit /b 1`) deliberately terminates the calling shell:
    rem continuing with an ambiguous Scala version would misconfigure Spark.
    exit 1
  )
  rem Prefer the newer Scala version when its build exists; otherwise fall
  rem back to the older one.
  if exist "%ASSEMBLY_DIR1%" (
    set SPARK_SCALA_VERSION=%SCALA_VERSION_1%
  ) else (
    set SPARK_SCALA_VERSION=%SCALA_VERSION_2%
  )
)
rem Return to the caller; the :LoadSparkEnv subroutine below is only entered
rem via `call`, never by falling through.
exit /b 0
|
|
|
|
rem Subroutine: source conf\spark-env.cmd if it exists. Kept as a separate
rem `call`ed label so %SPARK_CONF_DIR% is expanded AFTER the caller has set
rem it — expanding it inside the caller's parenthesized block would use the
rem stale parse-time value (the SPARK-32227 regression).
:LoadSparkEnv
if exist "%SPARK_CONF_DIR%\spark-env.cmd" (
  call "%SPARK_CONF_DIR%\spark-env.cmd"
)
|