spark-instrumented-optimizer/bin/spark-class2.cmd
HyukjinKwon 742fcff350 [SPARK-32839][WINDOWS] Make Spark scripts working with the spaces in paths on Windows
### What changes were proposed in this pull request?

If you install Spark under a path that contains whitespace, it does not work on Windows, as shown below:

```
>>> SparkSession.builder.getOrCreate()
Presence of build for multiple Scala versions detected (C:\...\assembly\target\scala-2.13 and C:\...\assembly\target\scala-2.12).
Remove one of them or, set SPARK_SCALA_VERSION=2.13 in spark-env.cmd.
Visit https://spark.apache.org/docs/latest/configuration.html#environment-variables for more details about setting environment variables in spark-env.cmd.
Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd.
```

This PR fixes the whitespace handling to support any paths on Windows.

### Why are the changes needed?

To support Spark working with whitespaces in paths on Windows.

### Does this PR introduce _any_ user-facing change?

Yes, users will be able to install and run Spark under paths that contain whitespace.

### How was this patch tested?

Manually tested.

Closes #29706 from HyukjinKwon/window-space-path.

Authored-by: HyukjinKwon <gurwls223@apache.org>
Signed-off-by: HyukjinKwon <gurwls223@apache.org>
2020-09-14 13:15:14 +09:00

78 lines
2.7 KiB
Batchfile
Executable file

@echo off
rem
rem Licensed to the Apache Software Foundation (ASF) under one or more
rem contributor license agreements. See the NOTICE file distributed with
rem this work for additional information regarding copyright ownership.
rem The ASF licenses this file to You under the Apache License, Version 2.0
rem (the "License"); you may not use this file except in compliance with
rem the License. You may obtain a copy of the License at
rem
rem http://www.apache.org/licenses/LICENSE-2.0
rem
rem Unless required by applicable law or agreed to in writing, software
rem distributed under the License is distributed on an "AS IS" BASIS,
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
rem See the License for the specific language governing permissions and
rem limitations under the License.
rem
rem Figure out where the Spark framework is installed
rem find-spark-home.cmd sets SPARK_HOME; both call targets are quoted so an
rem install directory containing spaces still works (SPARK-32839).
call "%~dp0find-spark-home.cmd"
call "%SPARK_HOME%\bin\load-spark-env.cmd"
rem Test that an argument was given
rem The "x" prefix is the classic cmd idiom to avoid a syntax error when %1 is empty.
if "x%1"=="x" (
echo Usage: spark-class ^<class^> [^<args^>]
exit /b 1
)
rem Find Spark jars.
rem Prefer the packaged layout (%SPARK_HOME%\jars); fall back to the assembly
rem build output when running from a source checkout.
if exist "%SPARK_HOME%\jars" (
set SPARK_JARS_DIR=%SPARK_HOME%\jars
) else (
set SPARK_JARS_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%\jars
)
rem Fail fast with a helpful message if neither location exists.
if not exist "%SPARK_JARS_DIR%" (
echo Failed to find Spark jars directory.
echo You need to build Spark before running this program.
exit /b 1
)
rem The trailing \* is a java classpath wildcard: load every jar in the directory.
set LAUNCH_CLASSPATH=%SPARK_JARS_DIR%\*
rem Add the launcher build dir to the classpath if requested.
rem NOTE(review): the quotes become part of LAUNCH_CLASSPATH's value here; they
rem protect a space or ")" in SPARK_HOME from terminating this parenthesized
rem block, but the value is quoted again at the java invocation below --
rem confirm the resulting nested quoting is intended for this dev-only path.
if not "x%SPARK_PREPEND_CLASSES%"=="x" (
set LAUNCH_CLASSPATH="%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCH_CLASSPATH%"
)
rem Figure out where java is.
rem Default to whatever "java" resolves to on PATH; prefer JAVA_HOME when set.
rem RUNNER is expanded inside quotes at the invocation below, so a JAVA_HOME
rem containing spaces is handled there.
set RUNNER=java
if not "x%JAVA_HOME%"=="x" (
set RUNNER=%JAVA_HOME%\bin\java
) else (
rem No JAVA_HOME: verify that "java" can actually be found on PATH.
where /q "%RUNNER%"
if ERRORLEVEL 1 (
echo Java not found and JAVA_HOME environment variable is not set.
echo Install Java and set JAVA_HOME to point to the Java installation directory.
exit /b 1
)
)
rem The launcher library prints the command to be executed in a single line suitable for being
rem executed by the batch interpreter. So read all the output of the launcher into a variable.
:gen
set LAUNCHER_OUTPUT=%temp%\spark-class-launcher-output-%RANDOM%.txt
rem SPARK-28302: %RANDOM% would return the same number if we call it instantly after the last
rem call, so make sure to generate a unique file name to avoid two concurrent processes
rem writing into the same file.
if exist "%LAUNCHER_OUTPUT%" goto :gen
rem SPARK-32839: quote the temp file path everywhere it is used -- %temp% may contain
rem spaces (e.g. a Windows user name with spaces). "usebackq" makes for /f treat the
rem double-quoted name as a file to read rather than a literal string.
"%RUNNER%" -Xmx128m -cp "%LAUNCH_CLASSPATH%" org.apache.spark.launcher.Main %* > "%LAUNCHER_OUTPUT%"
for /f "usebackq tokens=*" %%i in ("%LAUNCHER_OUTPUT%") do (
set SPARK_CMD=%%i
)
del "%LAUNCHER_OUTPUT%"
rem Execute the single-line command the launcher emitted.
%SPARK_CMD%