@echo off

rem
rem Licensed to the Apache Software Foundation (ASF) under one or more
rem contributor license agreements. See the NOTICE file distributed with
rem this work for additional information regarding copyright ownership.
rem The ASF licenses this file to You under the Apache License, Version 2.0
rem (the "License"); you may not use this file except in compliance with
rem the License. You may obtain a copy of the License at
rem
rem http://www.apache.org/licenses/LICENSE-2.0
rem
rem Unless required by applicable law or agreed to in writing, software
rem distributed under the License is distributed on an "AS IS" BASIS,
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
rem See the License for the specific language governing permissions and
rem limitations under the License.
rem

rem Figure out where the Spark framework is installed. %~dp0 is this script's
rem directory (with trailing backslash), so SPARK_HOME is its parent.
set SPARK_HOME=%~dp0..

rem Load environment variables from conf\spark-env.cmd, if it exists.
if exist "%SPARK_HOME%\conf\spark-env.cmd" call "%SPARK_HOME%\conf\spark-env.cmd"

rem Test that an argument was given. %~1 strips surrounding quotes so a quoted
rem first argument is still recognized as present.
if "%~1"=="" (
  echo Usage: spark-class ^<class^> [^<args^>]
  exit /b 1
)

rem Sentinel value; replaced below once the launcher classes/jar are found.
set LAUNCHER_CP=0

rem Path is quoted so installs under directories with spaces work.
if exist "%SPARK_HOME%\RELEASE" goto find_release_launcher

rem Look for the Spark launcher in both Scala build directories. The launcher doesn't use Scala so
rem it doesn't really matter which one is picked up. Add the compiled classes directly to the
rem classpath instead of looking for a jar file, since it's very common for people using sbt to use
rem the "assembly" target instead of "package".
set LAUNCHER_CLASSES=%SPARK_HOME%\launcher\target\scala-2.10\classes
if exist "%LAUNCHER_CLASSES%" (
  set LAUNCHER_CP=%LAUNCHER_CLASSES%
)
set LAUNCHER_CLASSES=%SPARK_HOME%\launcher\target\scala-2.11\classes
if exist "%LAUNCHER_CLASSES%" (
  set LAUNCHER_CP=%LAUNCHER_CLASSES%
)
goto check_launcher

:find_release_launcher
rem Use "dir /b" instead of a bare FOR wildcard so the quoted path survives
rem spaces in %SPARK_HOME%; "delims=" keeps filenames containing spaces intact.
for /f "delims=" %%d in ('dir /b "%SPARK_HOME%\lib\spark-launcher*.jar" 2^>nul') do (
  set LAUNCHER_CP=%SPARK_HOME%\lib\%%d
)

:check_launcher
if "%LAUNCHER_CP%"=="0" (
  echo Failed to find Spark launcher JAR.
  echo You need to build Spark before running this program.
  exit /b 1
)

rem Figure out where java is: prefer %JAVA_HOME% when set, otherwise rely on PATH.
set RUNNER=java
if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java

rem The launcher library prints the command to be executed in a single line suitable for being
rem executed by the batch interpreter. So read all the output of the launcher into a variable.
rem The classpath is quoted so it may contain spaces.
for /f "tokens=*" %%i in ('cmd /C ""%RUNNER%" -cp "%LAUNCHER_CP%" org.apache.spark.launcher.Main %*"') do (
  set SPARK_CMD=%%i
)
%SPARK_CMD%