ee571d79e5
## What changes were proposed in this pull request? We use SPARK_CONF_DIR to switch the Spark conf directory, and it is visible to child processes if we explicitly export it in spark-env.sh, but with the default settings it is not. This PR exports SPARK_CONF_DIR when it is left at its default value. ### Before ``` KentKentsMacBookPro ~/Documents/spark-packages/spark-2.3.0-SNAPSHOT-bin-master bin/spark-shell --master local Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties Setting default log level to "WARN". To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel). 17/11/08 10:28:44 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 17/11/08 10:28:45 WARN Utils: Service 'SparkUI' could not bind on port 4040. Attempting port 4041. Spark context Web UI available at http://169.254.168.63:4041 Spark context available as 'sc' (master = local, app id = local-1510108125770). Spark session available as 'spark'. Welcome to ____ __ / __/__ ___ _____/ /__ _\ \/ _ \/ _ `/ __/ '_/ /___/ .__/\_,_/_/ /_/\_\ version 2.3.0-SNAPSHOT /_/ Using Scala version 2.11.8 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_65) Type in expressions to have them evaluated. Type :help for more information. scala> sys.env.get("SPARK_CONF_DIR") res0: Option[String] = None ``` ### After ``` scala> sys.env.get("SPARK_CONF_DIR") res0: Option[String] = Some(/Users/Kent/Documents/spark/conf) ``` ## How was this patch tested? vanzin Author: Kent Yao <yaooqinn@hotmail.com> Closes #19688 from yaooqinn/SPARK-22466.
58 lines
1.8 KiB
Batchfile
58 lines
1.8 KiB
Batchfile
@echo off
|
|
|
|
rem
|
|
rem Licensed to the Apache Software Foundation (ASF) under one or more
|
|
rem contributor license agreements. See the NOTICE file distributed with
|
|
rem this work for additional information regarding copyright ownership.
|
|
rem The ASF licenses this file to You under the Apache License, Version 2.0
|
|
rem (the "License"); you may not use this file except in compliance with
|
|
rem the License. You may obtain a copy of the License at
|
|
rem
|
|
rem http://www.apache.org/licenses/LICENSE-2.0
|
|
rem
|
|
rem Unless required by applicable law or agreed to in writing, software
|
|
rem distributed under the License is distributed on an "AS IS" BASIS,
|
|
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
rem See the License for the specific language governing permissions and
|
|
rem limitations under the License.
|
|
rem
|
|
|
|
rem This script loads spark-env.cmd if it exists, and ensures it is only loaded once.
|
|
rem spark-env.cmd is loaded from SPARK_CONF_DIR if set, or within the current directory's
|
|
rem conf\ subdirectory.
|
|
|
|
rem Load spark-env.cmd at most once per shell session; SPARK_ENV_LOADED is the
rem sentinel. Using `if not defined` instead of the fragile [%VAR%] == [] pattern,
rem which raises a cmd syntax error when the value contains spaces, parentheses,
rem or quotes (common in Windows paths).
if not defined SPARK_ENV_LOADED (
  set SPARK_ENV_LOADED=1

  rem Default SPARK_CONF_DIR to the conf\ directory one level above this
  rem script's own location (%~dp0) when the user has not set it.
  if not defined SPARK_CONF_DIR (
    set SPARK_CONF_DIR=%~dp0..\conf
  )

  rem Done in a subroutine so that %SPARK_CONF_DIR% assigned above is expanded
  rem at call time; inside this parenthesized block it would still expand to
  rem its pre-assignment value (no delayed expansion is enabled).
  call :LoadSparkEnv
)
|
|
|
|
rem Setting SPARK_SCALA_VERSION if not already set: detect it from whichever
rem Scala build of the assembly module exists under SPARK_HOME.

set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-2.11"
set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-2.12"

rem `if not defined` is robust where [%VAR%] == [] breaks on values containing
rem spaces, parentheses, or quotes.
if not defined SPARK_SCALA_VERSION (

  rem Builds for both Scala versions present: ambiguous, refuse to guess.
  if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
    echo "Presence of build for multiple Scala versions detected."
    echo "Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd."
    rem NOTE(review): a bare `exit 1` terminates the calling cmd.exe session
    rem (including an interactive console) rather than just this script; confirm
    rem whether `exit /b 1` plus an errorlevel check in callers is preferable.
    exit 1
  )
  if exist %ASSEMBLY_DIR2% (
    set SPARK_SCALA_VERSION=2.11
  ) else (
    set SPARK_SCALA_VERSION=2.12
  )
)
exit /b 0
|
|
|
|
:LoadSparkEnv
rem Subroutine: source the user's spark-env.cmd from SPARK_CONF_DIR, if present.
rem Guard-clause form: return immediately when there is nothing to load.
if not exist "%SPARK_CONF_DIR%\spark-env.cmd" goto :eof
call "%SPARK_CONF_DIR%\spark-env.cmd"
|