0368ff30dd
## What changes were proposed in this pull request?

https://issues.apache.org/jira/browse/SPARK-13973

Following discussion with srowen, the IPYTHON and IPYTHON_OPTS variables are removed. If either is set in the user's environment, pyspark will not execute and instead prints an error message. Failing noisily will force users to remove these options and learn the new configuration scheme, which is much more sustainable and less confusing.

## How was this patch tested?

Manual testing: set IPYTHON=1 and verified that the error message prints.

Author: pshearer <pshearer@massmutual.com>
Author: shearerp <shearerp@umich.edu>

Closes #12528 from shearerp/master.
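For reference, a minimal sketch of the replacement configuration described above, assuming IPython is installed (the `notebook` option value is only an example):

```bash
# Old scheme, removed in Spark 2.0 (now fails with an error):
#   IPYTHON=1 IPYTHON_OPTS='notebook' ./bin/pyspark

# New scheme: name the driver Python and its options explicitly.
export PYSPARK_DRIVER_PYTHON=ipython
export PYSPARK_DRIVER_PYTHON_OPTS='notebook'
./bin/pyspark
```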
84 lines · 3.1 KiB · Bash · Executable file
#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

if [ -z "${SPARK_HOME}" ]; then
  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi

source "${SPARK_HOME}"/bin/load-spark-env.sh
export _SPARK_CMD_USAGE="Usage: ./bin/pyspark [options]"

# In Spark 2.0, IPYTHON and IPYTHON_OPTS are removed and pyspark fails to launch if either option
# is set in the user's environment. Instead, users should set PYSPARK_DRIVER_PYTHON=ipython
# to use IPython and set PYSPARK_DRIVER_PYTHON_OPTS to pass options when starting the Python driver
# (e.g. PYSPARK_DRIVER_PYTHON_OPTS='notebook'). This supports full customization of the IPython
# and executor Python executables.
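# For example (illustrative invocation using only the variables documented
# above; 'notebook' is just a sample option value):
#   PYSPARK_DRIVER_PYTHON=ipython PYSPARK_DRIVER_PYTHON_OPTS='notebook' ./bin/pyspark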

# Determine the Python executable to use if PYSPARK_PYTHON or PYSPARK_DRIVER_PYTHON isn't set:
if hash python2.7 2>/dev/null; then
  # Attempt to use Python 2.7, if installed:
  DEFAULT_PYTHON="python2.7"
else
  DEFAULT_PYTHON="python"
fi

# Fail noisily if removed options are set
if [[ -n "$IPYTHON" || -n "$IPYTHON_OPTS" ]]; then
  echo "Error in pyspark startup:"
  echo "IPYTHON and IPYTHON_OPTS are removed in Spark 2.0+. Remove these from the environment and set PYSPARK_DRIVER_PYTHON and PYSPARK_DRIVER_PYTHON_OPTS instead."
  exit 1
fi

# Default to standard python interpreter unless told otherwise
if [[ -z "$PYSPARK_DRIVER_PYTHON" ]]; then
  PYSPARK_DRIVER_PYTHON="${PYSPARK_PYTHON:-"$DEFAULT_PYTHON"}"
fi

# Determine the Python executable to use for the executors:
if [[ -z "$PYSPARK_PYTHON" ]]; then
  if [[ $PYSPARK_DRIVER_PYTHON == *ipython* && $DEFAULT_PYTHON != "python2.7" ]]; then
    echo "IPython requires Python 2.7+; please install python2.7 or set PYSPARK_PYTHON" 1>&2
    exit 1
  else
    PYSPARK_PYTHON="$DEFAULT_PYTHON"
  fi
fi
export PYSPARK_PYTHON
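
# For example (illustrative): the driver and executor interpreters can differ,
# e.g. IPython on the driver with plain python2.7 on the executors:
#   PYSPARK_DRIVER_PYTHON=ipython PYSPARK_PYTHON=python2.7 ./bin/pyspark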

# Add the PySpark classes to the Python path:
export PYTHONPATH="${SPARK_HOME}/python/:$PYTHONPATH"
export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.9.2-src.zip:$PYTHONPATH"

# Load the PySpark shell.py script when ./pyspark is used interactively:
export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
export PYTHONSTARTUP="${SPARK_HOME}/python/pyspark/shell.py"

# For pyspark tests
if [[ -n "$SPARK_TESTING" ]]; then
  unset YARN_CONF_DIR
  unset HADOOP_CONF_DIR
  export PYTHONHASHSEED=0
  exec "$PYSPARK_DRIVER_PYTHON" -m $1
  exit
fi
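
# (In the testing branch above, the first argument is treated as a Python
# module name and run directly with the driver Python; a hypothetical
# invocation, shown only to illustrate that branch, would be:
#   SPARK_TESTING=1 ./bin/pyspark pyspark.tests
# )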

export PYSPARK_DRIVER_PYTHON
export PYSPARK_DRIVER_PYTHON_OPTS
exec "${SPARK_HOME}"/bin/spark-submit pyspark-shell-main --name "PySparkShell" "$@"