spark-instrumented-optimizer/pyspark


#!/usr/bin/env bash
# Figure out where the Scala framework is installed
FWDIR="$(cd "`dirname "$0"`"; pwd)"
# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"
# Load environment variables from conf/spark-env.sh, if it exists
if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
  . "$FWDIR/conf/spark-env.sh"
fi
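# As an illustration (hypothetical values; actual settings are site-specific),
# conf/spark-env.sh could contain overrides such as:
#   export PYSPARK_PYTHON=/usr/bin/python2.7
#   export SPARK_MEM=2g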
# Figure out which Python executable to use
if [ -z "$PYSPARK_PYTHON" ] ; then
  PYSPARK_PYTHON="python"
fi
export PYSPARK_PYTHON
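# For example, a user can pick an interpreter for a single run without editing
# conf/spark-env.sh:
#   PYSPARK_PYTHON=python2.7 ./pyspark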
# Add the PySpark classes to the Python path:
export PYTHONPATH="$SPARK_HOME/python/:$PYTHONPATH"
# Load the PySpark shell.py script when ./pyspark is used interactively:
export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
export PYTHONSTARTUP="$FWDIR/python/pyspark/shell.py"
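# Python reads PYTHONSTARTUP only for interactive sessions, so shell.py is not
# run when a script file is passed as an argument. Saving the previous value in
# OLD_PYTHONSTARTUP presumably lets shell.py run the user's own startup file
# after Spark's shell has initialized.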
# Launch with `scala` by default:
if [[ "$SPARK_LAUNCH_WITH_SCALA" != "0" ]] ; then
export SPARK_LAUNCH_WITH_SCALA=1
fi
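# (Only an explicit SPARK_LAUNCH_WITH_SCALA=0 survives this check; any other
# value is normalized to 1 for the downstream JVM launch scripts.)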
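# Replace this shell process with the chosen Python interpreter, forwarding all
# command-line arguments.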
exec "$PYSPARK_PYTHON" "$@"