Fix 'IPYTHON=1 ./pyspark' throwing 'ValueError: Cannot run multiple SparkContexts at once'

This commit is contained in:
Nick Pentreath 2013-12-12 13:08:59 +02:00
parent d2efe13574
commit 8cdfb08c47

View file

@@ -59,8 +59,7 @@ if [ -n "$IPYTHON_OPTS" ]; then
fi
if [[ "$IPYTHON" = "1" ]] ; then
IPYTHON_OPTS=${IPYTHON_OPTS:--i}
-  exec ipython "$IPYTHON_OPTS" -c "%run $PYTHONSTARTUP"
+  exec ipython "$IPYTHON_OPTS" "$@"
else
exec "$PYSPARK_PYTHON" "$@"
fi