#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# Shell script for starting the Spark Shell REPL
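#
# A typical invocation (any options are forwarded to spark-submit), for
# example:
#
#   ./bin/spark-shell --master local[2]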

# Detect Cygwin: uname reports a string starting with "CYGWIN" there.
cygwin=false
case "$(uname)" in
  CYGWIN*) cygwin=true;;
esac

# Enter posix mode for bash
set -o posix

## Global script variables
# FWDIR is the top-level Spark directory (the parent of this script's bin/).
FWDIR="$(cd "$(dirname "$0")"/..; pwd)"

# If a help flag was passed, print our usage line plus the options that
# spark-submit understands (filtering out spark-submit's own Usage line).
if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
  echo "Usage: ./bin/spark-shell [options]"
  "$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
  exit 0
fi

function main() {
  if $cygwin; then
    # Workaround for an issue involving JLine and Cygwin
    # (see http://sourceforge.net/p/jline/bugs/40/).
    # If you're using the Mintty terminal emulator in Cygwin, you may need to
    # enable the "Backspace sends ^H" setting in the "Keys" section of the
    # Mintty options (see https://github.com/sbt/sbt/issues/562).
    stty -icanon min 1 -echo > /dev/null 2>&1
    # Tell JLine to use its UNIX terminal support directly instead of
    # auto-detecting the terminal, which misbehaves under Cygwin.
    export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
    "$FWDIR"/bin/spark-submit spark-shell "$@" --class org.apache.spark.repl.Main
    stty icanon echo > /dev/null 2>&1
  else
    export SPARK_SUBMIT_OPTS
    "$FWDIR"/bin/spark-submit spark-shell "$@" --class org.apache.spark.repl.Main
  fi
}

# Copy the restore-TTY-on-exit functions from the Scala REPL script so that
# spark-shell exits properly even in a binary distribution of Spark where
# Scala is not installed. 127 is a fallback status in case we are interrupted
# before main completes.
exit_status=127
saved_stty=""

# restore stty settings (echo in particular)
function restoreSttySettings() {
  stty $saved_stty
  saved_stty=""
}

function onExit() {
  if [[ "$saved_stty" != "" ]]; then
    restoreSttySettings
  fi
  exit $exit_status
}
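
# onExit is used on two paths: it is installed below as the INT handler, and
# it is called explicitly after main returns, so the saved terminal settings
# are restored on interrupt and on normal exit alike.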

# Trap INT so we can reenable echo if we are interrupted before completing.
trap onExit INT

# save terminal settings
saved_stty=$(stty -g 2>/dev/null)
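# (stty -g prints the settings in a form that stty can read back later, which
# is what restoreSttySettings relies on.)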
# clear on error so we don't later try to restore them
if [[ $? -ne 0 ]]; then
  saved_stty=""
fi

main "$@"

# Record the exit status lest it be overwritten, then reenable echo and
# propagate the code.
exit_status=$?
onExit