#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
|
|
|
|
An interactive shell.
|
|
|
|
|
2013-01-20 04:57:44 -05:00
|
|
|
This file is designed to be launched as a PYTHONSTARTUP script.
|
2013-01-02 00:25:49 -05:00
|
|
|
"""
import sys
if sys.version_info[0] != 2:
    print("Error: Default Python used is Python%s" % sys.version_info.major)
    print("\tSet env variable PYSPARK_PYTHON to Python2 binary and re-run it.")
    sys.exit(1)
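
# For example (illustrative only, assuming a Python 2 interpreter named
# "python2.7" is on the PATH):
#
#   PYSPARK_PYTHON=python2.7 bin/pyspark
#
# re-runs the shell with Python 2 when "python" resolves to Python 3.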
import os
import platform
import pyspark
from pyspark.context import SparkContext
from pyspark.storagelevel import StorageLevel
# ADD_FILES is the equivalent of ADD_JARS: a comma-separated list of files
# that is passed to the SparkContext below as pyFiles.
add_files = os.environ.get("ADD_FILES").split(',') if os.environ.get("ADD_FILES") is not None else None
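
# Illustrative usage (hypothetical paths, assuming the semantics above):
#
#   ADD_FILES=/path/to/helpers.py,/path/to/extras.py bin/pyspark
#
# which ships both files with the job and lists them in the banner below.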
if os.environ.get("SPARK_EXECUTOR_URI"):
    SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])
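
# Context note (an assumption about typical usage, e.g. on Mesos):
# spark.executor.uri points executors at a Spark distribution to download,
# which is why it is set here, before the SparkContext below is constructed.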
sc = SparkContext(appName="PySparkShell", pyFiles=add_files)

print("""Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/ '_/
   /__ / .__/\_,_/_/ /_/\_\   version 1.0.0-SNAPSHOT
      /_/
""")
print("Using Python version %s (%s, %s)" % (
|
2013-09-01 01:38:32 -04:00
|
|
|
platform.python_version(),
|
|
|
|
platform.python_build()[0],
|
2014-04-16 22:05:40 -04:00
|
|
|
platform.python_build()[1]))
print("SparkContext available as sc.")
if add_files is not None:
    print("Adding files: [%s]" % ", ".join(add_files))
# The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
# which allows us to execute the user's PYTHONSTARTUP file:
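# An illustrative sketch of what the launcher is assumed to do (shell syntax,
# not executed here) before starting this script:
#
#   OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
#   PYTHONSTARTUP="$SPARK_HOME/python/pyspark/shell.py"
#
# so the user's own startup file can still be honored below.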
_pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')
if _pythonstartup and os.path.isfile(_pythonstartup):
    execfile(_pythonstartup)