d614967b0b
As described in [SPARK-2627](https://issues.apache.org/jira/browse/SPARK-2627), we'd like Python code to automatically be checked for PEP 8 compliance by Jenkins. This pull request aims to do that.

Notes:

* We may need to install [`pep8`](https://pypi.python.org/pypi/pep8) on the build server.
* I'm expecting tests to fail now that PEP 8 compliance is being checked as part of the build. I'm fine with cleaning up any remaining PEP 8 violations as part of this pull request.
* I did not understand why the RAT and scalastyle reports are saved to text files. I did the same for the PEP 8 check, but only so that the console output style can match those for the RAT and scalastyle checks. The PEP 8 report is removed right after the check is complete.
* Updates to the ["Contributing to Spark"](https://cwiki.apache.org/confluence/display/SPARK/Contributing+to+Spark) guide will be submitted elsewhere, as I don't believe that text is part of the Spark repo.

Author: Nicholas Chammas <nicholas.chammas@gmail.com>
Author: nchammas <nicholas.chammas@gmail.com>

Closes #1744 from nchammas/master and squashes the following commits:

274b238 [Nicholas Chammas] [SPARK-2627] [PySpark] minor indentation changes
983d963 [nchammas] Merge pull request #5 from apache/master
1db5314 [nchammas] Merge pull request #4 from apache/master
0e0245f [Nicholas Chammas] [SPARK-2627] undo erroneous whitespace fixes
bf30942 [Nicholas Chammas] [SPARK-2627] PEP8: comment spacing
6db9a44 [nchammas] Merge pull request #3 from apache/master
7b4750e [Nicholas Chammas] merge upstream changes
91b7584 [Nicholas Chammas] [SPARK-2627] undo unnecessary line breaks
44e3e56 [Nicholas Chammas] [SPARK-2627] use tox.ini to exclude files
b09fae2 [Nicholas Chammas] don't wrap comments unnecessarily
bfb9f9f [Nicholas Chammas] [SPARK-2627] keep up with the PEP 8 fixes
9da347f [nchammas] Merge pull request #2 from apache/master
aa5b4b5 [Nicholas Chammas] [SPARK-2627] follow Spark bash style for if blocks
d0a83b9 [Nicholas Chammas] [SPARK-2627] check that pep8 downloaded fine
dffb5dd [Nicholas Chammas] [SPARK-2627] download pep8 at runtime
a1ce7ae [Nicholas Chammas] [SPARK-2627] space out test report sections
21da538 [Nicholas Chammas] [SPARK-2627] it's PEP 8, not PEP8
6f4900b [Nicholas Chammas] [SPARK-2627] more misc PEP 8 fixes
fe57ed0 [Nicholas Chammas] removing merge conflict backups
9c01d4c [nchammas] Merge pull request #1 from apache/master
9a66cb0 [Nicholas Chammas] resolving merge conflicts
a31ccc4 [Nicholas Chammas] [SPARK-2627] miscellaneous PEP 8 fixes
beaa9ac [Nicholas Chammas] [SPARK-2627] fail check on non-zero status
723ed39 [Nicholas Chammas] always delete the report file
0541ebb [Nicholas Chammas] [SPARK-2627] call Python linter from run-tests
12440fa [Nicholas Chammas] [SPARK-2627] add Scala linter
61c07b9 [Nicholas Chammas] [SPARK-2627] add Python linter
75ad552 [Nicholas Chammas] make check output style consistent
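Per the notes and commit titles above, the check's flow is roughly: download `pep8` at runtime, verify the download, run it over the Python sources (with exclusions kept in `tox.ini`), mirror the report to the console, fail on non-zero status, and always delete the report file. A minimal Python sketch of that flow follows; the PR's actual linter is a shell script wired into `run-tests`, so the URL, paths, and script structure here are illustrative placeholders only:

    #!/usr/bin/env python
    # Illustrative sketch only -- the PR's real linter is a bash script
    # invoked from run-tests. URL and paths are placeholders, not the
    # PR's actual values.
    import os
    import subprocess
    import sys
    import urllib

    PEP8_URL = "https://raw.githubusercontent.com/jcrocholl/pep8/1.5.7/pep8.py"  # assumed source
    PEP8_PATH = "pep8.py"
    REPORT_PATH = "pep8-report.txt"


    def lint_python():
        # Download pep8 at runtime so nothing needs to be preinstalled on Jenkins.
        urllib.urlretrieve(PEP8_URL, PEP8_PATH)
        if not os.path.isfile(PEP8_PATH):
            sys.exit("failed to download pep8")
        try:
            # pep8 picks up its exclude list from the [pep8] section of tox.ini
            # when run from the project root. The report goes to a text file only
            # so the console output can match the RAT and scalastyle checks.
            with open(REPORT_PATH, "w") as report:
                status = subprocess.call(["python", PEP8_PATH, "./python"],
                                         stdout=report, stderr=subprocess.STDOUT)
            if status != 0:
                print open(REPORT_PATH).read()
        finally:
            # The report is always removed once the check completes.
            os.remove(REPORT_PATH)
        if status != 0:
            sys.exit(status)  # fail the build on any PEP 8 violation


    if __name__ == "__main__":
        lint_python()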
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
|
|
>>> from pyspark.conf import SparkConf
|
|
>>> from pyspark.context import SparkContext
|
|
>>> conf = SparkConf()
|
|
>>> conf.setMaster("local").setAppName("My app")
|
|
<pyspark.conf.SparkConf object at ...>
|
|
>>> conf.get("spark.master")
|
|
u'local'
|
|
>>> conf.get("spark.app.name")
|
|
u'My app'
|
|
>>> sc = SparkContext(conf=conf)
|
|
>>> sc.master
|
|
u'local'
|
|
>>> sc.appName
|
|
u'My app'
|
|
>>> sc.sparkHome is None
|
|
True
|
|
|
|
>>> conf = SparkConf(loadDefaults=False)
|
|
>>> conf.setSparkHome("/path")
|
|
<pyspark.conf.SparkConf object at ...>
|
|
>>> conf.get("spark.home")
|
|
u'/path'
|
|
>>> conf.setExecutorEnv("VAR1", "value1")
|
|
<pyspark.conf.SparkConf object at ...>
|
|
>>> conf.setExecutorEnv(pairs = [("VAR3", "value3"), ("VAR4", "value4")])
|
|
<pyspark.conf.SparkConf object at ...>
|
|
>>> conf.get("spark.executorEnv.VAR1")
|
|
u'value1'
|
|
>>> print conf.toDebugString()
|
|
spark.executorEnv.VAR1=value1
|
|
spark.executorEnv.VAR3=value3
|
|
spark.executorEnv.VAR4=value4
|
|
spark.home=/path
|
|
>>> sorted(conf.getAll(), key=lambda p: p[0])
|
|
[(u'spark.executorEnv.VAR1', u'value1'), (u'spark.executorEnv.VAR3', u'value3'), \
|
|
(u'spark.executorEnv.VAR4', u'value4'), (u'spark.home', u'/path')]
|
|
"""
|
|
|
|
|
|
class SparkConf(object):

    """
    Configuration for a Spark application. Used to set various Spark
    parameters as key-value pairs.

    Most of the time, you would create a SparkConf object with
    C{SparkConf()}, which will load values from C{spark.*} Java system
    properties as well. In this case, any parameters you set directly on
    the C{SparkConf} object take priority over system properties.

    For unit tests, you can also call C{SparkConf(false)} to skip
    loading external settings and get the same configuration no matter
    what the system properties are.

    All setter methods in this class support chaining. For example,
    you can write C{conf.setMaster("local").setAppName("My app")}.

    Note that once a SparkConf object is passed to Spark, it is cloned
    and can no longer be modified by the user.
    """

    def __init__(self, loadDefaults=True, _jvm=None, _jconf=None):
        """
        Create a new Spark configuration.

        @param loadDefaults: whether to load values from Java system
               properties (True by default)
        @param _jvm: internal parameter used to pass a handle to the
               Java VM; does not need to be set by users
        @param _jconf: Optionally pass in an existing SparkConf handle
               to use its parameters
        """
        if _jconf:
            self._jconf = _jconf
        else:
            from pyspark.context import SparkContext
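            # _ensure_initialized() launches the shared Py4J gateway if it
            # isn't up yet, which is what provides the JVM handle used below.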
            SparkContext._ensure_initialized()
            _jvm = _jvm or SparkContext._jvm
            self._jconf = _jvm.SparkConf(loadDefaults)

    def set(self, key, value):
        """Set a configuration property."""
        self._jconf.set(key, unicode(value))
        return self

    def setIfMissing(self, key, value):
        """Set a configuration property, if not already set."""
        if self.get(key) is None:
            self.set(key, value)
        return self

    def setMaster(self, value):
        """Set master URL to connect to."""
        self._jconf.setMaster(value)
        return self

    def setAppName(self, value):
        """Set application name."""
        self._jconf.setAppName(value)
        return self

    def setSparkHome(self, value):
        """Set path where Spark is installed on worker nodes."""
        self._jconf.setSparkHome(value)
        return self

    def setExecutorEnv(self, key=None, value=None, pairs=None):
        """Set an environment variable to be passed to executors."""
        if (key is not None and pairs is not None) or (key is None and pairs is None):
            raise Exception("Either pass one key-value pair or a list of pairs")
        elif key is not None:
            self._jconf.setExecutorEnv(key, value)
        elif pairs is not None:
            for (k, v) in pairs:
                self._jconf.setExecutorEnv(k, v)
        return self

    def setAll(self, pairs):
        """
        Set multiple parameters, passed as a list of key-value pairs.

        @param pairs: list of key-value pairs to set
        """
        for (k, v) in pairs:
            self._jconf.set(k, v)
        return self

    def get(self, key, defaultValue=None):
        """Get the configured value for some key, or return a default otherwise."""
        if defaultValue is None:  # Py4J doesn't call the right get() if we pass None
            if not self._jconf.contains(key):
                return None
            return self._jconf.get(key)
        else:
            return self._jconf.get(key, defaultValue)

    def getAll(self):
        """Get all values as a list of key-value pairs."""
        pairs = []
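        # The JVM side returns Scala Tuple2 objects; _1() and _2() unpack
        # the key and value of each pair.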
        for elem in self._jconf.getAll():
            pairs.append((elem._1(), elem._2()))
        return pairs

    def contains(self, key):
        """Does this configuration contain a given key?"""
        return self._jconf.contains(key)

    def toDebugString(self):
        """
        Returns a printable version of the configuration, as a list of
        key=value pairs, one per line.
        """
        return self._jconf.toDebugString()


def _test():
    import doctest
    (failure_count, test_count) = doctest.testmod(optionflags=doctest.ELLIPSIS)
    if failure_count:
        exit(-1)


if __name__ == "__main__":
    _test()
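The module doctests above never exercise `get` with an explicit default or `setIfMissing`. A short doctest-style sketch of those two calls (illustrative only, not part of the file; like the doctests above it assumes a live Py4J gateway, and the key names are placeholders -- note the second `setIfMissing` is a no-op because the key is already set):

    >>> conf = SparkConf(loadDefaults=False)
    >>> conf.get("spark.nonexistent.key", "fallback")
    u'fallback'
    >>> conf.setIfMissing("spark.master", "local")
    <pyspark.conf.SparkConf object at ...>
    >>> conf.setIfMissing("spark.master", "yarn")
    <pyspark.conf.SparkConf object at ...>
    >>> conf.get("spark.master")
    u'local'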