#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
|
|
|
|
>>> from pyspark.conf import SparkConf
|
|
|
|
>>> from pyspark.context import SparkContext
|
|
|
|
>>> conf = SparkConf()
|
|
|
|
>>> conf.setMaster("local").setAppName("My app")
|
|
|
|
<pyspark.conf.SparkConf object at ...>
|
|
|
|
>>> conf.get("spark.master")
|
|
|
|
u'local'
|
2013-12-29 22:19:33 -05:00
|
|
|
>>> conf.get("spark.app.name")
|
2013-12-29 14:03:39 -05:00
|
|
|
u'My app'
|
|
|
|
>>> sc = SparkContext(conf=conf)
|
|
|
|
>>> sc.master
|
|
|
|
u'local'
|
|
|
|
>>> sc.appName
|
|
|
|
u'My app'
|
2014-07-16 00:34:05 -04:00
|
|
|
>>> sc.sparkHome is None
|
2013-12-29 14:03:39 -05:00
|
|
|
True
|
|
|
|
|
2014-05-14 17:57:17 -04:00
|
|
|
>>> conf = SparkConf(loadDefaults=False)
|
2013-12-29 14:03:39 -05:00
|
|
|
>>> conf.setSparkHome("/path")
|
|
|
|
<pyspark.conf.SparkConf object at ...>
|
|
|
|
>>> conf.get("spark.home")
|
|
|
|
u'/path'
|
|
|
|
>>> conf.setExecutorEnv("VAR1", "value1")
|
|
|
|
<pyspark.conf.SparkConf object at ...>
|
|
|
|
>>> conf.setExecutorEnv(pairs = [("VAR3", "value3"), ("VAR4", "value4")])
|
|
|
|
<pyspark.conf.SparkConf object at ...>
|
|
|
|
>>> conf.get("spark.executorEnv.VAR1")
|
|
|
|
u'value1'
|
2015-04-16 19:20:57 -04:00
|
|
|
>>> print(conf.toDebugString())
|
2013-12-30 22:17:28 -05:00
|
|
|
spark.executorEnv.VAR1=value1
|
|
|
|
spark.executorEnv.VAR3=value3
|
|
|
|
spark.executorEnv.VAR4=value4
|
|
|
|
spark.home=/path
|
2013-12-29 14:03:39 -05:00
|
|
|
>>> sorted(conf.getAll(), key=lambda p: p[0])
|
2014-07-22 01:30:53 -04:00
|
|
|
[(u'spark.executorEnv.VAR1', u'value1'), (u'spark.executorEnv.VAR3', u'value3'), \
|
|
|
|
(u'spark.executorEnv.VAR4', u'value4'), (u'spark.home', u'/path')]
|
2013-12-29 14:03:39 -05:00
|
|
|
"""
|
|
|
|
|
2014-09-03 14:49:45 -04:00
|
|
|
__all__ = ['SparkConf']
|
|
|
|
|
2015-04-16 19:20:57 -04:00
|
|
|
import sys
|
|
|
|
import re
|
|
|
|
|
|
|
|
if sys.version > '3':
|
|
|
|
unicode = str
|
|
|
|
__doc__ = re.sub(r"(\W|^)[uU](['])", r'\1\2', __doc__)
|
|
|
|
|
2013-12-29 14:03:39 -05:00
|
|
|
|
|
|
|
class SparkConf(object):

    """
    Configuration for a Spark application. Used to set various Spark
    parameters as key-value pairs.

    Most of the time, you would create a SparkConf object with
    C{SparkConf()}, which will load values from C{spark.*} Java system
    properties as well. In this case, any parameters you set directly on
    the C{SparkConf} object take priority over system properties.

    For unit tests, you can also call C{SparkConf(false)} to skip
    loading external settings and get the same configuration no matter
    what the system properties are.

    All setter methods in this class support chaining. For example,
    you can write C{conf.setMaster("local").setAppName("My app")}.

    Note that once a SparkConf object is passed to Spark, it is cloned
    and can no longer be modified by the user.
    """

    def __init__(self, loadDefaults=True, _jvm=None, _jconf=None):
        """
        Create a new Spark configuration.

        :param loadDefaults: whether to load values from Java system
               properties (True by default)
        :param _jvm: internal parameter used to pass a handle to the
               Java VM; does not need to be set by users
        :param _jconf: Optionally pass in an existing SparkConf handle
               to use its parameters
        """
        if _jconf:
            # Wrap an existing JVM-side SparkConf instead of creating one.
            self._jconf = _jconf
        else:
            # Imported here (not at module level) to avoid a circular
            # import between pyspark.conf and pyspark.context.
            from pyspark.context import SparkContext
            SparkContext._ensure_initialized()
            _jvm = _jvm or SparkContext._jvm
            self._jconf = _jvm.SparkConf(loadDefaults)

    def set(self, key, value):
        """Set a configuration property."""
        # Coerce to unicode so non-string values (ints, booleans, ...)
        # cross the Py4J bridge as strings, matching JVM expectations.
        self._jconf.set(key, unicode(value))
        return self

    def setIfMissing(self, key, value):
        """Set a configuration property, if not already set."""
        if self.get(key) is None:
            self.set(key, value)
        return self

    def setMaster(self, value):
        """Set master URL to connect to."""
        self._jconf.setMaster(value)
        return self

    def setAppName(self, value):
        """Set application name."""
        self._jconf.setAppName(value)
        return self

    def setSparkHome(self, value):
        """Set path where Spark is installed on worker nodes."""
        self._jconf.setSparkHome(value)
        return self

    def setExecutorEnv(self, key=None, value=None, pairs=None):
        """Set an environment variable to be passed to executors.

        Pass either a single C{key}/C{value} pair or a list of pairs
        via C{pairs} -- exactly one of the two forms, never both.
        """
        if (key is not None and pairs is not None) or (key is None and pairs is None):
            raise Exception("Either pass one key-value pair or a list of pairs")
        elif key is not None:
            self._jconf.setExecutorEnv(key, value)
        elif pairs is not None:
            for (k, v) in pairs:
                self._jconf.setExecutorEnv(k, v)
        return self

    def setAll(self, pairs):
        """
        Set multiple parameters, passed as a list of key-value pairs.

        :param pairs: list of key-value pairs to set
        """
        for (k, v) in pairs:
            # Route through set() so each value receives the same unicode
            # coercion as a single-key update; calling self._jconf.set()
            # directly would skip it and behave differently for
            # non-string values.
            self.set(k, v)
        return self

    def get(self, key, defaultValue=None):
        """Get the configured value for some key, or return a default otherwise."""
        if defaultValue is None:   # Py4J doesn't call the right get() if we pass None
            if not self._jconf.contains(key):
                return None
            return self._jconf.get(key)
        else:
            return self._jconf.get(key, defaultValue)

    def getAll(self):
        """Get all values as a list of key-value pairs."""
        pairs = []
        for elem in self._jconf.getAll():
            # Each element is a Scala Tuple2; _1()/_2() unpack key and value.
            pairs.append((elem._1(), elem._2()))
        return pairs

    def contains(self, key):
        """Does this configuration contain a given key?"""
        return self._jconf.contains(key)

    def toDebugString(self):
        """
        Returns a printable version of the configuration, as a list of
        key=value pairs, one per line.
        """
        return self._jconf.toDebugString()
def _test():
|
|
|
|
import doctest
|
|
|
|
(failure_count, test_count) = doctest.testmod(optionflags=doctest.ELLIPSIS)
|
|
|
|
if failure_count:
|
|
|
|
exit(-1)
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
|
|
|
|
_test()
|