[SPARK-6048] SparkConf should not translate deprecated configs on set

There are multiple issues with translating on set, as outlined in the JIRA.

This PR reverts the translation logic added to `SparkConf`. In the future, after the 1.3.0 release, we will figure out a way to reorganize the internal structure more elegantly. For now, let's preserve the existing semantics of `SparkConf`, since it's a public interface. Unfortunately this means duplicating some code, but it's all internal and we can always clean it up later.
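To make the resulting semantics concrete, here is a minimal sketch (a hypothetical `MiniConf`, not Spark's actual internals): `set` stores keys verbatim, and any deprecated-key fallback happens explicitly where the value is read.

```scala
import scala.collection.concurrent.TrieMap

// Hypothetical sketch contrasting the two approaches; not Spark's internals.
class MiniConf {
  private val settings = new TrieMap[String, String]()

  // Post-revert behavior: set stores the key exactly as given, no rewriting.
  def set(key: String, value: String): this.type = {
    settings.put(key, value)
    this
  }

  def getOption(key: String): Option[String] = settings.get(key)

  // Deprecated keys are handled explicitly by the reader instead:
  // prefer the new name, fall back to the old one.
  def getWithFallback(newKey: String, oldKey: String): Option[String] =
    getOption(newKey).orElse(getOption(oldKey))
}
```

Translating inside `set` instead would rewrite the user's key as it is stored, which is what the JIRA flags as problematic.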

Author: Andrew Or <andrew@databricks.com>

Closes #4799 from andrewor14/conf-set-translate and squashes the following commits:

11c525b [Andrew Or] Move warning to driver
10e77b5 [Andrew Or] Add documentation for deprecation precedence
a369cb1 [Andrew Or] Merge branch 'master' of github.com:apache/spark into conf-set-translate
c26a9e3 [Andrew Or] Revert all translate logic in SparkConf
fef6c9c [Andrew Or] Restore deprecation logic for spark.executor.userClassPathFirst
94b4dfa [Andrew Or] Translate on get, not set
Authored by Andrew Or on 2015-03-02 16:36:42 -08:00; committed by Patrick Wendell
parent 6776cb33ea
commit 258d154c9f
5 changed files with 25 additions and 22 deletions

core/src/main/scala/org/apache/spark/SparkConf.scala

@@ -68,7 +68,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     if (value == null) {
       throw new NullPointerException("null value for " + key)
     }
-    settings.put(translateConfKey(key, warn = true), value)
+    settings.put(key, value)
     this
   }
@@ -140,7 +140,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   /** Set a parameter if it isn't already configured */
   def setIfMissing(key: String, value: String): SparkConf = {
-    settings.putIfAbsent(translateConfKey(key, warn = true), value)
+    settings.putIfAbsent(key, value)
     this
   }
@@ -176,7 +176,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   /** Get a parameter as an Option */
   def getOption(key: String): Option[String] = {
-    Option(settings.get(translateConfKey(key)))
+    Option(settings.get(key))
   }

   /** Get all parameters as a list of pairs */
@@ -229,7 +229,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   def getAppId: String = get("spark.app.id")

   /** Does the configuration contain a given parameter? */
-  def contains(key: String): Boolean = settings.containsKey(translateConfKey(key))
+  def contains(key: String): Boolean = settings.containsKey(key)

   /** Copy this object */
   override def clone: SparkConf = {
@@ -343,6 +343,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
        }
      }
    }
+
+    // Warn against the use of deprecated configs
+    deprecatedConfigs.values.foreach { dc =>
+      if (contains(dc.oldName)) {
+        dc.warn()
+      }
+    }
   }

   /**
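For context, `deprecatedConfigs` and `dc.warn()` are internals of `SparkConf`; a rough sketch of the shape such a record might take (hypothetical names and message, not the real `DeprecatedConfig`):

```scala
// Hypothetical sketch of a deprecated-config record with a warning hook.
case class DeprecatedKey(oldName: String, newName: String, version: String) {
  def warn(): Unit =
    println(s"$oldName is deprecated as of Spark $version; use $newName instead.")
}

val deprecatedConfigs: Map[String, DeprecatedKey] = Map(
  "spark.files.userClassPathFirst" -> DeprecatedKey(
    "spark.files.userClassPathFirst", "spark.executor.userClassPathFirst", "1.3"))
```

Because `validateSettings` runs when the `SparkContext` is constructed, the warning is emitted once on the driver rather than in every executor process, which is what the "Move warning to driver" commit above refers to.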

core/src/main/scala/org/apache/spark/executor/Executor.scala

@@ -92,6 +92,12 @@ private[spark] class Executor(
   private val executorActor = env.actorSystem.actorOf(
     Props(new ExecutorActor(executorId)), "ExecutorActor")

+  // Whether to load classes in user jars before those in Spark jars
+  private val userClassPathFirst: Boolean = {
+    conf.getBoolean("spark.executor.userClassPathFirst",
+      conf.getBoolean("spark.files.userClassPathFirst", false))
+  }
+
   // Create our ClassLoader
   // do this after SparkEnv creation so can access the SecurityManager
   private val urlClassLoader = createClassLoader()
@@ -309,7 +315,7 @@ private[spark] class Executor(
     val urls = userClassPath.toArray ++ currentJars.keySet.map { uri =>
       new File(uri.split("/").last).toURI.toURL
     }
-    if (conf.getBoolean("spark.executor.userClassPathFirst", false)) {
+    if (userClassPathFirst) {
       new ChildFirstURLClassLoader(urls, currentLoader)
     } else {
       new MutableURLClassLoader(urls, currentLoader)
@@ -324,14 +330,13 @@ private[spark] class Executor(
     val classUri = conf.get("spark.repl.class.uri", null)
     if (classUri != null) {
       logInfo("Using REPL class URI: " + classUri)
-      val userClassPathFirst: java.lang.Boolean =
-        conf.getBoolean("spark.executor.userClassPathFirst", false)
       try {
+        val _userClassPathFirst: java.lang.Boolean = userClassPathFirst
         val klass = Class.forName("org.apache.spark.repl.ExecutorClassLoader")
           .asInstanceOf[Class[_ <: ClassLoader]]
         val constructor = klass.getConstructor(classOf[SparkConf], classOf[String],
           classOf[ClassLoader], classOf[Boolean])
-        constructor.newInstance(conf, classUri, parent, userClassPathFirst)
+        constructor.newInstance(conf, classUri, parent, _userClassPathFirst)
       } catch {
         case _: ClassNotFoundException =>
           logError("Could not find org.apache.spark.repl.ExecutorClassLoader on classpath!")
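The nested `getBoolean` call above is the explicit fallback that replaces `SparkConf`-level translation: the deprecated key's value only supplies the default for the new key. A small usage sketch (illustrative values only):

```scala
import org.apache.spark.SparkConf

val conf = new SparkConf(loadDefaults = false)
  .set("spark.files.userClassPathFirst", "true") // only the deprecated key is set

// The old key's value serves as the default for the new key, so this
// resolves to true here; setting spark.executor.userClassPathFirst
// explicitly would override it.
val userClassPathFirst =
  conf.getBoolean("spark.executor.userClassPathFirst",
    conf.getBoolean("spark.files.userClassPathFirst", false))
```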

core/src/test/scala/org/apache/spark/SparkConfSuite.scala

@@ -197,18 +197,6 @@ class SparkConfSuite extends FunSuite with LocalSparkContext with ResetSystemProperties {
     serializer.newInstance().serialize(new StringBuffer())
   }

-  test("deprecated config keys") {
-    val conf = new SparkConf()
-      .set("spark.files.userClassPathFirst", "true")
-      .set("spark.yarn.user.classpath.first", "true")
-    assert(conf.contains("spark.files.userClassPathFirst"))
-    assert(conf.contains("spark.executor.userClassPathFirst"))
-    assert(conf.contains("spark.yarn.user.classpath.first"))
-    assert(conf.getBoolean("spark.files.userClassPathFirst", false))
-    assert(conf.getBoolean("spark.executor.userClassPathFirst", false))
-    assert(conf.getBoolean("spark.yarn.user.classpath.first", false))
-  }
 }

 class Class1 {}
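The deleted test encoded the translate-on-set behavior (setting a deprecated key made the new key visible through `contains`). Under the reverted semantics, a test would instead assert something like the following (a hypothetical sketch, not part of this patch):

```scala
test("deprecated config keys are stored verbatim") {
  val conf = new SparkConf()
    .set("spark.files.userClassPathFirst", "true")
  assert(conf.contains("spark.files.userClassPathFirst"))
  // set no longer translates, so the new key is not implicitly present
  assert(!conf.contains("spark.executor.userClassPathFirst"))
}
```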

docs/configuration.md

@@ -70,7 +70,9 @@ each line consists of a key and a value separated by whitespace. For example:
 Any values specified as flags or in the properties file will be passed on to the application
 and merged with those specified through SparkConf. Properties set directly on the SparkConf
 take highest precedence, then flags passed to `spark-submit` or `spark-shell`, then options
-in the `spark-defaults.conf` file.
+in the `spark-defaults.conf` file. A few configuration keys have been renamed since earlier
+versions of Spark; in such cases, the older key names are still accepted, but take lower
+precedence than any instance of the newer key.

 ## Viewing Spark Properties
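To illustrate the precedence described in the added documentation above (illustrative values; the keys are the ones touched by this patch):

```scala
val conf = new SparkConf()
  .set("spark.files.userClassPathFirst", "true")     // older, renamed key
  .set("spark.executor.userClassPathFirst", "false") // newer key
// A reader that prefers the new name and falls back to the old one
// resolves to false: the newer key takes precedence.
val flag = conf.getBoolean("spark.executor.userClassPathFirst",
  conf.getBoolean("spark.files.userClassPathFirst", false))
```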

yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala

@@ -955,7 +955,8 @@ object Client extends Logging {
     if (isDriver) {
       conf.getBoolean("spark.driver.userClassPathFirst", false)
     } else {
-      conf.getBoolean("spark.executor.userClassPathFirst", false)
+      conf.getBoolean("spark.executor.userClassPathFirst",
+        conf.getBoolean("spark.files.userClassPathFirst", false))
     }
   }