Properly show Spark properties on web UI, and change app name property

commit 994f080f8a
parent eaa8a68ff0
Author: Matei Zaharia
Date:   2013-12-29 22:19:33 -05:00
6 changed files with 15 additions and 12 deletions

core/src/main/scala/org/apache/spark/SparkConf.scala

@@ -54,7 +54,7 @@ class SparkConf(loadDefaults: Boolean) extends Serializable with Cloneable {
   /** Set a name for your application. Shown in the Spark web UI. */
   def setAppName(name: String): SparkConf = {
     if (name != null) {
-      settings("spark.appName") = name
+      settings("spark.app.name") = name
     }
     this
   }
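
For reference, a minimal sketch of what the rename means for callers, using only the SparkConf API visible in this diff (loadDefaults = false just keeps the example self-contained):

import org.apache.spark.SparkConf

// The app name now lives under the dot-separated key "spark.app.name",
// consistent with "spark.master" and "spark.home".
val conf = new SparkConf(loadDefaults = false)
conf.setAppName("My app")
assert(conf.get("spark.app.name") == "My app")  // was "spark.appName" before this commit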

core/src/main/scala/org/apache/spark/SparkContext.scala

@@ -112,7 +112,7 @@ class SparkContext(
   if (!conf.contains("spark.master")) {
     throw new SparkException("A master URL must be set in your configuration")
   }
-  if (!conf.contains("spark.appName")) {
+  if (!conf.contains("spark.app.name")) {
     throw new SparkException("An application name must be set in your configuration")
   }
@@ -127,7 +127,7 @@ class SparkContext(
   }
   val master = conf.get("spark.master")
-  val appName = conf.get("spark.appName")
+  val appName = conf.get("spark.app.name")
   val isLocal = (master == "local" || master.startsWith("local["))
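
A hedged sketch of the validation these two hunks rely on (the constructor shape is taken from the hunk headers; the "local" master URL is only for illustration):

// A conf that has a master but no app name fails the second check:
val conf = new SparkConf(loadDefaults = false)
conf.set("spark.master", "local")
// new SparkContext(conf)         // would throw SparkException: no "spark.app.name"
conf.setAppName("My app")
val sc = new SparkContext(conf)   // passes both required-parameter checks
sc.stop()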

core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala

@@ -48,12 +48,15 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
     def jvmTable =
       UIUtils.listingTable(Seq("Name", "Value"), jvmRow, jvmInformation, fixedWidth = true)

-    val properties = System.getProperties.iterator.toSeq
-    val classPathProperty = properties.find { case (k, v) =>
-      k.contains("java.class.path")
+    val sparkProperties = sc.conf.getAll.sorted
+
+    val systemProperties = System.getProperties.iterator.toSeq
+    val classPathProperty = systemProperties.find { case (k, v) =>
+      k == "java.class.path"
     }.getOrElse(("", ""))
-    val sparkProperties = properties.filter(_._1.startsWith("spark")).sorted
-    val otherProperties = properties.diff(sparkProperties :+ classPathProperty).sorted
+    val otherProperties = systemProperties.filter { case (k, v) =>
+      k != "java.class.path" && !k.startsWith("spark.")
+    }.sorted

     val propertyHeaders = Seq("Name", "Value")
     def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
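
The rewritten block sources Spark properties from sc.conf.getAll instead of scraping System.getProperties, and the remaining system properties are matched by exact key rather than substring. A self-contained sketch of the same partitioning logic on made-up data (all property values here are hypothetical):

// Stand-ins for System.getProperties, as (key, value) pairs:
val systemProperties = Seq(
  "os.name" -> "Linux",                 // an ordinary system property
  "java.class.path" -> "/opt/jars/*",   // pulled out into its own table
  "spark.local.dir" -> "/tmp")          // a spark.* key, excluded below
val classPathProperty = systemProperties
  .find { case (k, _) => k == "java.class.path" }
  .getOrElse(("", ""))
val otherProperties = systemProperties.filter { case (k, _) =>
  k != "java.class.path" && !k.startsWith("spark.")
}.sorted
assert(otherProperties == Seq("os.name" -> "Linux"))

Reading Spark properties from sc.conf also means settings made programmatically on the conf, not just ones passed as -D system properties, now show up on the Environment tab.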

core/src/test/scala/org/apache/spark/SparkConfSuite.scala

@@ -46,7 +46,7 @@ class SparkConfSuite extends FunSuite with LocalSparkContext {
     conf.setExecutorEnv(Seq(("VAR2", "value2"), ("VAR3", "value3")))
     assert(conf.get("spark.master") === "local[3]")
-    assert(conf.get("spark.appName") === "My app")
+    assert(conf.get("spark.app.name") === "My app")
     assert(conf.get("spark.home") === "/path")
     assert(conf.get("spark.jars") === "a.jar,b.jar")
     assert(conf.get("spark.executorEnv.VAR1") === "value1")
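
For completeness, a sketch of the key mapping the last assertion depends on; the Seq overload is the one called in the hunk, and the per-variable "spark.executorEnv." prefix is assumed from the assertion on VAR1:

// Each executor environment variable is stored under its own prefixed key:
val conf = new SparkConf(loadDefaults = false)
conf.setExecutorEnv(Seq(("VAR2", "value2"), ("VAR3", "value3")))
assert(conf.get("spark.executorEnv.VAR2") == "value2")
assert(conf.get("spark.executorEnv.VAR3") == "value3")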

python/pyspark/conf.py

@@ -23,7 +23,7 @@
 <pyspark.conf.SparkConf object at ...>
 >>> conf.get("spark.master")
 u'local'
->>> conf.get("spark.appName")
+>>> conf.get("spark.app.name")
 u'My app'
 >>> sc = SparkContext(conf=conf)
 >>> sc.master

python/pyspark/context.py

@@ -104,13 +104,13 @@ class SparkContext(object):
         # Check that we have at least the required parameters
         if not self.conf.contains("spark.master"):
             raise Exception("A master URL must be set in your configuration")
-        if not self.conf.contains("spark.appName"):
+        if not self.conf.contains("spark.app.name"):
             raise Exception("An application name must be set in your configuration")

         # Read back our properties from the conf in case we loaded some of them from
         # the classpath or an external config file
         self.master = self.conf.get("spark.master")
-        self.appName = self.conf.get("spark.appName")
+        self.appName = self.conf.get("spark.app.name")
         self.sparkHome = self.conf.getOrElse("spark.home", None)
         for (k, v) in self.conf.getAll():
             if k.startswith("spark.executorEnv."):