Make sure that empty string is filtered out when we get the secondary jars from conf

Author: DB Tsai <dbtsai@dbtsai.com>

Closes #1027 from dbtsai/dbtsai-classloader and squashes the following commits:

9ac6be3 [DB Tsai] Fixed line too long
c9c7ad7 [DB Tsai] Make sure that empty string is filtered out when we get the secondary jars from conf.
DB Tsai 2014-06-09 22:18:50 -07:00 committed by Xiangrui Meng
parent a9ec033c8c
commit 6f2db8c2f5

@@ -221,7 +221,7 @@ trait ClientBase extends Logging {
       }
     }
 
-    var cachedSecondaryJarLinks = ListBuffer.empty[String]
+    val cachedSecondaryJarLinks = ListBuffer.empty[String]
     val fileLists = List( (args.addJars, LocalResourceType.FILE, true),
       (args.files, LocalResourceType.FILE, false),
       (args.archives, LocalResourceType.ARCHIVE, false) )
@@ -502,12 +502,14 @@ object ClientBase extends Logging {
     def addClasspathEntry(path: String) = YarnSparkHadoopUtil.addToEnvironment(env,
       Environment.CLASSPATH.name, path, File.pathSeparator)
     /** Add entry to the classpath. Interpreted as a path relative to the working directory. */
-    def addPwdClasspathEntry(entry: String) = addClasspathEntry(Environment.PWD.$() + Path.SEPARATOR + entry)
+    def addPwdClasspathEntry(entry: String) =
+      addClasspathEntry(Environment.PWD.$() + Path.SEPARATOR + entry)
 
     extraClassPath.foreach(addClasspathEntry)
 
     val cachedSecondaryJarLinks =
       sparkConf.getOption(CONF_SPARK_YARN_SECONDARY_JARS).getOrElse("").split(",")
+        .filter(_.nonEmpty)
     // Normally the users app.jar is last in case conflicts with spark jars
     if (sparkConf.get("spark.yarn.user.classpath.first", "false").toBoolean) {
       addPwdClasspathEntry(APP_JAR)
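
For context, a minimal standalone Scala sketch (not part of the patch; names are illustrative) of the String.split behaviour the added .filter(_.nonEmpty) guards against: splitting the empty fallback string still yields one element, the empty string, which would otherwise end up as a bogus secondary-jar / classpath entry.

// Standalone sketch, not part of the patch.
object SecondaryJarsSplitDemo {
  def main(args: Array[String]): Unit = {
    // Hypothetical stand-in for the secondary-jars conf value not being set.
    val fromConf: Option[String] = None

    val unfiltered = fromConf.getOrElse("").split(",")
    println(unfiltered.length)   // 1 -- a single spurious "" entry

    val filtered = fromConf.getOrElse("").split(",").filter(_.nonEmpty)
    println(filtered.length)     // 0 -- empty entries dropped, as the patch does
  }
}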