Do not copy local jars given to SparkContext in yarn mode, since the context is not running locally. This bug causes failures when jars cannot be found. Example code (such as spark.examples.SparkPi) cannot work in yarn mode without this fix.
This commit is contained in:
parent
0d0a47cb4f
commit
a613628c50
|
@ -102,6 +102,7 @@ class SparkContext(
|
|||
}
|
||||
|
||||
private val isLocal = (master == "local" || master.startsWith("local["))
|
||||
private val isYarn = (master == "yarn-standalone")
|
||||
|
||||
// Create the Spark execution environment (cache, map output tracker, etc)
|
||||
private[spark] val env = SparkEnv.createFromSystemProperties(
|
||||
|
@ -577,13 +578,20 @@ class SparkContext(
|
|||
} else {
|
||||
val uri = new URI(path)
|
||||
val key = uri.getScheme match {
|
||||
case null | "file" => env.httpFileServer.addJar(new File(uri.getPath))
|
||||
case null | "file" =>
|
||||
if (!isYarn)
|
||||
env.httpFileServer.addJar(new File(uri.getPath))
|
||||
else
|
||||
null
|
||||
case _ => path
|
||||
}
|
||||
|
||||
if (key != null) {
|
||||
addedJars(key) = System.currentTimeMillis
|
||||
logInfo("Added JAR " + path + " at " + key + " with timestamp " + addedJars(key))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear the job's list of JARs added by `addJar` so that they do not get downloaded to
|
||||
|
|
Loading…
Reference in a new issue