Export --files for YarnClient mode to support sending extra files to workers on a YARN cluster

This commit is contained in:
Raymond Liu 2014-01-06 16:12:25 +08:00
parent da4694a0d8
commit 67af803136
2 changed files with 5 additions and 1 deletions

View file

@@ -114,6 +114,8 @@ For example:
SPARK_YARN_APP_JAR=examples/target/scala-{{site.SCALA_VERSION}}/spark-examples-assembly-{{site.SPARK_VERSION}}.jar \
MASTER=yarn-client ./bin/spark-shell
You can also send extra files to the YARN cluster for workers to use by exporting SPARK_YARN_DIST_FILES=file1,file2, etc.
# Building Spark for Hadoop/YARN 2.2.x
See [Building Spark with Maven](building-with-maven.html) for instructions on how to build Spark using the Maven process.

View file

@@ -39,6 +39,7 @@ private[spark] class YarnClientSchedulerBackend(
val defaultWorkerNumber = "1"
val userJar = System.getenv("SPARK_YARN_APP_JAR")
val distFiles = System.getenv("SPARK_YARN_DIST_FILES")
var workerCores = System.getenv("SPARK_WORKER_CORES")
var workerMemory = System.getenv("SPARK_WORKER_MEMORY")
var workerNumber = System.getenv("SPARK_WORKER_INSTANCES")
@@ -64,7 +65,8 @@ private[spark] class YarnClientSchedulerBackend(
"--worker-memory", workerMemory,
"--worker-cores", workerCores,
"--num-workers", workerNumber,
"--master-class", "org.apache.spark.deploy.yarn.WorkerLauncher"
"--master-class", "org.apache.spark.deploy.yarn.WorkerLauncher",
"--files", distFiles
)
val args = new ClientArguments(argsArray, conf)