Merge pull request #788 from shane-huang/sparkjavaopts
For standalone mode, add worker local env setting of SPARK_JAVA_OPTS as ...
This commit is contained in:
commit
f94fc75c3f
|
@ -16,4 +16,9 @@
|
|||
# - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT
|
||||
# - SPARK_WORKER_INSTANCES, to set the number of worker instances/processes
|
||||
# to be spawned on every slave machine
|
||||
# - SPARK_JAVA_OPTS, to set the jvm options for executor backend. Note: This is
|
||||
# only for node-specific options, whereas app-specific options should be set
|
||||
# in the application.
|
||||
# Examples of node-specific options : -Dspark.local.dir, GC related options.
|
||||
# Examples of app-specific options : -Dspark.serializer
|
||||
|
||||
|
|
|
@ -111,6 +111,7 @@ private[spark] class ExecutorRunner(
|
|||
val libraryOpts = getAppEnv("SPARK_LIBRARY_PATH")
|
||||
.map(p => List("-Djava.library.path=" + p))
|
||||
.getOrElse(Nil)
|
||||
val workerLocalOpts = Option(getenv("SPARK_JAVA_OPTS")).map(Utils.splitCommandString).getOrElse(Nil)
|
||||
val userOpts = getAppEnv("SPARK_JAVA_OPTS").map(Utils.splitCommandString).getOrElse(Nil)
|
||||
val memoryOpts = Seq("-Xms" + memory + "M", "-Xmx" + memory + "M")
|
||||
|
||||
|
@ -120,7 +121,7 @@ private[spark] class ExecutorRunner(
|
|||
Seq(sparkHome + "/bin/compute-classpath" + ext),
|
||||
extraEnvironment=appDesc.command.environment)
|
||||
|
||||
Seq("-cp", classPath) ++ libraryOpts ++ userOpts ++ memoryOpts
|
||||
Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts
|
||||
}
|
||||
|
||||
/** Spawn a thread that will redirect a given stream to a file */
|
||||
|
|
Loading…
Reference in a new issue