For standalone mode, add the worker's local SPARK_JAVA_OPTS environment setting as the default, and let the application's environment override these default options if applicable
Signed-off-by: shane-huang <shengsheng.huang@intel.com>
This commit is contained in:
parent
7c4b7a53b1
commit
cbc5107e36
|
@ -111,6 +111,7 @@ private[spark] class ExecutorRunner(
|
|||
val libraryOpts = getAppEnv("SPARK_LIBRARY_PATH")
|
||||
.map(p => List("-Djava.library.path=" + p))
|
||||
.getOrElse(Nil)
|
||||
val workerLocalOpts = Option(getenv("SPARK_JAVA_OPTS")).map(Utils.splitCommandString).getOrElse(Nil)
|
||||
val userOpts = getAppEnv("SPARK_JAVA_OPTS").map(Utils.splitCommandString).getOrElse(Nil)
|
||||
val memoryOpts = Seq("-Xms" + memory + "M", "-Xmx" + memory + "M")
|
||||
|
||||
|
@ -120,7 +121,7 @@ private[spark] class ExecutorRunner(
|
|||
Seq(sparkHome + "/bin/compute-classpath" + ext),
|
||||
extraEnvironment=appDesc.command.environment)
|
||||
|
||||
Seq("-cp", classPath) ++ libraryOpts ++ userOpts ++ memoryOpts
|
||||
Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts
|
||||
}
|
||||
|
||||
/** Spawn a thread that will redirect a given stream to a file */
|
||||
|
|
Loading…
Reference in a new issue