Merge pull request #467 from squito/executor_job_id
include jobid in Executor commandline args
This commit is contained in:
commit
beb7ab8708
|
@ -77,7 +77,7 @@ private[spark] class ExecutorRunner(
|
|||
val command = jobDesc.command
|
||||
val script = if (System.getProperty("os.name").startsWith("Windows")) "run.cmd" else "run";
|
||||
val runScript = new File(sparkHome, script).getCanonicalPath
|
||||
Seq(runScript, command.mainClass) ++ command.arguments.map(substituteVariables)
|
||||
Seq(runScript, command.mainClass) ++ (command.arguments ++ Seq(jobId)).map(substituteVariables)
|
||||
}
|
||||
|
||||
/** Spawn a thread that will redirect a given stream to a file */
|
||||
|
|
|
@ -68,8 +68,9 @@ private[spark] object StandaloneExecutorBackend {
|
|||
}
|
||||
|
||||
def main(args: Array[String]) {
|
||||
if (args.length != 4) {
|
||||
System.err.println("Usage: StandaloneExecutorBackend <driverUrl> <executorId> <hostname> <cores>")
|
||||
if (args.length < 4) {
|
||||
// The reason we allow the optional last argument (the appid) is to make it easy to kill rogue executors
|
||||
System.err.println("Usage: StandaloneExecutorBackend <driverUrl> <executorId> <hostname> <cores> [<appid>]")
|
||||
System.exit(1)
|
||||
}
|
||||
run(args(0), args(1), args(2), args(3).toInt)
|
||||
|
|
Loading…
Reference in a new issue