diff --git a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
index 345dfe879c..1dcdf20960 100644
--- a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
@@ -22,6 +22,9 @@ import java.lang.System.getenv
 
 import akka.actor.ActorRef
 
+import com.google.common.base.Charsets
+import com.google.common.io.Files
+
 import spark.{Utils, Logging}
 import spark.deploy.{ExecutorState, ApplicationDescription}
 import spark.deploy.DeployMessages.ExecutorStateChanged
@@ -125,7 +128,7 @@ private[spark] class ExecutorRunner(
 
   /** Spawn a thread that will redirect a given stream to a file */
   def redirectStream(in: InputStream, file: File) {
-    val out = new FileOutputStream(file)
+    val out = new FileOutputStream(file, true)
     new Thread("redirect output to " + file) {
       override def run() {
         try {
@@ -161,9 +164,16 @@ private[spark] class ExecutorRunner(
       env.put("SPARK_LAUNCH_WITH_SCALA", "0")
       process = builder.start()
 
+      val header = "Spark Executor Command: %s\n======================================\n\n"
+        .format(command.mkString(" "))
+
       // Redirect its stdout and stderr to files
-      redirectStream(process.getInputStream, new File(executorDir, "stdout"))
-      redirectStream(process.getErrorStream, new File(executorDir, "stderr"))
+      val stdout = new File(executorDir, "stdout")
+      Files.write(header, stdout, Charsets.UTF_8)
+      redirectStream(process.getInputStream, stdout)
+
+      val stderr = new File(executorDir, "stderr")
+      redirectStream(process.getErrorStream, stderr)
 
       // Wait for it to exit; this is actually a bad thing if it happens, because we expect to run
       // long-lived processes only. However, in the future, we might restart the executor a few