Merge pull request #428 from woggling/mesos-exec-id

Make ExecutorIDs include SlaveIDs when running Mesos
Matei Zaharia 2013-01-30 15:01:46 -08:00
commit 612a9fee71
2 changed files with 21 additions and 15 deletions
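In short: rather than registering every executor under the shared ExecutorID "default", the scheduler now builds a separate ExecutorInfo per slave and uses that slave's ID as the executor ID. A minimal sketch of the idea, using a plain case class in place of the Mesos protobuf builders (all names below are illustrative, not the project's API):

    // Stand-in for Mesos's ExecutorInfo; the real code builds a protobuf message.
    case class ExecutorInfo(executorId: String)

    // Before this change: one executor identity shared by every slave.
    val sharedExecutor = ExecutorInfo("default")

    // After this change: the executor identity is derived from the slave a task
    // lands on, so executors on different slaves are distinguishable.
    def createExecutorInfo(execId: String): ExecutorInfo = ExecutorInfo(execId)
    def executorFor(slaveId: String): ExecutorInfo = createExecutorInfo(slaveId)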


@@ -32,7 +32,11 @@ private[spark] class MesosExecutorBackend(executor: Executor)
     logInfo("Registered with Mesos as executor ID " + executorInfo.getExecutorId.getValue)
     this.driver = driver
     val properties = Utils.deserialize[Array[(String, String)]](executorInfo.getData.toByteArray)
-    executor.initialize(executorInfo.getExecutorId.getValue, slaveInfo.getHostname, properties)
+    executor.initialize(
+      executorInfo.getExecutorId.getValue,
+      slaveInfo.getHostname,
+      properties
+    )
   }
 
   override def launchTask(d: ExecutorDriver, taskInfo: TaskInfo) {


@@ -51,7 +51,7 @@ private[spark] class MesosSchedulerBackend(
   val taskIdToSlaveId = new HashMap[Long, String]
 
   // An ExecutorInfo for our tasks
-  var executorInfo: ExecutorInfo = null
+  var execArgs: Array[Byte] = null
 
   override def start() {
     synchronized {
@@ -70,12 +70,11 @@ private[spark] class MesosSchedulerBackend(
         }
       }.start()
 
-      executorInfo = createExecutorInfo()
       waitForRegister()
     }
   }
 
-  def createExecutorInfo(): ExecutorInfo = {
+  def createExecutorInfo(execId: String): ExecutorInfo = {
     val sparkHome = sc.getSparkHome().getOrElse(throw new SparkException(
       "Spark home is not set; set it through the spark.home system " +
       "property, the SPARK_HOME environment variable or the SparkContext constructor"))
@@ -97,7 +96,7 @@ private[spark] class MesosSchedulerBackend(
       .setEnvironment(environment)
       .build()
     ExecutorInfo.newBuilder()
-      .setExecutorId(ExecutorID.newBuilder().setValue("default").build())
+      .setExecutorId(ExecutorID.newBuilder().setValue(execId).build())
       .setCommand(command)
       .setData(ByteString.copyFrom(createExecArg()))
       .addResources(memory)
@@ -109,17 +108,20 @@ private[spark] class MesosSchedulerBackend(
    * containing all the spark.* system properties in the form of (String, String) pairs.
    */
   private def createExecArg(): Array[Byte] = {
-    val props = new HashMap[String, String]
-    val iterator = System.getProperties.entrySet.iterator
-    while (iterator.hasNext) {
-      val entry = iterator.next
-      val (key, value) = (entry.getKey.toString, entry.getValue.toString)
-      if (key.startsWith("spark.")) {
-        props(key) = value
+    if (execArgs == null) {
+      val props = new HashMap[String, String]
+      val iterator = System.getProperties.entrySet.iterator
+      while (iterator.hasNext) {
+        val entry = iterator.next
+        val (key, value) = (entry.getKey.toString, entry.getValue.toString)
+        if (key.startsWith("spark.")) {
+          props(key) = value
+        }
       }
+      // Serialize the map as an array of (String, String) pairs
+      execArgs = Utils.serialize(props.toArray)
     }
-    // Serialize the map as an array of (String, String) pairs
-    return Utils.serialize(props.toArray)
+    return execArgs
   }
 
   override def offerRescinded(d: SchedulerDriver, o: OfferID) {}
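
The createExecArg change is a lazy, one-time cache: since createExecutorInfo is now called per task rather than once at startup, the serialized spark.* properties would otherwise be rebuilt on every launch. A self-contained sketch of that caching pattern, with a plain Java-serialization stand-in for Spark's Utils.serialize (an assumption for illustration, not the project's helper):

    import java.io.{ByteArrayOutputStream, ObjectOutputStream}
    import scala.collection.mutable.HashMap

    object ExecArgCache {
      // Cached serialized form of all spark.* system properties.
      private var execArgs: Array[Byte] = null

      // Stand-in serializer (the real code uses Utils.serialize).
      private def serialize(pairs: Array[(String, String)]): Array[Byte] = {
        val bos = new ByteArrayOutputStream()
        val oos = new ObjectOutputStream(bos)
        oos.writeObject(pairs)
        oos.close()
        bos.toByteArray
      }

      def createExecArg(): Array[Byte] = {
        if (execArgs == null) {
          val props = new HashMap[String, String]
          val iterator = System.getProperties.entrySet.iterator
          while (iterator.hasNext) {
            val entry = iterator.next
            val (key, value) = (entry.getKey.toString, entry.getValue.toString)
            if (key.startsWith("spark.")) {
              props(key) = value
            }
          }
          // Serialize once; subsequent calls reuse the cached bytes.
          execArgs = serialize(props.toArray)
        }
        execArgs
      }
    }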
@@ -216,7 +218,7 @@ private[spark] class MesosSchedulerBackend(
     return MesosTaskInfo.newBuilder()
       .setTaskId(taskId)
       .setSlaveId(SlaveID.newBuilder().setValue(slaveId).build())
-      .setExecutor(executorInfo)
+      .setExecutor(createExecutorInfo(slaveId))
       .setName(task.name)
       .addResources(cpuResource)
       .setData(ByteString.copyFrom(task.serializedTask))
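
Taken together with the first hunk, the executor ID chosen here flows through Mesos back to MesosExecutorBackend.registered, which hands it to executor.initialize, so each executor now initializes under its slave's ID instead of the shared "default". Roughly, with plain strings standing in for the protobuf ID types (values below are made up for illustration):

    // Scheduler side: the executor ID is simply the slave ID.
    def buildExecutorId(slaveId: String): String = slaveId

    // Executor backend side: received back from Mesos and handed to the executor.
    def onRegistered(executorId: String, hostname: String): Unit = {
      println("Initializing executor " + executorId + " on " + hostname)
    }

    onRegistered(buildExecutorId("20130130-0001-example-slave"), "worker-1")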