Incorporate pwendell's code review suggestions.

Tor Myklebust 2013-12-16 23:14:52 -05:00
parent 882d544856
commit 963d6f065a
4 changed files with 8 additions and 9 deletions

Executor.scala

@@ -222,9 +222,9 @@ private[spark] class Executor(
           return
         }
 
-        val objectSer = SparkEnv.get.serializer.newInstance()
+        val resultSer = SparkEnv.get.serializer.newInstance()
         val beforeSerialization = System.currentTimeMillis()
-        val valueBytes = objectSer.serialize(value)
+        val valueBytes = resultSer.serialize(value)
         val afterSerialization = System.currentTimeMillis()
 
         for (m <- task.metrics) {
@@ -232,7 +232,7 @@ private[spark] class Executor(
           m.executorDeserializeTime = (taskStart - startTime).toInt
           m.executorRunTime = (taskFinish - taskStart).toInt
           m.jvmGCTime = gcTime - startGCTime
-          m.serializationTime = (afterSerialization - beforeSerialization).toInt
+          m.resultSerializationTime = (afterSerialization - beforeSerialization).toInt
         }
 
         val accumUpdates = Accumulators.values
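
Aside: the hunks above rename objectSer to resultSer and serializationTime to
resultSerializationTime so that the names say what is being timed. A minimal
sketch of the timing pattern, with a hypothetical Ser trait standing in for
Spark's SerializerInstance:

import java.nio.ByteBuffer

object ResultTimingSketch {
  // Hypothetical stand-in for Spark's SerializerInstance.
  trait Ser { def serialize[T](t: T): ByteBuffer }

  // Bracket the serialize call with wall-clock timestamps; the delta is the
  // value the executor records as resultSerializationTime.
  def timeSerialization[T](resultSer: Ser, value: T): (ByteBuffer, Long) = {
    val beforeSerialization = System.currentTimeMillis()
    val valueBytes = resultSer.serialize(value)
    val afterSerialization = System.currentTimeMillis()
    (valueBytes, afterSerialization - beforeSerialization)
  }
}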

TaskMetrics.scala

@@ -44,9 +44,9 @@ class TaskMetrics extends Serializable {
   var jvmGCTime: Long = _
 
   /**
-   * Amount of time spent serializing the result of the task
+   * Amount of time spent serializing the task result
    */
-  var serializationTime: Long = _
+  var resultSerializationTime: Long = _
 
   /**
    * If this task reads from shuffle output, metrics on getting shuffle data will be collected here
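
Aside: the "= _" initializer on a var gives the field its type's default value
(0L for a Long), so the metric reads as zero until the executor assigns it. A
tiny illustration with a hypothetical class (not Spark's TaskMetrics):

class MetricsSketch {
  var resultSerializationTime: Long = _  // defaults to 0L until assigned
}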

StagePage.scala

@@ -102,8 +102,8 @@ private[spark] class StagePage(parent: JobProgressUI) {
         }
         else {
           val serializationTimes = validTasks.map{case (info, metrics, exception) =>
-            metrics.get.serializationTime.toDouble}
-          val serializationQuantiles = "Serialization Time" +: Distribution(serializationTimes).get.getQuantiles().map(
+            metrics.get.resultSerializationTime.toDouble}
+          val serializationQuantiles = "Result serialization time" +: Distribution(serializationTimes).get.getQuantiles().map(
             ms => parent.formatDuration(ms.toLong))
 
           val serviceTimes = validTasks.map{case (info, metrics, exception) =>
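
Aside: the page builds one summary row per metric by prepending a label to the
formatted quantiles of the per-task values with +:. A rough sketch of the
quantile computation, assuming the conventional nearest-rank definition and a
non-empty sample; QuantileSketch and its defaults are illustrative stand-ins
for Spark's org.apache.spark.util.Distribution:

object QuantileSketch {
  // Probabilities for min, 25th percentile, median, 75th percentile, max.
  val defaultProbabilities = Seq(0.0, 0.25, 0.5, 0.75, 1.0)

  // Nearest-rank quantiles of a non-empty sample.
  def getQuantiles(data: Seq[Double],
                   probs: Seq[Double] = defaultProbabilities): Seq[Double] = {
    val sorted = data.sorted.toIndexedSeq
    probs.map { p =>
      val idx = math.min((p * sorted.size).toInt, sorted.size - 1)
      sorted(idx)
    }
  }
}

A row like the one above would then read
"Result serialization time" +: QuantileSketch.getQuantiles(times).map(ms => formatDuration(ms.toLong)).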

ClusterTaskSetManagerSuite.scala

@@ -314,7 +314,6 @@ class ClusterTaskSetManagerSuite extends FunSuite with LocalSparkContext with Lo
 
   def createTaskResult(id: Int): DirectTaskResult[Int] = {
     val objectSer = SparkEnv.get.serializer.newInstance()
-    new DirectTaskResult[Int](objectSer.serialize(id), mutable.Map.empty,
-      new TaskMetrics)
+    new DirectTaskResult[Int](objectSer.serialize(id), mutable.Map.empty, new TaskMetrics)
   }
 }