SPARK-3020: Print completed indices rather than tasks in web UI

Author: Patrick Wendell <pwendell@gmail.com>

Closes #1933 from pwendell/speculation and squashes the following commits:

33a3473 [Patrick Wendell] Use OpenHashSet
8ce2ff0 [Patrick Wendell] SPARK-3020: Print completed indices rather than tasks in web UI
Authored by Patrick Wendell on 2014-08-13 18:08:38 -07:00; committed by Reynold Xin.

commit 0c7b452904
parent 63d6777737

3 changed files with 4 additions and 1 deletion
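
Why distinct indices: with speculative execution, more than one attempt of the same partition can finish successfully, so counting successful task-end events can push the UI's completed count past the stage's total task count. Tracking the set of completed partition indices keeps the count bounded by numTasks. A minimal sketch of the idea, using scala.collection.mutable.HashSet in place of Spark's internal OpenHashSet (which is private[spark]); the numbers are made up for illustration:

    import scala.collection.mutable

    object CompletedIndicesDemo extends App {
      val numTasks = 4
      // Partition indices of successful attempts; index 2 finished twice
      // because a speculative copy also succeeded.
      val successfulIndices = Seq(0, 1, 2, 2, 3)

      var numCompleteTasks = 0                          // old counter
      val completedIndices = mutable.HashSet.empty[Int] // new approach

      for (index <- successfulIndices) {
        numCompleteTasks += 1
        completedIndices.add(index)
      }

      println(s"$numCompleteTasks / $numTasks completed")        // 5 / 4 (overshoots)
      println(s"${completedIndices.size} / $numTasks completed") // 4 / 4
    }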

core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala

@@ -153,6 +153,7 @@ class JobProgressListener(conf: SparkConf) extends SparkListener with Logging {
     val (errorMessage, metrics): (Option[String], Option[TaskMetrics]) =
       taskEnd.reason match {
         case org.apache.spark.Success =>
+          stageData.completedIndices.add(info.index)
           stageData.numCompleteTasks += 1
           (None, Option(taskEnd.taskMetrics))
         case e: ExceptionFailure => // Handle ExceptionFailure because we might have metrics
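
Note that info.index is the partition index, which is stable across task attempts, whereas each attempt gets its own task ID; adding the index therefore dedupes speculative re-executions. A small illustration with a hypothetical Attempt record (not Spark's actual TaskInfo):

    // Hypothetical record: the partition index is shared by all attempts
    // of a task, while the task ID is unique per attempt.
    case class Attempt(taskId: Long, index: Int)

    val successes = Seq(Attempt(10L, index = 0), Attempt(11L, index = 1),
                        Attempt(12L, index = 1))   // index 1 succeeded twice
    assert(successes.size == 3)                    // three success events...
    assert(successes.map(_.index).toSet.size == 2) // ...but two distinct partitions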

core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala

@@ -168,7 +168,7 @@ private[ui] class StageTableBase(
     <td valign="middle">{submissionTime}</td>
     <td sorttable_customkey={duration.getOrElse(-1).toString}>{formattedDuration}</td>
     <td class="progress-cell">
-      {makeProgressBar(stageData.numActiveTasks, stageData.numCompleteTasks,
+      {makeProgressBar(stageData.numActiveTasks, stageData.completedIndices.size,
       stageData.numFailedTasks, s.numTasks)}
     </td>
     <td sorttable_customekey={inputRead.toString}>{inputReadWithUnit}</td>

core/src/main/scala/org/apache/spark/ui/jobs/UIData.scala

@@ -19,6 +19,7 @@ package org.apache.spark.ui.jobs

 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.scheduler.{AccumulableInfo, TaskInfo}
+import org.apache.spark.util.collection.OpenHashSet

 import scala.collection.mutable.HashMap

@@ -38,6 +39,7 @@ private[jobs] object UIData {
   class StageUIData {
     var numActiveTasks: Int = _
     var numCompleteTasks: Int = _
+    var completedIndices = new OpenHashSet[Int]()
     var numFailedTasks: Int = _
     var executorRunTime: Long = _
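
The follow-up commit ("Use OpenHashSet") stores the indices in OpenHashSet, Spark's open-addressing hash set that specializes primitive keys such as Int to avoid boxing, keeping per-stage bookkeeping in the listener cheap. A sketch of the calls the listener relies on; note that OpenHashSet is private[spark], so this compiles only inside Spark's own source tree:

    // Only compiles within Spark's source tree (OpenHashSet is private[spark]).
    import org.apache.spark.util.collection.OpenHashSet

    val indices = new OpenHashSet[Int]()
    indices.add(7)
    indices.add(7)            // duplicate adds are idempotent
    assert(indices.size == 1) // the distinct-index count shown by the UI
    assert(indices.contains(7))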