diff --git a/core/src/main/resources/spark/ui/static/webui.css b/core/src/main/resources/spark/ui/static/webui.css
index 70fd0ba31e..9914a8ad2a 100644
--- a/core/src/main/resources/spark/ui/static/webui.css
+++ b/core/src/main/resources/spark/ui/static/webui.css
@@ -49,6 +49,10 @@
   line-height: 15px !important;
 }
 
+.table-fixed {
+  table-layout:fixed;
+}
+
 .table td {
   vertical-align: middle !important;
 }
diff --git a/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala b/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala
index 60fbcbfad6..47936e2bad 100644
--- a/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala
+++ b/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala
@@ -53,7 +53,7 @@ private[spark] class IndexPage(parent: MasterWebUI) {
     val workers = state.workers.sortBy(_.id)
     val workerTable = UIUtils.listingTable(workerHeaders, workerRow, workers)
 
-    val appHeaders = Seq("ID", "Name", "Cores", "Memory per Node", "Launch Time", "User",
+    val appHeaders = Seq("ID", "Name", "Cores", "Memory per Node", "Submitted Time", "User",
       "State", "Duration")
     val activeApps = state.activeApps.sortBy(_.startTime).reverse
     val activeAppsTable = UIUtils.listingTable(appHeaders, appRow, activeApps)
diff --git a/core/src/main/scala/spark/ui/UIUtils.scala b/core/src/main/scala/spark/ui/UIUtils.scala
index ee7a8b482e..fe2afc1129 100644
--- a/core/src/main/scala/spark/ui/UIUtils.scala
+++ b/core/src/main/scala/spark/ui/UIUtils.scala
@@ -125,9 +125,21 @@ private[spark] object UIUtils {
   }
 
   /** Returns an HTML table constructed by generating a row for each object in a sequence. */
-  def listingTable[T](headers: Seq[String], makeRow: T => Seq[Node], rows: Seq[T]): Seq[Node] = {
-    <table class="table table-bordered table-striped table-condensed sortable">
-      <thead>{headers.map(h => <th>{h}</th>)}</thead>
+  def listingTable[T](
+      headers: Seq[String],
+      makeRow: T => Seq[Node],
+      rows: Seq[T],
+      fixedWidth: Boolean = false): Seq[Node] = {
+
+    val colWidth = 100.toDouble / headers.size
+    val colWidthAttr = if (fixedWidth) colWidth + "%" else ""
+    var tableClass = "table table-bordered table-striped table-condensed sortable"
+    if (fixedWidth) {
+      tableClass += " table-fixed"
+    }
+
+    <table class={tableClass}>
+      <thead>{headers.map(h => <th width={colWidthAttr}>{h}</th>)}</thead>
       <tbody>
         {rows.map(r => makeRow(r))}
       </tbody>
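Note on the UIUtils change above: `fixedWidth` defaults to false, so every existing call site keeps its old rendering and callers opt in per table. A minimal usage sketch of the new parameter (the `propertyRow` and `props` names here are illustrative, not part of the patch):

    // Two headers => colWidth = 100.0 / 2, so each <th> gets width="50.0%".
    def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
    val props = Seq(("spark.master", "local"), ("spark.app.name", "demo"))

    // Default: same flexible layout as before this patch (width attribute is "").
    UIUtils.listingTable(Seq("Name", "Value"), propertyRow, props)

    // Opt in: the class list gains "table-fixed" and each column is pinned to 50.0%.
    UIUtils.listingTable(Seq("Name", "Value"), propertyRow, props, fixedWidth = true)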

diff --git a/core/src/main/scala/spark/ui/env/EnvironmentUI.scala b/core/src/main/scala/spark/ui/env/EnvironmentUI.scala
index 6ee58cda2d..b3e28ce317 100644
--- a/core/src/main/scala/spark/ui/env/EnvironmentUI.scala
+++ b/core/src/main/scala/spark/ui/env/EnvironmentUI.scala
@@ -45,7 +45,8 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
       ("Scala Home", Properties.scalaHome)
     ).sorted
     def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
-    def jvmTable = UIUtils.listingTable(Seq("Name", "Value"), jvmRow, jvmInformation)
+    def jvmTable =
+      UIUtils.listingTable(Seq("Name", "Value"), jvmRow, jvmInformation, fixedWidth = true)
 
     val properties = System.getProperties.iterator.toSeq
     val classPathProperty = properties.find { case (k, v) =>
@@ -56,8 +57,10 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
 
     val propertyHeaders = Seq("Name", "Value")
     def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
-    val sparkPropertyTable = UIUtils.listingTable(propertyHeaders, propertyRow, sparkProperties)
-    val otherPropertyTable = UIUtils.listingTable(propertyHeaders, propertyRow, otherProperties)
+    val sparkPropertyTable =
+      UIUtils.listingTable(propertyHeaders, propertyRow, sparkProperties, fixedWidth = true)
+    val otherPropertyTable =
+      UIUtils.listingTable(propertyHeaders, propertyRow, otherProperties, fixedWidth = true)
 
     val classPathEntries = classPathProperty._2
       .split(System.getProperty("path.separator", ":"))
@@ -69,17 +72,21 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
 
     val classPathHeaders = Seq("Resource", "Source")
     def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
-    val classPathTable = UIUtils.listingTable(classPathHeaders, classPathRow, classPath)
+    val classPathTable =
+      UIUtils.listingTable(classPathHeaders, classPathRow, classPath, fixedWidth = true)
 
     val content =
       <span>
         <h4>Runtime Information</h4> {jvmTable}
         <hr/>
-        <h4>{sparkProperties.size} Spark Properties</h4> {sparkPropertyTable}
+        <h4>{sparkProperties.size} Spark Properties</h4>
+        {sparkPropertyTable}
         <hr/>
-        <h4>{otherProperties.size} System Properties</h4> {otherPropertyTable}
+        <h4>{otherProperties.size} System Properties</h4>
+        {otherPropertyTable}
         <hr/>
-        <h4>{classPath.size} Classpath Entries</h4> {classPathTable}
+        <h4>{classPath.size} Classpath Entries</h4>
+        {classPathTable}
       </span>
 
     UIUtils.headerSparkPage(content, sc, "Environment", Environment)
diff --git a/core/src/main/scala/spark/ui/jobs/PoolTable.scala b/core/src/main/scala/spark/ui/jobs/PoolTable.scala
index 12fb5f0b89..621828f9c3 100644
--- a/core/src/main/scala/spark/ui/jobs/PoolTable.scala
+++ b/core/src/main/scala/spark/ui/jobs/PoolTable.scala
@@ -21,7 +21,7 @@ private[spark] class PoolTable(pools: Seq[Schedulable], listener: JobProgressLis
   private def poolTable(makeRow: (Schedulable, HashMap[String, HashSet[Stage]]) => Seq[Node],
     rows: Seq[Schedulable]
     ): Seq[Node] = {
-    <table class="table table-bordered table-striped table-condensed sortable">
+    <table class="table table-bordered table-striped table-condensed sortable table-fixed">
       <thead>
         <th>Pool Name</th>
         <th>Minimum Share</th>
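PoolTable above hard-codes the extra class rather than going through `listingTable(..., fixedWidth = true)`, but both paths converge on the same markup: a class list containing "table-fixed", which webui.css maps to `table-layout:fixed`, so a long cell value can no longer stretch its column. A standalone sketch of that equivalence (not code from the patch):

    // listingTable's opt-in path (UIUtils.scala above):
    var tableClass = "table table-bordered table-striped table-condensed sortable"
    val fixedWidth = true
    if (fixedWidth) {
      tableClass += " table-fixed"
    }

    // PoolTable's hard-coded path:
    val poolTableClass = "table table-bordered table-striped table-condensed sortable table-fixed"

    assert(tableClass == poolTableClass)  // identical class attribute either way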
diff --git a/core/src/main/scala/spark/ui/jobs/StagePage.scala b/core/src/main/scala/spark/ui/jobs/StagePage.scala
index 8e2458f94b..f2a6f4f303 100644
--- a/core/src/main/scala/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/spark/ui/jobs/StagePage.scala
@@ -123,16 +123,16 @@ private[spark] class StagePage(parent: JobProgressUI) {
         if (hasShuffleRead) shuffleReadQuantiles else Nil,
         if (hasShuffleWrite) shuffleWriteQuantiles else Nil)
 
-      val quantileHeaders = Seq("Metric", "Min (0th percentitle)", "25th percentile",
-        "50th percentile", "75th percentile", "Max (100th percentile)")
+      val quantileHeaders = Seq("Metric", "Min", "25th percentile",
+        "Median", "75th percentile", "Max")
       def quantileRow(data: Seq[String]): Seq[Node] = <tr> {data.map(d => <td>{d}</td>)} </tr>
-      Some(listingTable(quantileHeaders, quantileRow, listings))
+      Some(listingTable(quantileHeaders, quantileRow, listings, fixedWidth = true))
     }
 
     val content =
       summary ++
       <h2>Summary Metrics for {numCompleted} Completed Tasks</h2> ++
-      <div>{summaryTable.getOrElse("No tasks have reported their execution metrics yet.")}</div> ++
+      <div>{summaryTable.getOrElse("No tasks have reported metrics yet.")}</div> ++
       <h2>Tasks</h2> ++ taskTable;
 
     headerSparkPage(content, parent.sc, "Details for Stage %d".format(stageId), Jobs)
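On the StagePage rename: the new labels (Min, 25th percentile, Median, 75th percentile, Max) name the same five columns the old headers did, i.e. the 0th/25th/50th/75th/100th percentiles of each task metric, while dropping the parentheticals that carried the "percentitle" typo. A standalone sketch of what those five positions mean (a generic nearest-rank illustration, not the distribution code StagePage actually uses):

    // Five-number summary over a sample: Min, 25th, Median, 75th, Max.
    def fiveNumberSummary(xs: Seq[Double]): Seq[Double] = {
      val sorted = xs.sorted
      def quantile(p: Double): Double = sorted(((sorted.size - 1) * p).toInt)
      Seq(0.0, 0.25, 0.5, 0.75, 1.0).map(quantile)
    }

    // e.g. task durations in ms:
    // fiveNumberSummary(Seq(120.0, 140.0, 150.0, 400.0, 2200.0))
    //   == Seq(120.0, 140.0, 150.0, 400.0, 2200.0)  // Min, 25th, Median, 75th, Max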