Commit 1a13460c authored by Reynold Xin

Merge pull request #833 from rxin/ui

Various UI improvements.
parents 044a088c 1a51deae
Showing 115 additions and 97 deletions
@@ -5,10 +5,6 @@
padding: 0;
}
body {
font-size: 15px !important;
}
.version {
line-height: 30px;
vertical-align: bottom;
@@ -53,6 +49,10 @@ body {
line-height: 15px !important;
}
.table-fixed {
table-layout:fixed;
}
.table td {
vertical-align: middle !important;
}
......
@@ -53,7 +53,7 @@ private[spark] class IndexPage(parent: MasterWebUI) {
val workers = state.workers.sortBy(_.id)
val workerTable = UIUtils.listingTable(workerHeaders, workerRow, workers)
val appHeaders = Seq("ID", "Description", "Cores", "Memory per Node", "Submit Time", "User",
val appHeaders = Seq("ID", "Name", "Cores", "Memory per Node", "Submitted Time", "User",
"State", "Duration")
val activeApps = state.activeApps.sortBy(_.startTime).reverse
val activeAppsTable = UIUtils.listingTable(appHeaders, appRow, activeApps)
......
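Note: the app listings in this hunk are ordered newest-first via sortBy(_.startTime).reverse. A standalone sketch with a stubbed type (AppInfo and its fields are assumed for illustration, not the Spark classes):

    case class AppInfo(id: String, startTime: Long)

    val apps = Seq(AppInfo("app-2", 200L), AppInfo("app-1", 100L))
    val newestFirst = apps.sortBy(_.startTime).reverse
    // equivalently: apps.sortBy(-_.startTime)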
@@ -17,21 +17,21 @@
package spark.ui
import annotation.tailrec
import javax.servlet.http.{HttpServletResponse, HttpServletRequest}
import scala.annotation.tailrec
import scala.util.{Try, Success, Failure}
import scala.xml.Node
import net.liftweb.json.{JValue, pretty, render}
import org.eclipse.jetty.server.{Server, Request, Handler}
import org.eclipse.jetty.server.handler.{ResourceHandler, HandlerList, ContextHandler, AbstractHandler}
import org.eclipse.jetty.util.thread.QueuedThreadPool
import scala.util.{Try, Success, Failure}
import scala.xml.Node
import spark.Logging
/** Utilities for launching a web server using Jetty's HTTP Server class */
private[spark] object JettyUtils extends Logging {
// Base type for a function that returns something based on an HTTP request. Allows for
......
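Note: the reorder above appears to follow Spark's usual import grouping, which is why scala.util and scala.xml move up from below the Jetty block:

    import javax.servlet.http.HttpServletRequest  // 1. java / javax
    import scala.xml.Node                         // 2. scala.*
    import org.eclipse.jetty.server.Server        // 3. third-party (liftweb, Jetty)
    // 4. project (spark.*) last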
@@ -125,9 +125,21 @@ private[spark] object UIUtils {
}
/** Returns an HTML table constructed by generating a row for each object in a sequence. */
def listingTable[T](headers: Seq[String], makeRow: T => Seq[Node], rows: Seq[T]): Seq[Node] = {
<table class="table table-bordered table-striped table-condensed sortable">
<thead>{headers.map(h => <th>{h}</th>)}</thead>
def listingTable[T](
headers: Seq[String],
makeRow: T => Seq[Node],
rows: Seq[T],
fixedWidth: Boolean = false): Seq[Node] = {
val colWidth = 100.toDouble / headers.size
val colWidthAttr = if (fixedWidth) colWidth + "%" else ""
var tableClass = "table table-bordered table-striped table-condensed sortable"
if (fixedWidth) {
tableClass += " table-fixed"
}
<table class={tableClass}>
<thead>{headers.map(h => <th width={colWidthAttr}>{h}</th>)}</thead>
<tbody>
{rows.map(r => makeRow(r))}
</tbody>
......
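A self-contained sketch of the widened listingTable (plain scala-xml, outside Spark): the equal percentage widths only take effect together with the .table-fixed CSS rule added above, since table-layout: fixed makes the browser size columns from the header widths instead of the cell content.

    import scala.xml.Node

    def listingTable[T](
        headers: Seq[String],
        makeRow: T => Seq[Node],
        rows: Seq[T],
        fixedWidth: Boolean = false): Seq[Node] = {
      // Split 100% evenly across columns when a fixed layout is requested.
      val colWidthAttr = if (fixedWidth) (100.0 / headers.size) + "%" else ""
      var tableClass = "table table-bordered table-striped table-condensed sortable"
      if (fixedWidth) tableClass += " table-fixed"
      <table class={tableClass}>
        <thead>{headers.map(h => <th width={colWidthAttr}>{h}</th>)}</thead>
        <tbody>{rows.map(r => makeRow(r))}</tbody>
      </table>
    }

    // Usage: a two-column table where each column gets 50%.
    val table = listingTable[(String, String)](
      Seq("Name", "Value"),
      kv => <tr><td>{kv._1}</td><td>{kv._2}</td></tr>,
      Seq("java.version" -> "1.7.0"),
      fixedWidth = true)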
@@ -22,7 +22,8 @@ import scala.util.Random
import spark.SparkContext
import spark.SparkContext._
import spark.scheduler.cluster.SchedulingMode
import spark.scheduler.cluster.SchedulingMode.SchedulingMode
/**
* Continuously generates jobs that expose various features of the WebUI (internal testing tool).
*
......
@@ -19,18 +19,17 @@ package spark.ui.env
import javax.servlet.http.HttpServletRequest
import org.eclipse.jetty.server.Handler
import scala.collection.JavaConversions._
import scala.util.Properties
import scala.xml.Node
import org.eclipse.jetty.server.Handler
import spark.ui.JettyUtils._
import spark.ui.UIUtils.headerSparkPage
import spark.ui.UIUtils
import spark.ui.Page.Environment
import spark.SparkContext
import spark.ui.UIUtils
import scala.xml.Node
private[spark] class EnvironmentUI(sc: SparkContext) {
@@ -46,20 +45,22 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
("Scala Home", Properties.scalaHome)
).sorted
def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
def jvmTable = UIUtils.listingTable(Seq("Name", "Value"), jvmRow, jvmInformation)
def jvmTable =
UIUtils.listingTable(Seq("Name", "Value"), jvmRow, jvmInformation, fixedWidth = true)
val properties = System.getProperties.iterator.toSeq
val classPathProperty = properties
.filter{case (k, v) => k.contains("java.class.path")}
.headOption
.getOrElse("", "")
val classPathProperty = properties.find { case (k, v) =>
k.contains("java.class.path")
}.getOrElse(("", ""))
val sparkProperties = properties.filter(_._1.startsWith("spark")).sorted
val otherProperties = properties.diff(sparkProperties :+ classPathProperty).sorted
val propertyHeaders = Seq("Name", "Value")
def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
val sparkPropertyTable = UIUtils.listingTable(propertyHeaders, propertyRow, sparkProperties)
val otherPropertyTable = UIUtils.listingTable(propertyHeaders, propertyRow, otherProperties)
val sparkPropertyTable =
UIUtils.listingTable(propertyHeaders, propertyRow, sparkProperties, fixedWidth = true)
val otherPropertyTable =
UIUtils.listingTable(propertyHeaders, propertyRow, otherProperties, fixedWidth = true)
val classPathEntries = classPathProperty._2
.split(System.getProperty("path.separator", ":"))
@@ -71,16 +72,23 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
val classPathHeaders = Seq("Resource", "Source")
def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
val classPathTable = UIUtils.listingTable(classPathHeaders, classPathRow, classPath)
val classPathTable =
UIUtils.listingTable(classPathHeaders, classPathRow, classPath, fixedWidth = true)
val content =
<span>
<h4>Runtime Information</h4> {jvmTable}
<h4>Spark Properties</h4> {sparkPropertyTable}
<h4>System Properties</h4> {otherPropertyTable}
<h4>Classpath Entries</h4> {classPathTable}
<hr/>
<h4>{sparkProperties.size} Spark Properties</h4>
{sparkPropertyTable}
<hr/>
<h4>{otherProperties.size} System Properties</h4>
{otherPropertyTable}
<hr/>
<h4>{classPath.size} Classpath Entries</h4>
{classPathTable}
</span>
headerSparkPage(content, sc, "Environment", Environment)
UIUtils.headerSparkPage(content, sc, "Environment", Environment)
}
}
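Two things worth noting in the EnvironmentUI rewrite: headings now embed counts ({sparkProperties.size} Spark Properties), and the classpath lookup drops filter(...).headOption for the equivalent, short-circuiting find. A minimal sketch of the latter with stub data (not the Spark source):

    val properties: Seq[(String, String)] =
      Seq("java.class.path" -> "/opt/app.jar", "spark.master" -> "local")

    // Before: filter the whole sequence, then take the head.
    val before = properties
      .filter { case (k, _) => k.contains("java.class.path") }
      .headOption
      .getOrElse("" -> "")

    // After: find stops at the first match.
    val after = properties
      .find { case (k, _) => k.contains("java.class.path") }
      .getOrElse("" -> "")

    assert(before == after)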
package spark.ui.exec
import javax.servlet.http.HttpServletRequest
import org.eclipse.jetty.server.Handler
import scala.collection.mutable.{HashMap, HashSet}
import scala.xml.Node
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import scala.util.Properties
import org.eclipse.jetty.server.Handler
import spark.{ExceptionFailure, Logging, SparkContext, Success, Utils}
import spark.{ExceptionFailure, Logging, Utils, SparkContext}
import spark.executor.TaskMetrics
import spark.scheduler.cluster.TaskInfo
import spark.scheduler._
import spark.SparkContext
import spark.storage.{StorageStatus, StorageUtils}
import spark.scheduler.{SparkListenerTaskStart, SparkListenerTaskEnd, SparkListener}
import spark.ui.JettyUtils._
import spark.ui.Page.Executors
import spark.ui.UIUtils.headerSparkPage
import spark.ui.UIUtils
import scala.xml.{Node, XML}
private[spark] class ExecutorsUI(val sc: SparkContext) {
@@ -44,7 +39,8 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
val execHead = Seq("Executor ID", "Address", "RDD blocks", "Memory used", "Disk used",
"Active tasks", "Failed tasks", "Complete tasks", "Total tasks")
def execRow(kv: Seq[String]) =
def execRow(kv: Seq[String]) = {
<tr>
<td>{kv(0)}</td>
<td>{kv(1)}</td>
@@ -60,9 +56,9 @@
<td>{kv(8)}</td>
<td>{kv(9)}</td>
</tr>
val execInfo =
for (b <- 0 until storageStatusList.size)
yield getExecInfo(b)
}
val execInfo = for (b <- 0 until storageStatusList.size) yield getExecInfo(b)
val execTable = UIUtils.listingTable(execHead, execRow, execInfo)
val content =
@@ -82,7 +78,7 @@ private[spark] class ExecutorsUI(val sc: SparkContext) {
</div>
</div>;
headerSparkPage(content, sc, "Executors", Executors)
UIUtils.headerSparkPage(content, sc, execInfo.size + " Executors", Executors)
}
def getExecInfo(a: Int): Seq[String] = {
......
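The collapsed for/yield above is just an indexed map; a sketch with storageStatusList stubbed as a plain Seq (names reused for illustration only):

    val storageStatusList = Seq("executor-0", "executor-1")
    def getExecInfo(a: Int): Seq[String] = Seq(a.toString, storageStatusList(a))

    val execInfo  = for (b <- 0 until storageStatusList.size) yield getExecInfo(b)
    val execInfo2 = storageStatusList.indices.map(getExecInfo)  // equivalent
    assert(execInfo == execInfo2)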
@@ -24,7 +24,7 @@ import scala.xml.{NodeSeq, Node}
import spark.scheduler.cluster.SchedulingMode
import spark.ui.Page._
import spark.ui.UIUtils._
import spark.Utils
/** Page showing list of all ongoing and recently finished stages and pools*/
private[spark] class IndexPage(parent: JobProgressUI) {
@@ -46,7 +46,8 @@ private[spark] class IndexPage(parent: JobProgressUI) {
val completedStagesTable = new StageTable(completedStages.sortBy(_.submissionTime).reverse, parent)
val failedStagesTable = new StageTable(failedStages.sortBy(_.submissionTime).reverse, parent)
val poolTable = new PoolTable(listener.sc.getAllPools, listener)
val pools = listener.sc.getAllPools
val poolTable = new PoolTable(pools, listener)
val summary: NodeSeq =
<div>
<ul class="unstyled">
@@ -76,15 +77,15 @@ private[spark] class IndexPage(parent: JobProgressUI) {
val content = summary ++
{if (listener.sc.getSchedulingMode == SchedulingMode.FAIR) {
<h4>Pools</h4> ++ poolTable.toNodeSeq
<hr/><h4>{pools.size} Fair Scheduler Pools</h4> ++ poolTable.toNodeSeq
} else {
Seq()
}} ++
<h4 id="active">Active Stages: {activeStages.size}</h4> ++
<hr/><h4 id="active">{activeStages.size} Active Stages</h4> ++
activeStagesTable.toNodeSeq++
<h4 id="completed">Completed Stages: {completedStages.size}</h4> ++
<hr/><h4 id="completed">{completedStages.size} Completed Stages</h4> ++
completedStagesTable.toNodeSeq++
<h4 id ="failed">Failed Stages: {failedStages.size}</h4> ++
<hr/><h4 id ="failed">{failedStages.size} Failed Stages</h4> ++
failedStagesTable.toNodeSeq
headerSparkPage(content, parent.sc, "Spark Stages", Jobs)
......
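The page body above is assembled by concatenating NodeSeq sections with ++, with Seq() standing in for a hidden section; a minimal sketch of that pattern (stub helpers, not Spark code):

    import scala.xml.{Node, NodeSeq}

    def section(title: String, body: NodeSeq): NodeSeq =
      <hr/> ++ <h4>{title}</h4> ++ body

    def page(showPools: Boolean, pools: NodeSeq, stages: NodeSeq): NodeSeq =
      (if (showPools) section("Fair Scheduler Pools", pools) else Seq[Node]()) ++
        section("Active Stages", stages)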
@@ -23,10 +23,11 @@ private[spark] class PoolPage(parent: JobProgressUI) {
val pool = listener.sc.getPoolForName(poolName).get
val poolTable = new PoolTable(Seq(pool), listener)
val content = <h3>Pool </h3> ++ poolTable.toNodeSeq() ++
<h3>Active Stages : {activeStages.size}</h3> ++ activeStagesTable.toNodeSeq()
val content = <h4>Summary </h4> ++ poolTable.toNodeSeq() ++
<hr/>
<h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq()
headerSparkPage(content, parent.sc, "Spark Pool Details", Jobs)
headerSparkPage(content, parent.sc, "Fair Scheduler Pool: " + poolName, Jobs)
}
}
}
package spark.ui.jobs
import scala.xml.Node
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
import scala.xml.Node
import spark.scheduler.Stage
import spark.scheduler.cluster.Schedulable
@@ -21,14 +21,14 @@ private[spark] class PoolTable(pools: Seq[Schedulable], listener: JobProgressLis
private def poolTable(makeRow: (Schedulable, HashMap[String, HashSet[Stage]]) => Seq[Node],
rows: Seq[Schedulable]
): Seq[Node] = {
<table class="table table-bordered table-striped table-condensed sortable">
<table class="table table-bordered table-striped table-condensed sortable table-fixed">
<thead>
<th>Pool Name</th>
<th>Minimum Share</th>
<th>Pool Weight</th>
<td>Active Stages</td>
<td>Running Tasks</td>
<td>SchedulingMode</td>
<th>Active Stages</th>
<th>Running Tasks</th>
<th>SchedulingMode</th>
</thead>
<tbody>
{rows.map(r => makeRow(r, poolToActiveStages))}
@@ -36,7 +36,8 @@ private[spark] class PoolTable(pools: Seq[Schedulable], listener: JobProgressLis
</table>
}
private def poolRow(p: Schedulable, poolToActiveStages: HashMap[String, HashSet[Stage]]): Seq[Node] = {
private def poolRow(p: Schedulable, poolToActiveStages: HashMap[String, HashSet[Stage]])
: Seq[Node] = {
val activeStages = poolToActiveStages.get(p.name) match {
case Some(stages) => stages.size
case None => 0
......
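The lookup in poolRow is the long spelling of an Option fold; both forms below are equivalent (stub map, for illustration):

    import scala.collection.mutable.{HashMap, HashSet}

    val poolToActiveStages = HashMap("default" -> HashSet("stage-1", "stage-2"))

    val viaMatch = poolToActiveStages.get("default") match {
      case Some(stages) => stages.size
      case None => 0
    }
    val viaMap = poolToActiveStages.get("default").map(_.size).getOrElse(0)
    assert(viaMatch == viaMap)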
@@ -46,11 +46,12 @@ private[spark] class StagePage(parent: JobProgressUI) {
<h4>Summary Metrics</h4> No tasks have started yet
<h4>Tasks</h4> No tasks have started yet
</div>
return headerSparkPage(content, parent.sc, "Stage Details: %s".format(stageId), Jobs)
return headerSparkPage(content, parent.sc, "Details for Stage %s".format(stageId), Jobs)
}
val tasks = listener.stageToTaskInfos(stageId).toSeq.sortBy(_._1.launchTime)
val numCompleted = tasks.count(_._1.finished)
val shuffleReadBytes = listener.stageToShuffleRead.getOrElse(stageId, 0L)
val hasShuffleRead = shuffleReadBytes > 0
val shuffleWriteBytes = listener.stageToShuffleWrite.getOrElse(stageId, 0L)
@@ -82,11 +83,11 @@ private[spark] class StagePage(parent: JobProgressUI) {
</div>
val taskHeaders: Seq[String] =
Seq("Task ID", "Status", "Duration", "Locality Level", "Worker", "Launch Time") ++
{if (hasShuffleRead) Seq("Shuffle Read") else Nil} ++
{if (hasShuffleWrite) Seq("Shuffle Write") else Nil} ++
Seq("Task ID", "Status", "Locality Level", "Executor", "Launch Time", "Duration") ++
Seq("GC Time") ++
Seq("Details")
{if (hasShuffleRead) Seq("Shuffle Read") else Nil} ++
{if (hasShuffleWrite) Seq("Shuffle Write") else Nil} ++
Seq("Errors")
val taskTable = listingTable(taskHeaders, taskRow(hasShuffleRead, hasShuffleWrite), tasks)
@@ -122,16 +123,19 @@ private[spark] class StagePage(parent: JobProgressUI) {
if (hasShuffleRead) shuffleReadQuantiles else Nil,
if (hasShuffleWrite) shuffleWriteQuantiles else Nil)
val quantileHeaders = Seq("Metric", "Min", "25%", "50%", "75%", "Max")
val quantileHeaders = Seq("Metric", "Min", "25th percentile",
"Median", "75th percentile", "Max")
def quantileRow(data: Seq[String]): Seq[Node] = <tr> {data.map(d => <td>{d}</td>)} </tr>
Some(listingTable(quantileHeaders, quantileRow, listings))
Some(listingTable(quantileHeaders, quantileRow, listings, fixedWidth = true))
}
val content =
summary ++ <h2>Summary Metrics</h2> ++ summaryTable.getOrElse(Nil) ++
<h2>Tasks</h2> ++ taskTable;
summary ++
<h4>Summary Metrics for {numCompleted} Completed Tasks</h4> ++
<div>{summaryTable.getOrElse("No tasks have reported metrics yet.")}</div> ++
<hr/><h4>Tasks</h4> ++ taskTable;
headerSparkPage(content, parent.sc, "Stage Details: %s".format(stageId), Jobs)
headerSparkPage(content, parent.sc, "Details for Stage %d".format(stageId), Jobs)
}
}
@@ -151,12 +155,15 @@ private[spark] class StagePage(parent: JobProgressUI) {
<tr>
<td>{info.taskId}</td>
<td>{info.status}</td>
<td sorttable_customkey={duration.toString}>
{formatDuration}
</td>
<td>{info.taskLocality}</td>
<td>{info.hostPort}</td>
<td>{dateFmt.format(new Date(info.launchTime))}</td>
<td sorttable_customkey={duration.toString}>
{formatDuration}
</td>
<td sorttable_customkey={gcTime.toString}>
{if (gcTime > 0) parent.formatDuration(gcTime) else ""}
</td>
{if (shuffleRead) {
<td>{metrics.flatMap{m => m.shuffleReadMetrics}.map{s =>
Utils.memoryBytesToString(s.remoteBytesRead)}.getOrElse("")}</td>
......@@ -165,9 +172,6 @@ private[spark] class StagePage(parent: JobProgressUI) {
<td>{metrics.flatMap{m => m.shuffleWriteMetrics}.map{s =>
Utils.memoryBytesToString(s.shuffleBytesWritten)}.getOrElse("")}</td>
}}
<td sorttable_customkey={gcTime.toString}>
{if (gcTime > 0) parent.formatDuration(gcTime) else ""}
</td>
<td>{exception.map(e =>
<span>
{e.className} ({e.description})<br/>
......
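The duration and GC-time cells above lean on sorttable's sorttable_customkey attribute: the cell displays a formatted string while the sort uses the raw number, so "1.0 min" sorts after "59 s" instead of lexicographically. The same pattern in isolation:

    import scala.xml.Node

    // Displays the formatted value; sorts by the raw milliseconds.
    def durationCell(durationMs: Long, formatted: String): Node =
      <td sorttable_customkey={durationMs.toString}>{formatted}</td>

    // durationCell(61000L, "1.0 min") renders "1.0 min" but sorts as 61000.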
package spark.ui.jobs
import java.util.Date
import java.text.SimpleDateFormat
import javax.servlet.http.HttpServletRequest
import scala.Some
import scala.xml.{NodeSeq, Node}
import scala.collection.mutable.HashMap
import scala.xml.Node
import scala.collection.mutable.HashSet
import spark.Utils
import spark.scheduler.cluster.{SchedulingMode, TaskInfo}
import spark.scheduler.Stage
import spark.ui.UIUtils._
import spark.ui.Page._
import spark.Utils
import spark.storage.StorageLevel
/** Page showing list of all ongoing and recently finished stages */
private[spark] class StageTable(val stages: Seq[Stage], val parent: JobProgressUI) {
@@ -38,10 +31,10 @@ private[spark] class StageTable(val stages: Seq[Stage], val parent: JobProgressU
{if (isFairScheduler) {<th>Pool Name</th>} else {}}
<th>Description</th>
<th>Submitted</th>
<td>Duration</td>
<td>Tasks: Succeeded/Total</td>
<td>Shuffle Read</td>
<td>Shuffle Write</td>
<th>Duration</th>
<th>Tasks: Succeeded/Total</th>
<th>Shuffle Read</th>
<th>Shuffle Write</th>
</thead>
<tbody>
{rows.map(r => makeRow(r))}
......
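Both this table and PoolTable swap stray <td> cells inside <thead> for <th>, the semantically correct header cell (bold by default, and what header-targeting CSS selects). Sketch of the resulting header row:

    val header =
      <thead>
        <th>Duration</th>
        <th>Tasks: Succeeded/Total</th>
        <th>Shuffle Read</th>
        <th>Shuffle Write</th>
      </thead>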
@@ -21,12 +21,13 @@ import javax.servlet.http.HttpServletRequest
import scala.xml.Node
import spark.storage.{StorageStatus, StorageUtils}
import spark.ui.UIUtils._
import spark.Utils
import spark.storage.{StorageStatus, StorageUtils}
import spark.storage.BlockManagerMasterActor.BlockStatus
import spark.ui.UIUtils._
import spark.ui.Page._
/** Page showing storage details for a given RDD */
private[spark] class RDDPage(parent: BlockManagerUI) {
val sc = parent.sc
@@ -44,7 +45,7 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
val workerTable = listingTable(workerHeaders, workerRow, workers)
val blockHeaders = Seq("Block Name", "Storage Level", "Size in Memory", "Size on Disk",
"Locations")
"Executors")
val blockStatuses = filteredStorageStatusList.flatMap(_.blocks).toArray.sortWith(_._1 < _._1)
val blockLocations = StorageUtils.blockLocationsFromStorageStatus(filteredStorageStatusList)
@@ -83,19 +84,19 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
<hr/>
<div class="row">
<div class="span12">
<h3> Data Distribution Summary </h3>
<h4> Data Distribution on {workers.size} Executors </h4>
{workerTable}
</div>
</div>
<hr/>
<div class="row">
<div class="span12">
<h4> Partitions </h4>
<h4> {blocks.size} Partitions </h4>
{blockTable}
</div>
</div>;
headerSparkPage(content, parent.sc, "RDD Info: " + rddInfo.name, Storage)
headerSparkPage(content, parent.sc, "RDD Storage Info for " + rddInfo.name, Storage)
}
def blockRow(row: (String, BlockStatus, Seq[String])): Seq[Node] = {
......
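The block listing in this file orders (name, status) pairs with sortWith(_._1 < _._1); a standalone sketch with stub data:

    val blockStatuses = Seq("rdd_0_1" -> 10L, "rdd_0_0" -> 20L)
    val sorted = blockStatuses.sortWith(_._1 < _._1)  // equivalently: sortBy(_._1)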