Commit 6855338e authored by Patrick Wendell

Show block locations in Web UI.

This fixes SPARK-769. Support is added for enumerating the locations of blocks
in the UI. There is also some minor cleanup in StorageUtils.
parent 018d04c6
StorageUtils.scala
@@ -39,12 +39,19 @@ case class RDDInfo(id: Int, name: String, storageLevel: StorageLevel,
 private[spark]
 object StorageUtils {
 
-  /* Given the current storage status of the BlockManager, returns information for each RDD */
-  def rddInfoFromStorageStatus(storageStatusList: Array[StorageStatus],
+  /* Returns RDD-level information, compiled from a list of StorageStatus objects */
+  def rddInfoFromStorageStatus(storageStatusList: Seq[StorageStatus],
     sc: SparkContext) : Array[RDDInfo] = {
     rddInfoFromBlockStatusList(storageStatusList.flatMap(_.blocks).toMap, sc)
   }
 
+  /* Returns a map of blocks to their locations, compiled from a list of StorageStatus objects */
+  def blockLocationsFromStorageStatus(storageStatusList: Seq[StorageStatus]) = {
+    val blockLocationPairs = storageStatusList
+      .flatMap(s => s.blocks.map(b => (b._1, s.blockManagerId.hostPort)))
+    blockLocationPairs.groupBy(_._1).map{case (k, v) => (k, v.unzip._2)}.toMap
+  }
+
   /* Given a list of BlockStatus objets, returns information for each RDD */
   def rddInfoFromBlockStatusList(infos: Map[String, BlockStatus],
     sc: SparkContext) : Array[RDDInfo] = {
...
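The new blockLocationsFromStorageStatus helper builds a map from block id to the hostPort of every executor that reports that block. Below is a minimal, self-contained sketch of just that grouping step; the StorageStatus type here is a simplified stand-in (with hostPort inlined rather than read from a BlockManagerId), not the actual Spark class.

// Sketch only: group (blockId, hostPort) pairs by block id so each block
// maps to the list of executors that hold it.
object BlockLocationsSketch {
  case class StorageStatus(hostPort: String, blocks: Map[String, Long])

  def blockLocations(statuses: Seq[StorageStatus]): Map[String, Seq[String]] = {
    val pairs = statuses.flatMap(s => s.blocks.map { case (blockId, _) => (blockId, s.hostPort) })
    pairs.groupBy(_._1).map { case (blockId, hostPairs) => (blockId, hostPairs.unzip._2) }
  }

  def main(args: Array[String]): Unit = {
    val statuses = Seq(
      StorageStatus("host1:4040", Map("rdd_0_0" -> 100L, "rdd_0_1" -> 200L)),
      StorageStatus("host2:4040", Map("rdd_0_0" -> 100L)))
    // rdd_0_0 is held by both executors; rdd_0_1 only by host1.
    blockLocations(statuses).foreach(println)
  }
}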
RDDPage.scala
@@ -26,8 +26,14 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
     val workers = filteredStorageStatusList.map((prefix, _))
     val workerTable = listingTable(workerHeaders, workerRow, workers)
 
-    val blockHeaders = Seq("Block Name", "Storage Level", "Size in Memory", "Size on Disk")
-    val blocks = filteredStorageStatusList.flatMap(_.blocks).toArray.sortWith(_._1 < _._1)
+    val blockHeaders = Seq("Block Name", "Storage Level", "Size in Memory", "Size on Disk",
+      "Locations")
+
+    val blockStatuses = filteredStorageStatusList.flatMap(_.blocks).toArray.sortWith(_._1 < _._1)
+    val blockLocations = StorageUtils.blockLocationsFromStorageStatus(filteredStorageStatusList)
+    val blocks = blockStatuses.map {
+      case(id, status) => (id, status, blockLocations.get(id).getOrElse(Seq("UNKNOWN")))
+    }
     val blockTable = listingTable(blockHeaders, blockRow, blocks)
 
     val content =
@@ -74,8 +80,8 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
     headerSparkPage(content, parent.sc, "RDD Info: " + rddInfo.name, Jobs)
   }
 
-  def blockRow(blk: (String, BlockStatus)): Seq[Node] = {
-    val (id, block) = blk
+  def blockRow(row: (String, BlockStatus, Seq[String])): Seq[Node] = {
+    val (id, block, locations) = row
     <tr>
       <td>{id}</td>
       <td>
@@ -87,6 +93,9 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
       <td sorttable_customkey={block.diskSize.toString}>
        {Utils.memoryBytesToString(block.diskSize)}
       </td>
+      <td>
+        {locations.map(l => <span>{l}<br/></span>)}
+      </td>
     </tr>
   }
...
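On the RDD page, each table row now carries the block's locations as a third tuple element, defaulting to "UNKNOWN" for blocks no executor reports, and the new Locations cell prints one hostPort per line. A rough, self-contained sketch of the row assembly follows; BlockStatus is a simplified stand-in here, and the sample block ids and host ports are made up for illustration.

// Sketch only: join sorted block statuses with the location map, falling
// back to Seq("UNKNOWN") for blocks that no executor reports.
object RDDPageRowsSketch {
  case class BlockStatus(memSize: Long, diskSize: Long)

  def main(args: Array[String]): Unit = {
    val blockStatuses = Seq(
      "rdd_0_1" -> BlockStatus(0L, 400L),
      "rdd_0_0" -> BlockStatus(100L, 0L)).sortWith(_._1 < _._1)
    val blockLocations = Map("rdd_0_0" -> Seq("host1:4040", "host2:4040"))

    // One (id, status, locations) triple per block, matching the new blockRow signature.
    val rows = blockStatuses.map { case (id, status) =>
      (id, status, blockLocations.getOrElse(id, Seq("UNKNOWN")))
    }
    rows.foreach(println)
  }
}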