diff --git a/core/src/main/scala/spark/storage/StorageUtils.scala b/core/src/main/scala/spark/storage/StorageUtils.scala
index 950c0cdf352f736b157a7a05d71563b95371f377..aca16f533aa0eeb76c7e39b1f3cb8ab272a15d17 100644
--- a/core/src/main/scala/spark/storage/StorageUtils.scala
+++ b/core/src/main/scala/spark/storage/StorageUtils.scala
@@ -39,12 +39,19 @@ case class RDDInfo(id: Int, name: String, storageLevel: StorageLevel,
 private[spark]
 object StorageUtils {
 
-  /* Given the current storage status of the BlockManager, returns information for each RDD */
-  def rddInfoFromStorageStatus(storageStatusList: Array[StorageStatus],
+  /* Returns RDD-level information, compiled from a list of StorageStatus objects */
+  def rddInfoFromStorageStatus(storageStatusList: Seq[StorageStatus],
     sc: SparkContext) : Array[RDDInfo] = {
     rddInfoFromBlockStatusList(storageStatusList.flatMap(_.blocks).toMap, sc)
   }
 
+  /* Returns a map of blocks to their locations, compiled from a list of StorageStatus objects */
+  def blockLocationsFromStorageStatus(storageStatusList: Seq[StorageStatus]) = {
+    val blockLocationPairs = storageStatusList
+      .flatMap(status => status.blocks.map { case (blockId, _) => (blockId, status.blockManagerId.hostPort) })
+    blockLocationPairs.groupBy(_._1).map { case (blockId, pairs) => (blockId, pairs.map(_._2)) }.toMap
+  }
+
   /* Given a list of BlockStatus objets, returns information for each RDD */
   def rddInfoFromBlockStatusList(infos: Map[String, BlockStatus],
     sc: SparkContext) : Array[RDDInfo] = {
diff --git a/core/src/main/scala/spark/ui/storage/RDDPage.scala b/core/src/main/scala/spark/ui/storage/RDDPage.scala
index 0cb1e47ea551eafb0604c092dbaf5ce5439a0b03..428db6fa95670f1e76c5e5fd15283d9066bb1f6c 100644
--- a/core/src/main/scala/spark/ui/storage/RDDPage.scala
+++ b/core/src/main/scala/spark/ui/storage/RDDPage.scala
@@ -26,8 +26,14 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
     val workers = filteredStorageStatusList.map((prefix, _))
     val workerTable = listingTable(workerHeaders, workerRow, workers)
 
-    val blockHeaders = Seq("Block Name", "Storage Level", "Size in Memory", "Size on Disk")
-    val blocks = filteredStorageStatusList.flatMap(_.blocks).toArray.sortWith(_._1 < _._1)
+    val blockHeaders = Seq("Block Name", "Storage Level", "Size in Memory", "Size on Disk",
+      "Locations")
+
+    val blockStatuses = filteredStorageStatusList.flatMap(_.blocks).toArray.sortWith(_._1 < _._1)
+    val blockLocations = StorageUtils.blockLocationsFromStorageStatus(filteredStorageStatusList)
+    val blocks = blockStatuses.map {  // pair each block with its known locations, "UNKNOWN" if none
+      case (id, status) => (id, status, blockLocations.getOrElse(id, Seq("UNKNOWN")))
+    }
     val blockTable = listingTable(blockHeaders, blockRow, blocks)
 
     val content =
@@ -74,8 +80,8 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
     headerSparkPage(content, parent.sc, "RDD Info: " + rddInfo.name, Jobs)
   }
 
-  def blockRow(blk: (String, BlockStatus)): Seq[Node] = {
-    val (id, block) = blk
+  def blockRow(row: (String, BlockStatus, Seq[String])): Seq[Node] = {
+    val (id, block, locations) = row
     <tr>
       <td>{id}</td>
       <td>
@@ -87,6 +93,9 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
       <td sorttable_customkey={block.diskSize.toString}>
         {Utils.memoryBytesToString(block.diskSize)}
       </td>
+      <td>
+        {for (location <- locations) yield <span>{location}<br/></span>}
+      </td>
     </tr>
   }