diff --git a/core/src/main/resources/spark/ui/static/webui.css b/core/src/main/resources/spark/ui/static/webui.css
index 8b9f4ee93841d15f4fcda92013f274d755a181f2..fd2cbad004a8e96663941662dd223257ed9c4e8f 100644
--- a/core/src/main/resources/spark/ui/static/webui.css
+++ b/core/src/main/resources/spark/ui/static/webui.css
@@ -51,3 +51,27 @@
 .table td {
   vertical-align: middle !important;
 }
+
+.progress-completed .bar,
+.progress .bar-completed {
+  background-color: #b3def9;
+  background-image: -moz-linear-gradient(top, #addfff, #badcf2);
+  background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#addfff), to(#badcf2));
+  background-image: -webkit-linear-gradient(top, #addfff, #badcf2);
+  background-image: -o-linear-gradient(top, #addfff, #badcf2);
+  background-image: linear-gradient(to bottom, #addfff, #badcf2);
+  background-repeat: repeat-x;
+  filter: progid:dximagetransform.microsoft.gradient(startColorstr='#ffaddfff', endColorstr='#ffbadcf2', GradientType=0);
+}
+
+.progress-running .bar,
+.progress .bar-running {
+  background-color: #c2ebfa;
+  background-image: -moz-linear-gradient(top, #bdedff, #c7e8f5);
+  background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#bdedff), to(#c7e8f5));
+  background-image: -webkit-linear-gradient(top, #bdedff, #c7e8f5);
+  background-image: -o-linear-gradient(top, #bdedff, #c7e8f5);
+  background-image: linear-gradient(to bottom, #bdedff, #c7e8f5);
+  background-repeat: repeat-x;
+  filter: progid:dximagetransform.microsoft.gradient(startColorstr='#ffbdedff', endColorstr='#ffc7e8f5', GradientType=0);
+}
diff --git a/core/src/main/scala/spark/ui/UIUtils.scala b/core/src/main/scala/spark/ui/UIUtils.scala
index cff26d3168b334c5535768838a522f29e92d9f9b..226fe49aaffcf93e17a13898340bff68d6bc015e 100644
--- a/core/src/main/scala/spark/ui/UIUtils.scala
+++ b/core/src/main/scala/spark/ui/UIUtils.scala
@@ -28,14 +28,14 @@ private[spark] object UIUtils {
   /** Returns a spark page with correctly formatted headers */
   def headerSparkPage(content: => Seq[Node], sc: SparkContext, title: String, page: Page.Value)
   : Seq[Node] = {
-    val storage = page match {
-      case Storage => <li class="active"><a href="/storage">Storage</a></li>
-      case _ => <li><a href="/storage">Storage</a></li>
-    }
     val jobs = page match {
       case Jobs => <li class="active"><a href="/stages">Jobs</a></li>
       case _ => <li><a href="/stages">Jobs</a></li>
     }
+    val storage = page match {
+      case Storage => <li class="active"><a href="/storage">Storage</a></li>
+      case _ => <li><a href="/storage">Storage</a></li>
+    }
     val environment = page match {
       case Environment => <li class="active"><a href="/environment">Environment</a></li>
       case _ => <li><a href="/environment">Environment</a></li>
@@ -65,17 +65,14 @@ private[spark] object UIUtils {
               <div class="navbar">
                 <div class="navbar-inner">
                   <div class="container">
-                    <div class="brand"><img src="/static/spark-logo-77x50px-hd.png" /></div>
+                    <a href="/" class="brand"><img src="/static/spark-logo-77x50px-hd.png" /></a>
                     <ul class="nav">
-                      {storage}
                       {jobs}
+                      {storage}
                       {environment}
                       {executors}
                     </ul>
-                    <ul id="infolist">
-                      <li>Application: <strong>{sc.appName}</strong></li>
-                      <li>Executors: <strong>{sc.getExecutorStorageStatus.size}</strong></li>
-                    </ul>
+                    <p class="navbar-text pull-right">Application: <strong>{sc.appName}</strong></p>
                   </div>
                 </div>
               </div>
diff --git a/core/src/main/scala/spark/ui/env/EnvironmentUI.scala b/core/src/main/scala/spark/ui/env/EnvironmentUI.scala
index 5ae7935ed487a19a6c85c1508fe71929d91ccc5d..e98302611ee43b74419b16a3cd5d949b81721178 100644
--- a/core/src/main/scala/spark/ui/env/EnvironmentUI.scala
+++ b/core/src/main/scala/spark/ui/env/EnvironmentUI.scala
@@ -44,7 +44,7 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
       ("Java Home", Properties.javaHome),
       ("Scala Version", Properties.versionString),
       ("Scala Home", Properties.scalaHome)
-    )
+    ).sorted
     def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
     def jvmTable = UIUtils.listingTable(Seq("Name", "Value"), jvmRow, jvmInformation)
 
@@ -53,8 +53,8 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
         .filter{case (k, v) => k.contains("java.class.path")}
         .headOption
         .getOrElse("", "")
-    val sparkProperties = properties.filter(_._1.startsWith("spark"))
-    val otherProperties = properties.diff(sparkProperties :+ classPathProperty)
+    val sparkProperties = properties.filter(_._1.startsWith("spark")).sorted
+    val otherProperties = properties.diff(sparkProperties :+ classPathProperty).sorted
 
     val propertyHeaders = Seq("Name", "Value")
     def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
@@ -67,7 +67,7 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
         .map(e => (e, "System Classpath"))
     val addedJars = sc.addedJars.iterator.toSeq.map{case (path, time) => (path, "Added By User")}
     val addedFiles = sc.addedFiles.iterator.toSeq.map{case (path, time) => (path, "Added By User")}
-    val classPath = addedJars ++ addedFiles ++ classPathEntries
+    val classPath = (addedJars ++ addedFiles ++ classPathEntries).sorted
 
     val classPathHeaders = Seq("Resource", "Source")
     def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
diff --git a/core/src/main/scala/spark/ui/jobs/IndexPage.scala b/core/src/main/scala/spark/ui/jobs/IndexPage.scala
index 2da2155e090ac2171fa9d2844c89f4da89cd54d9..b611b0fe85b08ea79653a71ae64655629b359c0e 100644
--- a/core/src/main/scala/spark/ui/jobs/IndexPage.scala
+++ b/core/src/main/scala/spark/ui/jobs/IndexPage.scala
@@ -41,9 +41,9 @@ private[spark] class IndexPage(parent: JobProgressUI) {
       activeTime += t.timeRunning(now)
     }
 
-    val activeStagesTable = new StageTable(activeStages, parent)
-    val completedStagesTable = new StageTable(completedStages, parent)
-    val failedStagesTable = new StageTable(failedStages, parent)
+    val activeStagesTable = new StageTable(activeStages.sortBy(_.submissionTime).reverse, parent)
+    val completedStagesTable = new StageTable(completedStages.sortBy(_.submissionTime).reverse, parent)
+    val failedStagesTable = new StageTable(failedStages.sortBy(_.submissionTime).reverse, parent)
 
     val poolTable = new PoolTable(listener.sc.getAllPools, listener)
     val summary: NodeSeq =
@@ -69,11 +69,19 @@ private[spark] class IndexPage(parent: JobProgressUI) {
               {Utils.memoryBytesToString(listener.totalShuffleWrite)}
             </li>
          }
-          <li><strong>Active Stages Number:</strong> {activeStages.size} </li>
-          <li><strong>Completed Stages Number:</strong> {completedStages.size} </li>
-          <li><strong>Failed Stages Number:</strong> {failedStages.size} </li>
-          <li><strong>Scheduling Mode:</strong> {parent.sc.getSchedulingMode}</li>
-
+          <li>
+            <a href="#active"><strong>Active Stages:</strong></a>
+            {activeStages.size}
+          </li>
+          <li>
+            <a href="#completed"><strong>Completed Stages:</strong></a>
+            {completedStages.size}
+          </li>
+          <li>
+            <a href="#failed"><strong>Failed Stages:</strong></a>
+            {failedStages.size}
+          </li>
+          <li><strong>Scheduling Mode:</strong> {parent.sc.getSchedulingMode}</li>
        </ul>
      </div>
 
@@ -83,11 +91,11 @@ private[spark] class IndexPage(parent: JobProgressUI) {
       } else {
         Seq()
       }} ++
-      <h3>Active Stages : {activeStages.size}</h3> ++
+      <h3 id="active">Active Stages : {activeStages.size}</h3> ++
       activeStagesTable.toNodeSeq++
-      <h3>Completed Stages : {completedStages.size}</h3> ++
+      <h3 id="completed">Completed Stages : {completedStages.size}</h3> ++
       completedStagesTable.toNodeSeq++
-      <h3>Failed Stages : {failedStages.size}</h3> ++
+      <h3 id="failed">Failed Stages : {failedStages.size}</h3> ++
       failedStagesTable.toNodeSeq
 
     headerSparkPage(content, parent.sc, "Spark Stages", Jobs)
diff --git a/core/src/main/scala/spark/ui/jobs/PoolPage.scala b/core/src/main/scala/spark/ui/jobs/PoolPage.scala
index ee5a6a6a48bec821fce2ec5e93b302f30aadffff..647c6d2ae3edb836e75f806073d1141c02dee0ea 100644
--- a/core/src/main/scala/spark/ui/jobs/PoolPage.scala
+++ b/core/src/main/scala/spark/ui/jobs/PoolPage.scala
@@ -17,7 +17,7 @@ private[spark] class PoolPage(parent: JobProgressUI) {
     val poolName = request.getParameter("poolname")
     val poolToActiveStages = listener.poolToActiveStages
     val activeStages = poolToActiveStages.getOrElseUpdate(poolName, new HashSet[Stage]).toSeq
-    val activeStagesTable = new StageTable(activeStages, parent)
+    val activeStagesTable = new StageTable(activeStages.sortBy(_.submissionTime).reverse, parent)
 
     val pool = listener.sc.getPoolForName(poolName).get
     val poolTable = new PoolTable(Seq(pool), listener)
diff --git a/core/src/main/scala/spark/ui/jobs/StageTable.scala b/core/src/main/scala/spark/ui/jobs/StageTable.scala
index 38fa3bcbcd17369bf69e64a74ad5a0709dae1af3..1df0e0913c6755cee8ecc5d5cf7b146a65fd8d7c 100644
--- a/core/src/main/scala/spark/ui/jobs/StageTable.scala
+++ b/core/src/main/scala/spark/ui/jobs/StageTable.scala
@@ -37,10 +37,9 @@ private[spark] class StageTable(val stages: Seq[Stage], val parent: JobProgressU
         <th>Description</th>
         <th>Submitted</th>
         <td>Duration</td>
-        <td colspan="2">Tasks: Complete/Total</td>
+        <td>Tasks: Succeeded/Total</td>
         <td>Shuffle Read</td>
         <td>Shuffle Write</td>
-        <td>Stored RDD</td>
       </thead>
       <tbody>
         {rows.map(r => makeRow(r))}
@@ -55,13 +54,16 @@ private[spark] class StageTable(val stages: Seq[Stage], val parent: JobProgressU
     }
   }
 
-  def makeProgressBar(started: Int, completed: Int, total: Int): Seq[Node] = {
+  def makeProgressBar(started: Int, completed: Int, failed: String, total: Int): Seq[Node] = {
     val completeWidth = "width: %s%%".format((completed.toDouble/total)*100)
     val startWidth = "width: %s%%".format((started.toDouble/total)*100)
 
-    <div class="progress" style="height: 15px; margin-bottom: 0px">
-      <div class="bar" style={completeWidth}></div>
-      <div class="bar bar-info" style={startWidth}></div>
+    <div class="progress" style="height: 15px; margin-bottom: 0px; position: relative">
+      <span style="text-align:center; position:absolute; width:100%;">
+        {completed}/{total} {failed}
+      </span>
+      <div class="bar bar-completed" style={completeWidth}></div>
+      <div class="bar bar-running" style={startWidth}></div>
     </div>
   }
 
@@ -83,6 +85,10 @@ private[spark] class StageTable(val stages: Seq[Stage], val parent: JobProgressU
 
     val startedTasks = listener.stageToTasksActive.getOrElse(s.id, HashSet[TaskInfo]()).size
     val completedTasks = listener.stageToTasksComplete.getOrElse(s.id, 0)
+    val failedTasks = listener.stageToTasksFailed.getOrElse(s.id, 0) match {
+        case f if f > 0 => "(%s failed)".format(f)
+        case _ => ""
+    }
     val totalTasks = s.numPartitions
 
     val poolName = listener.stageToPool.get(s)
@@ -100,21 +106,11 @@ private[spark] class StageTable(val stages: Seq[Stage], val parent: JobProgressU
       <td valign="middle">{submissionTime}</td>
       <td>{getElapsedTime(s.submissionTime,
              s.completionTime.getOrElse(System.currentTimeMillis()))}</td>
-      <td class="progress-cell">{makeProgressBar(startedTasks, completedTasks, totalTasks)}</td>
-      <td style="border-left: 0; text-align: center;">{completedTasks} / {totalTasks}
-        {listener.stageToTasksFailed.getOrElse(s.id, 0) match {
-        case f if f > 0 => "(%s failed)".format(f)
-        case _ =>
-      }}
+      <td class="progress-cell">
+        {makeProgressBar(startedTasks, completedTasks, failedTasks, totalTasks)}
       </td>
       <td>{shuffleRead}</td>
       <td>{shuffleWrite}</td>
-      <td>{if (s.rdd.getStorageLevel != StorageLevel.NONE) {
-             <a href={"/storage/rdd?id=%s".format(s.rdd.id)}>
-               {Option(s.rdd.name).getOrElse(s.rdd.id)}
-             </a>
-          }}
-      </td>
     </tr>
   }
 }
diff --git a/core/src/main/scala/spark/ui/storage/RDDPage.scala b/core/src/main/scala/spark/ui/storage/RDDPage.scala
index 003be54ad8ec9f44f79de7a8906a925ad2c4a5f3..cd828778a6f247b516c44ce6e15947d84c8e4436 100644
--- a/core/src/main/scala/spark/ui/storage/RDDPage.scala
+++ b/core/src/main/scala/spark/ui/storage/RDDPage.scala
@@ -83,18 +83,19 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
       <hr/>
       <div class="row">
         <div class="span12">
+          <h3> Data Distribution Summary </h3>
           {workerTable}
         </div>
       </div>
       <hr/>
       <div class="row">
         <div class="span12">
-          <h3> RDD Summary </h3>
+          <h3> Partitions </h3>
           {blockTable}
         </div>
       </div>;
 
-    headerSparkPage(content, parent.sc, "RDD Info: " + rddInfo.name, Jobs)
+    headerSparkPage(content, parent.sc, "RDD Info: " + rddInfo.name, Storage)
   }
 
   def blockRow(row: (String, BlockStatus, Seq[String])): Seq[Node] = {