diff --git a/core/src/main/resources/org/apache/spark/ui/static/additional-metrics.js b/core/src/main/resources/org/apache/spark/ui/static/additional-metrics.js
new file mode 100644
index 0000000000000000000000000000000000000000..c5936b5038ac929e95786b0647eab7ccac5770a6
--- /dev/null
+++ b/core/src/main/resources/org/apache/spark/ui/static/additional-metrics.js
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Register functions to show/hide columns based on checkboxes. These need
+ * to be registered after the page loads. */
+$(function() {
+    $("span.expand-additional-metrics").click(function(){
+        // Expand the list of additional metrics.
+        var additionalMetricsDiv = $(this).parent().find('.additional-metrics');
+        additionalMetricsDiv.toggleClass('collapsed');
+
+        // Switch the class of the arrow from open to closed.
+        $(this).find('.expand-additional-metrics-arrow')
+            .toggleClass('arrow-open arrow-closed');
+
+        // If clicking caused the metrics to expand, automatically check all options for additional
+        // metrics (don't trigger a click when collapsing metrics, because it leads to weird
+        // toggling behavior).
+        if (!additionalMetricsDiv.hasClass('collapsed')) {
+            $(this).parent().find('input:checkbox:not(:checked)').trigger('click');
+        }
+    });
+
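+    // Columns whose checkboxes start out unchecked are hidden by default.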
+    $("input:checkbox:not(:checked)").each(function() {
+        var column = "table ." + $(this).attr("name");
+        $(column).hide();
+    });
+
+    $("input:checkbox").click(function() {
+        var column = "table ." + $(this).attr("name");
+        $(column).toggle();
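+        // Re-stripe the tables: in the summary table each optional metric is an entire row,
+        // so toggling its class changes which rows are visible.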
+        stripeTables();
+    });
+
+    // Trigger a click on the checkbox if a user clicks the label next to it.
+    $("span.additional-metric-title").click(function() {
+        $(this).parent().find('input:checkbox').trigger('click');
+    });
+});
diff --git a/core/src/main/resources/org/apache/spark/ui/static/table.js b/core/src/main/resources/org/apache/spark/ui/static/table.js
new file mode 100644
index 0000000000000000000000000000000000000000..32187ba6e8df090f7d8376b2c913f0ed7af014d9
--- /dev/null
+++ b/core/src/main/resources/org/apache/spark/ui/static/table.js
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* Adds background colors to stripe table rows. This is necessary (instead of using CSS or the
+ * table striping provided by Bootstrap) to correctly stripe tables that contain hidden rows,
+ * because nth-child-based striping assigns colors based on all rows, including hidden ones. */
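+/* stripeTables() runs once on page load (below) and again from additional-metrics.js whenever
+ * columns or rows are shown or hidden. */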
+function stripeTables() {
+    $("table.table-striped-custom").each(function() {
+        $(this).find("tr:not(:hidden)").each(function (index) {
+           if (index % 2 == 1) {
+             $(this).css("background-color", "#f9f9f9");
+           } else {
+             $(this).css("background-color", "#ffffff");
+           }
+        });
+    });
+}
+
+/* Stripe all tables after the page finishes loading. */
+$(function() {
+    stripeTables();
+});
diff --git a/core/src/main/resources/org/apache/spark/ui/static/webui.css b/core/src/main/resources/org/apache/spark/ui/static/webui.css
index 152bde5f6994f174b23a27fda0d2f1a5d1e38251..a2220e761ac98aec57e264396ae3a60396cfc109 100644
--- a/core/src/main/resources/org/apache/spark/ui/static/webui.css
+++ b/core/src/main/resources/org/apache/spark/ui/static/webui.css
@@ -120,7 +120,37 @@ pre {
   border: none;
 }
 
+span.expand-additional-metrics {
+  cursor: pointer;
+}
+
+span.additional-metric-title {
+  cursor: pointer;
+}
+
+.additional-metrics.collapsed {
+  display: none;
+}
+
 .tooltip {
  font-weight: normal;
 }
 
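+/* The expand arrows are CSS triangles: zero-size boxes whose visible border forms the shape. */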
+.arrow-open {
+  width: 0;
+  height: 0;
+  border-left: 5px solid transparent;
+  border-right: 5px solid transparent;
+  border-top: 5px solid black;
+  float: left;
+  margin-top: 6px;
+}
+
+.arrow-closed {
+  width: 0;
+  height: 0;
+  border-top: 5px solid transparent;
+  border-bottom: 5px solid transparent;
+  border-left: 5px solid black;
+  display: inline-block;
+}
diff --git a/core/src/main/scala/org/apache/spark/ui/ToolTips.scala b/core/src/main/scala/org/apache/spark/ui/ToolTips.scala
index 9ced9b8107ebfa9ea7c21075861cb1314ca325ca..f02904df31fcfd49337ff8e65d93367beda109b6 100644
--- a/core/src/main/scala/org/apache/spark/ui/ToolTips.scala
+++ b/core/src/main/scala/org/apache/spark/ui/ToolTips.scala
@@ -31,4 +31,16 @@ private[spark] object ToolTips {
   val SHUFFLE_READ =
     """Bytes read from remote executors. Typically less than shuffle write bytes
        because this does not include shuffle data read locally."""
+
+  val GETTING_RESULT_TIME =
+    """Time that the driver spends fetching task results from workers. If this is large, consider
+       decreasing the amount of data returned from each task."""
+
+  val RESULT_SERIALIZATION_TIME =
+    """Time spent serializing the task result on the executor before sending it back to the
+       driver."""
+
+  val GC_TIME =
+    """Time that the executor spent paused for Java garbage collection while the task was
+       running."""
 }
diff --git a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
index 76714b1e6964f154f8854c828b083a7a2bcec415..3312671b6f885f4760c05247a41139c645d95dd9 100644
--- a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
@@ -20,13 +20,13 @@ package org.apache.spark.ui
 import java.text.SimpleDateFormat
 import java.util.{Locale, Date}
 
-import scala.xml.{Text, Node}
+import scala.xml.{Node, Text}
 
 import org.apache.spark.Logging
 
 /** Utility functions for generating XML pages with spark content. */
 private[spark] object UIUtils extends Logging {
-  val TABLE_CLASS = "table table-bordered table-striped table-condensed sortable"
+  val TABLE_CLASS = "table table-bordered table-striped-custom table-condensed sortable"
 
   // SimpleDateFormat is not thread-safe. Don't expose it to avoid improper use.
   private val dateFormat = new ThreadLocal[SimpleDateFormat]() {
@@ -160,6 +160,8 @@ private[spark] object UIUtils extends Logging {
     <script src={prependBaseUri("/static/jquery-1.11.1.min.js")}></script>
     <script src={prependBaseUri("/static/bootstrap-tooltip.js")}></script>
     <script src={prependBaseUri("/static/initialize-tooltips.js")}></script>
+    <script src={prependBaseUri("/static/table.js")}></script>
+    <script src={prependBaseUri("/static/additional-metrics.js")}></script>
   }
 
   /** Returns a spark page with correctly formatted headers */
@@ -240,7 +242,8 @@ private[spark] object UIUtils extends Logging {
       generateDataRow: T => Seq[Node],
       data: Iterable[T],
       fixedWidth: Boolean = false,
-      id: Option[String] = None): Seq[Node] = {
+      id: Option[String] = None,
+      headerClasses: Seq[String] = Seq.empty): Seq[Node] = {
 
     var listingTableClass = TABLE_CLASS
     if (fixedWidth) {
@@ -248,20 +251,29 @@ private[spark] object UIUtils extends Logging {
     }
     val colWidth = 100.toDouble / headers.size
     val colWidthAttr = if (fixedWidth) colWidth + "%" else ""
-    val headerRow: Seq[Node] = {
-      // if none of the headers have "\n" in them
-      if (headers.forall(!_.contains("\n"))) {
-        // represent header as simple text
-        headers.map(h => <th width={colWidthAttr}>{h}</th>)
+
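+    // Returns the header class for the given column index, or "" if none was specified.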
+    def getClass(index: Int): String = {
+      if (index < headerClasses.size) {
+        headerClasses(index)
       } else {
-        // represent header text as list while respecting "\n"
-        headers.map { case h =>
-          <th width={colWidthAttr}>
-            <ul class ="unstyled">
-              { h.split("\n").map { case t => <li> {t} </li> } }
-            </ul>
-          </th>
-        }
+        ""
+      }
+    }
+
+    val newlinesInHeader = headers.exists(_.contains("\n"))
+    def getHeaderContent(header: String): Seq[Node] = {
+      if (newlinesInHeader) {
+        <ul class="unstyled">
+          { header.split("\n").map { case t => <li> {t} </li> } }
+        </ul>
+      } else {
+        Text(header)
+      }
+    }
+
+    val headerRow: Seq[Node] = {
+      headers.view.zipWithIndex.map { case (header, index) =>
+        <th width={colWidthAttr} class={getClass(index)}>{getHeaderContent(header)}</th>
       }
     }
     <table class={listingTableClass} id={id.map(Text.apply)}>
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
index 2414e4c65237e91248b19b9bfcb82e9301dc36ee..961224a300a70085f9bf2b311ee167cec4ea2f1f 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
@@ -22,10 +22,11 @@ import javax.servlet.http.HttpServletRequest
 
 import scala.xml.{Node, Unparsed}
 
+import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.ui.{ToolTips, WebUIPage, UIUtils}
 import org.apache.spark.ui.jobs.UIData._
 import org.apache.spark.util.{Utils, Distribution}
-import org.apache.spark.scheduler.AccumulableInfo
+import org.apache.spark.scheduler.{AccumulableInfo, TaskInfo}
 
 /** Page showing statistics and task list for a given stage */
 private[ui] class StagePage(parent: JobProgressTab) extends WebUIPage("stage") {
@@ -57,7 +58,6 @@ private[ui] class StagePage(parent: JobProgressTab) extends WebUIPage("stage") {
       val hasShuffleWrite = stageData.shuffleWriteBytes > 0
       val hasBytesSpilled = stageData.memoryBytesSpilled > 0 && stageData.diskBytesSpilled > 0
 
-      // scalastyle:off
       val summary =
         <div>
           <ul class="unstyled">
@@ -65,55 +65,102 @@ private[ui] class StagePage(parent: JobProgressTab) extends WebUIPage("stage") {
               <strong>Total task time across all tasks: </strong>
               {UIUtils.formatDuration(stageData.executorRunTime)}
             </li>
-            {if (hasInput)
+            {if (hasInput) {
               <li>
                 <strong>Input: </strong>
                 {Utils.bytesToString(stageData.inputBytes)}
               </li>
-            }
-            {if (hasShuffleRead)
+            }}
+            {if (hasShuffleRead) {
               <li>
                 <strong>Shuffle read: </strong>
                 {Utils.bytesToString(stageData.shuffleReadBytes)}
               </li>
-            }
-            {if (hasShuffleWrite)
+            }}
+            {if (hasShuffleWrite) {
               <li>
                 <strong>Shuffle write: </strong>
                 {Utils.bytesToString(stageData.shuffleWriteBytes)}
               </li>
-            }
-            {if (hasBytesSpilled)
-            <li>
-              <strong>Shuffle spill (memory): </strong>
-              {Utils.bytesToString(stageData.memoryBytesSpilled)}
-            </li>
-            <li>
-              <strong>Shuffle spill (disk): </strong>
-              {Utils.bytesToString(stageData.diskBytesSpilled)}
-            </li>
-            }
+            }}
+            {if (hasBytesSpilled) {
+              <li>
+                <strong>Shuffle spill (memory): </strong>
+                {Utils.bytesToString(stageData.memoryBytesSpilled)}
+              </li>
+              <li>
+                <strong>Shuffle spill (disk): </strong>
+                {Utils.bytesToString(stageData.diskBytesSpilled)}
+              </li>
+            }}
           </ul>
         </div>
-        // scalastyle:on
+
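+      // The checkbox names match the CSS classes assigned to the corresponding table columns
+      // and summary rows (see TaskDetailsClassNames), which is how additional-metrics.js finds
+      // the cells to show or hide when a checkbox is toggled.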
+      val showAdditionalMetrics =
+        <div>
+          <span class="expand-additional-metrics">
+            <span class="expand-additional-metrics-arrow arrow-closed"></span>
+            <strong>Show additional metrics</strong>
+          </span>
+          <div class="additional-metrics collapsed">
+            <ul style="list-style-type:none">
+              <li>
+                <span data-toggle="tooltip"
+                      title={ToolTips.SCHEDULER_DELAY} data-placement="right">
+                  <input type="checkbox" name={TaskDetailsClassNames.SCHEDULER_DELAY}/>
+                  <span class="additional-metric-title">Scheduler Delay</span>
+                </span>
+              </li>
+              <li>
+                <span data-toggle="tooltip"
+                      title={ToolTips.GC_TIME} data-placement="right">
+                  <input type="checkbox" name={TaskDetailsClassNames.GC_TIME}/>
+                  <span class="additional-metric-title">GC Time</span>
+                </span>
+              </li>
+              <li>
+                <span data-toggle="tooltip"
+                      title={ToolTips.RESULT_SERIALIZATION_TIME} data-placement="right">
+                  <input type="checkbox" name={TaskDetailsClassNames.RESULT_SERIALIZATION_TIME}/>
+                  <span class="additional-metric-title">Result Serialization Time</span>
+                </span>
+              </li>
+              <li>
+                <span data-toggle="tooltip"
+                      title={ToolTips.GETTING_RESULT_TIME} data-placement="right">
+                  <input type="checkbox" name={TaskDetailsClassNames.GETTING_RESULT_TIME}/>
+                  <span class="additional-metric-title">Getting Result Time</span>
+                </span>
+              </li>
+            </ul>
+          </div>
+        </div>
+
       val accumulableHeaders: Seq[String] = Seq("Accumulable", "Value")
       def accumulableRow(acc: AccumulableInfo) = <tr><td>{acc.name}</td><td>{acc.value}</td></tr>
       val accumulableTable = UIUtils.listingTable(accumulableHeaders, accumulableRow,
         accumulables.values.toSeq)
 
-      val taskHeaders: Seq[String] =
+      val taskHeadersAndCssClasses: Seq[(String, String)] =
         Seq(
-          "Index", "ID", "Attempt", "Status", "Locality Level", "Executor ID / Host",
-          "Launch Time", "Duration", "GC Time", "Accumulators") ++
-        {if (hasInput) Seq("Input") else Nil} ++
-        {if (hasShuffleRead) Seq("Shuffle Read")  else Nil} ++
-        {if (hasShuffleWrite) Seq("Write Time", "Shuffle Write") else Nil} ++
-        {if (hasBytesSpilled) Seq("Shuffle Spill (Memory)", "Shuffle Spill (Disk)") else Nil} ++
-        Seq("Errors")
+          ("Index", ""), ("ID", ""), ("Attempt", ""), ("Status", ""), ("Locality Level", ""),
+          ("Executor ID / Host", ""), ("Launch Time", ""), ("Duration", ""), ("Accumulators", ""),
+          ("Scheduler Delay", TaskDetailsClassNames.SCHEDULER_DELAY),
+          ("GC Time", TaskDetailsClassNames.GC_TIME),
+          ("Result Serialization Time", TaskDetailsClassNames.RESULT_SERIALIZATION_TIME),
+          ("Getting Result Time", TaskDetailsClassNames.GETTING_RESULT_TIME)) ++
+        {if (hasInput) Seq(("Input", "")) else Nil} ++
+        {if (hasShuffleRead) Seq(("Shuffle Read", "")) else Nil} ++
+        {if (hasShuffleWrite) Seq(("Write Time", ""), ("Shuffle Write", "")) else Nil} ++
+        {if (hasBytesSpilled) Seq(("Shuffle Spill (Memory)", ""), ("Shuffle Spill (Disk)", ""))
+          else Nil} ++
+        Seq(("Errors", ""))
+
+      val unzipped = taskHeadersAndCssClasses.unzip
 
       val taskTable = UIUtils.listingTable(
-        taskHeaders, taskRow(hasInput, hasShuffleRead, hasShuffleWrite, hasBytesSpilled), tasks)
-
+        unzipped._1, taskRow(hasInput, hasShuffleRead, hasShuffleWrite, hasBytesSpilled), tasks,
+        headerClasses = unzipped._2)
       // Excludes tasks which failed and have incomplete metrics
       val validTasks = tasks.filter(t => t.taskInfo.status == "SUCCESS" && t.taskMetrics.isDefined)
 
@@ -122,18 +169,37 @@ private[ui] class StagePage(parent: JobProgressTab) extends WebUIPage("stage") {
           None
         }
         else {
-          val serializationTimes = validTasks.map { case TaskUIData(_, metrics, _) =>
-            metrics.get.resultSerializationTime.toDouble
+          def getFormattedTimeQuantiles(times: Seq[Double]): Seq[Node] = {
+            Distribution(times).get.getQuantiles().map { millis =>
+              <td>{UIUtils.formatDuration(millis.toLong)}</td>
+            }
           }
-          val serializationQuantiles =
-            <td>Result serialization time</td> +: Distribution(serializationTimes).
-              get.getQuantiles().map(ms => <td>{UIUtils.formatDuration(ms.toLong)}</td>)
 
           val serviceTimes = validTasks.map { case TaskUIData(_, metrics, _) =>
             metrics.get.executorRunTime.toDouble
           }
-          val serviceQuantiles = <td>Duration</td> +: Distribution(serviceTimes).get.getQuantiles()
-            .map(ms => <td>{UIUtils.formatDuration(ms.toLong)}</td>)
+          val serviceQuantiles = <td>Duration</td> +: getFormattedTimeQuantiles(serviceTimes)
+
+          val gcTimes = validTasks.map { case TaskUIData(_, metrics, _) =>
+            metrics.get.jvmGCTime.toDouble
+          }
+          val gcQuantiles =
+            <td>
+              <span data-toggle="tooltip"
+                  title={ToolTips.GC_TIME} data-placement="right">GC Time
+              </span>
+            </td> +: getFormattedTimeQuantiles(gcTimes)
+
+          val serializationTimes = validTasks.map { case TaskUIData(_, metrics, _) =>
+            metrics.get.resultSerializationTime.toDouble
+          }
+          val serializationQuantiles =
+            <td>
+              <span data-toggle="tooltip"
+                    title={ToolTips.RESULT_SERIALIZATION_TIME} data-placement="right">
+                Result Serialization Time
+              </span>
+            </td> +: getFormattedTimeQuantiles(serializationTimes)
 
           val gettingResultTimes = validTasks.map { case TaskUIData(info, _, _) =>
             if (info.gettingResultTime > 0) {
@@ -142,76 +208,75 @@ private[ui] class StagePage(parent: JobProgressTab) extends WebUIPage("stage") {
               0.0
             }
           }
-          val gettingResultQuantiles = <td>Time spent fetching task results</td> +:
-            Distribution(gettingResultTimes).get.getQuantiles().map { millis =>
-              <td>{UIUtils.formatDuration(millis.toLong)}</td>
-            }
+          val gettingResultQuantiles =
+            <td>
+              <span data-toggle="tooltip"
+                  title={ToolTips.GETTING_RESULT_TIME} data-placement="right">
+                Getting Result Time
+              </span>
+            </td> +:
+            getFormattedTimeQuantiles(gettingResultTimes)
           // The scheduler delay includes the network delay to send the task to the worker
           // machine and to send back the result (but not the time to fetch the task result,
           // if it needed to be fetched from the block manager on the worker).
           val schedulerDelays = validTasks.map { case TaskUIData(info, metrics, _) =>
-            val totalExecutionTime = {
-              if (info.gettingResultTime > 0) {
-                (info.gettingResultTime - info.launchTime).toDouble
-              } else {
-                (info.finishTime - info.launchTime).toDouble
-              }
-            }
-            totalExecutionTime - metrics.get.executorRunTime
+            getSchedulerDelay(info, metrics.get).toDouble
           }
           val schedulerDelayTitle = <td><span data-toggle="tooltip"
-            title={ToolTips.SCHEDULER_DELAY} data-placement="right">Scheduler delay</span></td>
+            title={ToolTips.SCHEDULER_DELAY} data-placement="right">Scheduler Delay</span></td>
           val schedulerDelayQuantiles = schedulerDelayTitle +:
-            Distribution(schedulerDelays).get.getQuantiles().map { millis =>
-              <td>{UIUtils.formatDuration(millis.toLong)}</td>
-            }
+            getFormattedTimeQuantiles(schedulerDelays)
 
-          def getQuantileCols(data: Seq[Double]) =
+          def getFormattedSizeQuantiles(data: Seq[Double]) =
             Distribution(data).get.getQuantiles().map(d => <td>{Utils.bytesToString(d.toLong)}</td>)
 
           val inputSizes = validTasks.map { case TaskUIData(_, metrics, _) =>
             metrics.get.inputMetrics.map(_.bytesRead).getOrElse(0L).toDouble
           }
-          val inputQuantiles = <td>Input</td> +: getQuantileCols(inputSizes)
+          val inputQuantiles = <td>Input</td> +: getFormattedSizeQuantiles(inputSizes)
 
           val shuffleReadSizes = validTasks.map { case TaskUIData(_, metrics, _) =>
             metrics.get.shuffleReadMetrics.map(_.remoteBytesRead).getOrElse(0L).toDouble
           }
           val shuffleReadQuantiles = <td>Shuffle Read (Remote)</td> +:
-            getQuantileCols(shuffleReadSizes)
+            getFormattedSizeQuantiles(shuffleReadSizes)
 
           val shuffleWriteSizes = validTasks.map { case TaskUIData(_, metrics, _) =>
             metrics.get.shuffleWriteMetrics.map(_.shuffleBytesWritten).getOrElse(0L).toDouble
           }
-          val shuffleWriteQuantiles = <td>Shuffle Write</td> +: getQuantileCols(shuffleWriteSizes)
+          val shuffleWriteQuantiles = <td>Shuffle Write</td> +:
+            getFormattedSizeQuantiles(shuffleWriteSizes)
 
           val memoryBytesSpilledSizes = validTasks.map { case TaskUIData(_, metrics, _) =>
             metrics.get.memoryBytesSpilled.toDouble
           }
           val memoryBytesSpilledQuantiles = <td>Shuffle spill (memory)</td> +:
-            getQuantileCols(memoryBytesSpilledSizes)
+            getFormattedSizeQuantiles(memoryBytesSpilledSizes)
 
           val diskBytesSpilledSizes = validTasks.map { case TaskUIData(_, metrics, _) =>
             metrics.get.diskBytesSpilled.toDouble
           }
           val diskBytesSpilledQuantiles = <td>Shuffle spill (disk)</td> +:
-            getQuantileCols(diskBytesSpilledSizes)
+            getFormattedSizeQuantiles(diskBytesSpilledSizes)
 
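+          // Rows for the optional metrics carry the same CSS classes as the corresponding task
+          // columns, so checking a box under "Show additional metrics" reveals both at once.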
           val listings: Seq[Seq[Node]] = Seq(
-            serializationQuantiles,
-            serviceQuantiles,
-            gettingResultQuantiles,
-            schedulerDelayQuantiles,
-            if (hasInput) inputQuantiles else Nil,
-            if (hasShuffleRead) shuffleReadQuantiles else Nil,
-            if (hasShuffleWrite) shuffleWriteQuantiles else Nil,
-            if (hasBytesSpilled) memoryBytesSpilledQuantiles else Nil,
-            if (hasBytesSpilled) diskBytesSpilledQuantiles else Nil)
+            <tr>{serviceQuantiles}</tr>,
+            <tr class={TaskDetailsClassNames.SCHEDULER_DELAY}>{schedulerDelayQuantiles}</tr>,
+            <tr class={TaskDetailsClassNames.GC_TIME}>{gcQuantiles}</tr>,
+            <tr class={TaskDetailsClassNames.RESULT_SERIALIZATION_TIME}>
+              {serializationQuantiles}
+            </tr>,
+            <tr class={TaskDetailsClassNames.GETTING_RESULT_TIME}>{gettingResultQuantiles}</tr>,
+            if (hasInput) <tr>{inputQuantiles}</tr> else Nil,
+            if (hasShuffleRead) <tr>{shuffleReadQuantiles}</tr> else Nil,
+            if (hasShuffleWrite) <tr>{shuffleWriteQuantiles}</tr> else Nil,
+            if (hasBytesSpilled) <tr>{memoryBytesSpilledQuantiles}</tr> else Nil,
+            if (hasBytesSpilled) <tr>{diskBytesSpilledQuantiles}</tr> else Nil)
 
           val quantileHeaders = Seq("Metric", "Min", "25th percentile",
             "Median", "75th percentile", "Max")
-          def quantileRow(data: Seq[Node]): Seq[Node] = <tr>{data}</tr>
-          Some(UIUtils.listingTable(quantileHeaders, quantileRow, listings, fixedWidth = true))
+          Some(UIUtils.listingTable(
+            quantileHeaders, identity[Seq[Node]], listings, fixedWidth = true))
         }
 
       val executorTable = new ExecutorTable(stageId, stageAttemptId, parent)
@@ -221,6 +286,7 @@ private[ui] class StagePage(parent: JobProgressTab) extends WebUIPage("stage") {
 
       val content =
         summary ++
+        showAdditionalMetrics ++
         <h4>Summary Metrics for {numCompleted} Completed Tasks</h4> ++
         <div>{summaryTable.getOrElse("No tasks have reported metrics yet.")}</div> ++
         <h4>Aggregated Metrics by Executor</h4> ++ executorTable.toNodeSeq ++
@@ -241,8 +307,10 @@ private[ui] class StagePage(parent: JobProgressTab) extends WebUIPage("stage") {
         else metrics.map(_.executorRunTime).getOrElse(1L)
       val formatDuration = if (info.status == "RUNNING") UIUtils.formatDuration(duration)
         else metrics.map(m => UIUtils.formatDuration(m.executorRunTime)).getOrElse("")
+      val schedulerDelay = metrics.map(getSchedulerDelay(info, _)).getOrElse(0L)
       val gcTime = metrics.map(_.jvmGCTime).getOrElse(0L)
       val serializationTime = metrics.map(_.resultSerializationTime).getOrElse(0L)
+      val gettingResultTime = info.gettingResultTime
 
       val maybeInput = metrics.flatMap(_.inputMetrics)
       val inputSortable = maybeInput.map(_.bytesRead.toString).getOrElse("")
@@ -287,20 +355,25 @@ private[ui] class StagePage(parent: JobProgressTab) extends WebUIPage("stage") {
         <td sorttable_customkey={duration.toString}>
           {formatDuration}
         </td>
-        <td sorttable_customkey={gcTime.toString}>
-          {if (gcTime > 0) UIUtils.formatDuration(gcTime) else ""}
-        </td>
         <td>
           {Unparsed(
-            info.accumulables.map{acc => s"${acc.name}: ${acc.update.get}"}.mkString("<br/>")
-          )}
+          info.accumulables.map{acc => s"${acc.name}: ${acc.update.get}"}.mkString("<br/>"))}
+        </td>
+        <td sorttable_customkey={schedulerDelay.toString}
+            class={TaskDetailsClassNames.SCHEDULER_DELAY}>
+          {UIUtils.formatDuration(schedulerDelay.toLong)}
         </td>
-        <!--
-        TODO: Add this back after we add support to hide certain columns.
-        <td sorttable_customkey={serializationTime.toString}>
-          {if (serializationTime > 0) UIUtils.formatDuration(serializationTime) else ""}
+        <td sorttable_customkey={gcTime.toString} class={TaskDetailsClassNames.GC_TIME}>
+          {if (gcTime > 0) UIUtils.formatDuration(gcTime) else ""}
+        </td>
+        <td sorttable_customkey={serializationTime.toString}
+            class={TaskDetailsClassNames.RESULT_SERIALIZATION_TIME}>
+          {UIUtils.formatDuration(serializationTime)}
+        </td>
+        <td sorttable_customkey={gettingResultTime.toString}
+            class={TaskDetailsClassNames.GETTING_RESULT_TIME}>
+          {UIUtils.formatDuration(gettingResultTime)}
         </td>
-        -->
         {if (hasInput) {
           <td sorttable_customkey={inputSortable}>
             {inputReadable}
@@ -333,4 +406,15 @@ private[ui] class StagePage(parent: JobProgressTab) extends WebUIPage("stage") {
       </tr>
     }
   }
+
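+  /**
+   * Time between when the task was launched and when it finished (or when the driver started
+   * fetching its result), excluding the time the executor spent actually running the task.
+   */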
+  private def getSchedulerDelay(info: TaskInfo, metrics: TaskMetrics): Long = {
+    val totalExecutionTime = {
+      if (info.gettingResultTime > 0) {
+        (info.gettingResultTime - info.launchTime)
+      } else {
+        (info.finishTime - info.launchTime)
+      }
+    }
+    totalExecutionTime - metrics.executorRunTime
+  }
 }
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/TaskDetailsClassNames.scala b/core/src/main/scala/org/apache/spark/ui/jobs/TaskDetailsClassNames.scala
new file mode 100644
index 0000000000000000000000000000000000000000..23d672cabda07ccdfef92fb2460b85c6ac96b291
--- /dev/null
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/TaskDetailsClassNames.scala
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.ui.jobs
+
+/**
+ * Names of the CSS classes corresponding to each type of task detail. Used to allow users
+ * to optionally show/hide columns.
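+ * These names are used both for the checkboxes and for the CSS classes of the corresponding
+ * table columns and summary rows in StagePage, which is how additional-metrics.js pairs each
+ * checkbox with the cells it shows or hides.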
+ */
+private object TaskDetailsClassNames {
+  val SCHEDULER_DELAY = "scheduler_delay"
+  val GC_TIME = "gc_time"
+  val RESULT_SERIALIZATION_TIME = "serialization_time"
+  val GETTING_RESULT_TIME = "getting_result_time"
+}