diff --git a/core/pom.xml b/core/pom.xml
index 7b68dbaea4789bc858676611861c4065a939e9c0..320d1076f7c033becf6e0497ea27ea78de16010f 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -247,6 +247,12 @@
         </exclusion>
       </exclusions>
     </dependency>
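+    <!-- The version is managed centrally in the root pom's <dependencyManagement> section. -->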
+    <dependency>
+      <groupId>org.seleniumhq.selenium</groupId>
+      <artifactId>selenium-java</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.scalatest</groupId>
       <artifactId>scalatest_${scala.binary.version}</artifactId>
diff --git a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
index 32e6b15bb0999993e06fd0ebf3969051508b1127..76714b1e6964f154f8854c828b083a7a2bcec415 100644
--- a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
@@ -20,7 +20,7 @@ package org.apache.spark.ui
 import java.text.SimpleDateFormat
 import java.util.{Locale, Date}
 
-import scala.xml.Node
+import scala.xml.{Node, Text}
 
 import org.apache.spark.Logging
 
@@ -239,7 +239,8 @@ private[spark] object UIUtils extends Logging {
       headers: Seq[String],
       generateDataRow: T => Seq[Node],
       data: Iterable[T],
-      fixedWidth: Boolean = false): Seq[Node] = {
+      fixedWidth: Boolean = false,
+      id: Option[String] = None): Seq[Node] = {
 
     var listingTableClass = TABLE_CLASS
     if (fixedWidth) {
@@ -263,7 +264,8 @@
         }
       }
     }
-    <table class={listingTableClass}>
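+    // When id is None, Scala XML omits the attribute entirely, so existing tables render unchanged.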
+    <table class={listingTableClass} id={id.map(Text.apply)}>
       <thead>{headerRow}</thead>
       <tbody>
         {data.map(r => generateDataRow(r))}
diff --git a/core/src/main/scala/org/apache/spark/ui/WebUI.scala b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
index 5d88ca403a6740857de816e67f7fdedfaeee783b..9be65a4a39a09719bcbb27078fb0597cce8bb42c 100644
--- a/core/src/main/scala/org/apache/spark/ui/WebUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
@@ -82,7 +82,7 @@ private[spark] abstract class WebUI(
   }
 
   /** Detach a handler from this UI. */
-  def detachHandler(handler: ServletContextHandler) {
+  protected def detachHandler(handler: ServletContextHandler) {
     handlers -= handler
     serverInfo.foreach { info =>
       info.rootHandler.removeHandler(handler)
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
index 8a0075ae8daf7e3d34905e3fe53bd9b5fbd79e9c..12d23a92878cf6a0fcb8321009d0d478ad25aff2 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
@@ -39,7 +39,8 @@ private[ui] class RDDPage(parent: StorageTab) extends WebUIPage("rdd") {
 
     // Worker table
     val workers = storageStatusList.map((rddId, _))
-    val workerTable = UIUtils.listingTable(workerHeader, workerRow, workers)
+    val workerTable = UIUtils.listingTable(workerHeader, workerRow, workers,
+      id = Some("rdd-storage-by-worker-table"))
 
     // Block table
     val blockLocations = StorageUtils.getRddBlockLocations(rddId, storageStatusList)
@@ -49,7 +50,8 @@ private[ui] class RDDPage(parent: StorageTab) extends WebUIPage("rdd") {
       .map { case (blockId, status) =>
         (blockId, status, blockLocations.get(blockId).getOrElse(Seq[String]("Unknown")))
       }
-    val blockTable = UIUtils.listingTable(blockHeader, blockRow, blocks)
+    val blockTable = UIUtils.listingTable(blockHeader, blockRow, blocks,
+      id = Some("rdd-storage-by-block-table"))
 
     val content =
       <div class="row-fluid">
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala b/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
index 83489ca0679ee92e8c62aac72b20b7dbdd6dc9b0..6ced6052d2b1815cb49ffa76526e802bfba5820c 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
@@ -31,7 +31,7 @@ private[ui] class StoragePage(parent: StorageTab) extends WebUIPage("") {
 
   def render(request: HttpServletRequest): Seq[Node] = {
     val rdds = listener.rddInfoList
-    val content = UIUtils.listingTable(rddHeader, rddRow, rdds)
+    val content = UIUtils.listingTable(rddHeader, rddRow, rdds, id = Some("storage-by-rdd-table"))
     UIUtils.headerSparkPage("Storage", content, parent)
   }
 
diff --git a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
new file mode 100644
index 0000000000000000000000000000000000000000..bacf6a16fc233e0869d5c14738bb253d49a8210d
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.ui
+
+import org.openqa.selenium.WebDriver
+import org.openqa.selenium.htmlunit.HtmlUnitDriver
+import org.scalatest._
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.selenium.WebBrowser
+import org.scalatest.time.SpanSugar._
+
+import org.apache.spark.{SparkConf, SparkContext, SparkException}
+import org.apache.spark.LocalSparkContext._
+import org.apache.spark.api.java.StorageLevels
+
+/**
+ * Selenium tests for the Spark Web UI.  These tests are not run by default
+ * because they're slow.
+ */
+@DoNotDiscover
+class UISeleniumSuite extends FunSuite with WebBrowser with Matchers {
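+  // HtmlUnit is an in-process, headless browser, so the tests need no display.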
+  implicit val webDriver: WebDriver = new HtmlUnitDriver
+
+  /**
+   * Create a test SparkContext with the SparkUI enabled.
+   * It is safe to `get` the SparkUI directly from the SparkContext returned here.
+   */
+  private def newSparkContext(): SparkContext = {
+    val conf = new SparkConf()
+      .setMaster("local")
+      .setAppName("test")
+      .set("spark.ui.enabled", "true")
+    val sc = new SparkContext(conf)
+    assert(sc.ui.isDefined)
+    sc
+  }
+
+  test("effects of unpersist() / persist() should be reflected") {
+    // Regression test for SPARK-2527
+    withSpark(newSparkContext()) { sc =>
+      val ui = sc.ui.get
+      val rdd = sc.parallelize(Seq(1, 2, 3))
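+      // count() forces the RDD to be computed, so its blocks actually get cached.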
+      rdd.persist(StorageLevels.DISK_ONLY).count()
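+      // The UI is updated asynchronously from listener events, so poll until the change appears.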
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to (ui.appUIAddress.stripSuffix("/") + "/storage")
+        val tableRowText = findAll(cssSelector("#storage-by-rdd-table td")).map(_.text).toSeq
+        tableRowText should contain (StorageLevels.DISK_ONLY.description)
+      }
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to (ui.appUIAddress.stripSuffix("/") + "/storage/rdd/?id=0")
+        val tableRowText = findAll(cssSelector("#rdd-storage-by-block-table td")).map(_.text).toSeq
+        tableRowText should contain (StorageLevels.DISK_ONLY.description)
+      }
+
+      rdd.unpersist()
+      rdd.persist(StorageLevels.MEMORY_ONLY).count()
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to (ui.appUIAddress.stripSuffix("/") + "/storage")
+        val tableRowText = findAll(cssSelector("#storage-by-rdd-table td")).map(_.text).toSeq
+        tableRowText should contain (StorageLevels.MEMORY_ONLY.description)
+      }
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to (ui.appUIAddress.stripSuffix("/") + "/storage/rdd/?id=0")
+        val tableRowText = findAll(cssSelector("#rdd-storage-by-block-table td")).map(_.text).toSeq
+        tableRowText should contain (StorageLevels.MEMORY_ONLY.description)
+      }
+    }
+  }
+
+  test("failed stages should not appear to be active") {
+    withSpark(newSparkContext()) { sc =>
+      // Regression test for SPARK-3021
+      intercept[SparkException] {
+        sc.parallelize(1 to 10).map { x => throw new Exception() }.collect()
+      }
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to sc.ui.get.appUIAddress
+        find(id("active")).get.text should be("Active Stages (0)")
+        find(id("failed")).get.text should be("Failed Stages (1)")
+      }
+
+      // Regression test for SPARK-2105
+      class NotSerializable
+      val unserializableObject = new NotSerializable
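+      // The map closure below captures this object, making its tasks unserializable.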
+      intercept[SparkException] {
+        sc.parallelize(1 to 10).map { x => unserializableObject }.collect()
+      }
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to sc.ui.get.appUIAddress
+        find(id("active")).get.text should be("Active Stages (0)")
+        // The failure occurs before the stage becomes active, hence we should still show only one
+        // failed stage, not two:
+        find(id("failed")).get.text should be("Failed Stages (1)")
+      }
+    }
+  }
+}
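Aside: because the suite above carries @DoNotDiscover, ScalaTest's automatic
discovery skips it. A minimal sketch of running it explicitly (the wrapper
class name is hypothetical, not part of this patch):

    import org.scalatest.Suites
    import org.apache.spark.ui.UISeleniumSuite

    // A @DoNotDiscover suite still runs when nested in a discoverable aggregate.
    class UISeleniumSuiteRunner extends Suites(new UISeleniumSuite)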
diff --git a/pom.xml b/pom.xml
index 2faf0c7dcffd3b4ff0da82a5a0be70ac7db38b40..2ebe1b8da588a36f433c8b70adfdd14b1364a01e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -312,6 +312,13 @@
         <artifactId>jsr305</artifactId>
         <version>1.3.9</version>
       </dependency>
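+      <!-- Used only by UI tests; modules inherit this version from dependencyManagement. -->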
+      <dependency>
+        <groupId>org.seleniumhq.selenium</groupId>
+        <artifactId>selenium-java</artifactId>
+        <version>2.42.2</version>
+        <scope>test</scope>
+      </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
@@ -520,7 +527,7 @@
       <dependency>
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest_${scala.binary.version}</artifactId>
-        <version>2.1.5</version>
+        <version>2.2.1</version>
         <scope>test</scope>
       </dependency>
       <dependency>
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
index 6dc5942023f9e0e0f082cc269d02382771dde3a6..f134d734505155de1258528e8422dacafe1e0f3e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
@@ -23,7 +23,8 @@
 
 import org.scalatest.FunSuite
 import org.scalatest.Matchers._
-import org.scalautils.TripleEqualsSupport.Spread
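+// ScalaTest 2.2 renamed the org.scalautils package to org.scalactic.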
+import org.scalactic.TripleEqualsSupport.Spread
 
 import org.apache.spark.sql.catalyst.types._