diff --git a/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala b/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
index 83489ca0679e..6ced6052d2b1 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
@@ -31,7 +31,7 @@ private[ui] class StoragePage(parent: StorageTab) extends WebUIPage("") {
def render(request: HttpServletRequest): Seq[Node] = {
val rdds = listener.rddInfoList
- val content = UIUtils.listingTable(rddHeader, rddRow, rdds)
+ val content = UIUtils.listingTable(rddHeader, rddRow, rdds, id = Some("storage-by-rdd-table"))
UIUtils.headerSparkPage("Storage", content, parent)
}
diff --git a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
new file mode 100644
index 000000000000..bacf6a16fc23
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.ui
+
+import org.apache.spark.api.java.StorageLevels
+import org.apache.spark.{SparkException, SparkConf, SparkContext}
+import org.openqa.selenium.WebDriver
+import org.openqa.selenium.htmlunit.HtmlUnitDriver
+import org.scalatest._
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.selenium.WebBrowser
+import org.scalatest.time.SpanSugar._
+
+import org.apache.spark.LocalSparkContext._
+
+/**
+ * Selenium tests for the Spark Web UI. These tests are not run by default because
+ * they are slow; @DoNotDiscover keeps ScalaTest from running this suite automatically.
+ */
+@DoNotDiscover
+class UISeleniumSuite extends FunSuite with WebBrowser with Matchers {
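+ // HtmlUnitDriver is a headless, in-process browser; no real browser installation is needed.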
+ implicit val webDriver: WebDriver = new HtmlUnitDriver
+
+ /**
+ * Create a test SparkContext with the SparkUI enabled.
+ * It is safe to `get` the SparkUI directly from the SparkContext returned here.
+ */
+ private def newSparkContext(): SparkContext = {
+ val conf = new SparkConf()
+ .setMaster("local")
+ .setAppName("test")
+ .set("spark.ui.enabled", "true")
+ val sc = new SparkContext(conf)
+ assert(sc.ui.isDefined)
+ sc
+ }
+
+ test("effects of unpersist() / persist() should be reflected") {
+ // Regression test for SPARK-2527
+ withSpark(newSparkContext()) { sc =>
+ val ui = sc.ui.get
+ val rdd = sc.parallelize(Seq(1, 2, 3))
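+ // count() forces the RDD to be computed so its blocks are reported to the storage listener.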
+ rdd.persist(StorageLevels.DISK_ONLY).count()
+ eventually(timeout(5 seconds), interval(50 milliseconds)) {
+ go to (ui.appUIAddress.stripSuffix("/") + "/storage")
+ val tableRowText = findAll(cssSelector("#storage-by-rdd-table td")).map(_.text).toSeq
+ tableRowText should contain (StorageLevels.DISK_ONLY.description)
+ }
+ eventually(timeout(5 seconds), interval(50 milliseconds)) {
+ go to (ui.appUIAddress.stripSuffix("/") + "/storage/rdd/?id=0")
+ val tableRowText = findAll(cssSelector("#rdd-storage-by-block-table td")).map(_.text).toSeq
+ tableRowText should contain (StorageLevels.DISK_ONLY.description)
+ }
+
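+ // Re-persist at a different storage level; the change should show up on both storage pages.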
+ rdd.unpersist()
+ rdd.persist(StorageLevels.MEMORY_ONLY).count()
+ eventually(timeout(5 seconds), interval(50 milliseconds)) {
+ go to (ui.appUIAddress.stripSuffix("/") + "/storage")
+ val tableRowText = findAll(cssSelector("#storage-by-rdd-table td")).map(_.text).toSeq
+ tableRowText should contain (StorageLevels.MEMORY_ONLY.description)
+ }
+ eventually(timeout(5 seconds), interval(50 milliseconds)) {
+ go to (ui.appUIAddress.stripSuffix("/") + "/storage/rdd/?id=0")
+ val tableRowText = findAll(cssSelector("#rdd-storage-by-block-table td")).map(_.text).toSeq
+ tableRowText should contain (StorageLevels.MEMORY_ONLY.description)
+ }
+ }
+ }
+
+ test("failed stages should not appear to be active") {
+ withSpark(newSparkContext()) { sc =>
+ // Regression test for SPARK-3021
+ intercept[SparkException] {
+ sc.parallelize(1 to 10).map { x => throw new Exception() }.collect()
+ }
+ eventually(timeout(5 seconds), interval(50 milliseconds)) {
+ go to sc.ui.get.appUIAddress
+ find(id("active")).get.text should be("Active Stages (0)")
+ find(id("failed")).get.text should be("Failed Stages (1)")
+ }
+
+ // Regression test for SPARK-2105
+ class NotSerializable
+ val unserializableObject = new NotSerializable
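+ // The closure captures unserializableObject, so serialization fails before the stage starts.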
+ intercept[SparkException] {
+ sc.parallelize(1 to 10).map { x => unserializableObject }.collect()
+ }
+ eventually(timeout(5 seconds), interval(50 milliseconds)) {
+ go to sc.ui.get.appUIAddress
+ find(id("active")).get.text should be("Active Stages (0)")
+ // The failure occurs before the stage becomes active, hence we should still show only one
+ // failed stage, not two:
+ find(id("failed")).get.text should be("Failed Stages (1)")
+ }
+ }
+ }
+}
diff --git a/pom.xml b/pom.xml
index 030bea948b5c..d92927a44e3b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -312,6 +312,12 @@
         <artifactId>jsr305</artifactId>
         <version>1.3.9</version>
       </dependency>
+      <dependency>
+        <groupId>org.seleniumhq.selenium</groupId>
+        <artifactId>selenium-java</artifactId>
+        <version>2.42.2</version>
+        <scope>test</scope>
+      </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
@@ -520,7 +526,7 @@
       <dependency>
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest_${scala.binary.version}</artifactId>
-        <version>2.1.5</version>
+        <version>2.2.1</version>
         <scope>test</scope>
       </dependency>
       <dependency>
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
index 6dc5942023f9..f134d7345051 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
@@ -23,7 +23,7 @@ import scala.collection.immutable.HashSet
import org.scalatest.FunSuite
import org.scalatest.Matchers._
-import org.scalautils.TripleEqualsSupport.Spread
+import org.scalactic.TripleEqualsSupport.Spread
import org.apache.spark.sql.catalyst.types._