 core/pom.xml                                                       |   5
 core/src/main/scala/org/apache/spark/ui/UIUtils.scala              |   7
 core/src/main/scala/org/apache/spark/ui/WebUI.scala                |   2
 core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala      |   6
 core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala  |   2
 core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala      | 112
 pom.xml                                                            |   8
 sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala | 2
 8 files changed, 135 insertions(+), 9 deletions(-)
diff --git a/core/pom.xml b/core/pom.xml
index 7b68dbaea4..320d1076f7 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -248,6 +248,11 @@
       </exclusions>
     </dependency>
     <dependency>
+      <groupId>org.seleniumhq.selenium</groupId>
+      <artifactId>selenium-java</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>org.scalatest</groupId>
       <artifactId>scalatest_${scala.binary.version}</artifactId>
       <scope>test</scope>
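
Note that core/pom.xml declares selenium-java without a <version>: the version (2.42.2) is centralized in the root pom.xml's <dependencyManagement> section later in this same patch, so Maven lets child modules inherit it.
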
diff --git a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
index 32e6b15bb0..76714b1e69 100644
--- a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
@@ -20,7 +20,7 @@ package org.apache.spark.ui
 import java.text.SimpleDateFormat
 import java.util.{Locale, Date}
 
-import scala.xml.Node
+import scala.xml.{Text, Node}
 
 import org.apache.spark.Logging
 
@@ -239,7 +239,8 @@ private[spark] object UIUtils extends Logging {
       headers: Seq[String],
       generateDataRow: T => Seq[Node],
       data: Iterable[T],
-      fixedWidth: Boolean = false): Seq[Node] = {
+      fixedWidth: Boolean = false,
+      id: Option[String] = None): Seq[Node] = {
 
     var listingTableClass = TABLE_CLASS
     if (fixedWidth) {
@@ -263,7 +264,7 @@
         }
       }
     }
-    <table class={listingTableClass}>
+    <table class={listingTableClass} id={id.map(Text.apply)}>
       <thead>{headerRow}</thead>
       <tbody>
         {data.map(r => generateDataRow(r))}
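
The new parameter is threaded into the XML literal as Option[Text] rather than String so that existing callers need no changes: scala.xml drops any attribute whose value is None. A minimal standalone sketch of that idiom (the `table` helper here is illustrative, not part of the patch):

    import scala.xml.{Node, Text}

    // scala.xml omits the attribute entirely when the Option is None,
    // so tables rendered without an explicit id are unchanged.
    def table(id: Option[String]): Node =
      <table id={id.map(Text.apply)}/>

    println(table(Some("storage-by-rdd-table")))  // <table id="storage-by-rdd-table"/>
    println(table(None))                          // <table/>
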
diff --git a/core/src/main/scala/org/apache/spark/ui/WebUI.scala b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
index 5d88ca403a..9be65a4a39 100644
--- a/core/src/main/scala/org/apache/spark/ui/WebUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
@@ -82,7 +82,7 @@ private[spark] abstract class WebUI(
   }
 
   /** Detach a handler from this UI. */
-  def detachHandler(handler: ServletContextHandler) {
+  protected def detachHandler(handler: ServletContextHandler) {
     handlers -= handler
     serverInfo.foreach { info =>
       info.rootHandler.removeHandler(handler)
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
index 8a0075ae8d..12d23a9287 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
@@ -39,7 +39,8 @@ private[ui] class RDDPage(parent: StorageTab) extends WebUIPage("rdd") {
 
     // Worker table
     val workers = storageStatusList.map((rddId, _))
-    val workerTable = UIUtils.listingTable(workerHeader, workerRow, workers)
+    val workerTable = UIUtils.listingTable(workerHeader, workerRow, workers,
+      id = Some("rdd-storage-by-worker-table"))
 
     // Block table
     val blockLocations = StorageUtils.getRddBlockLocations(rddId, storageStatusList)
@@ -49,7 +50,8 @@ private[ui] class RDDPage(parent: StorageTab) extends WebUIPage("rdd") {
       .map { case (blockId, status) =>
         (blockId, status, blockLocations.get(blockId).getOrElse(Seq[String]("Unknown")))
       }
-    val blockTable = UIUtils.listingTable(blockHeader, blockRow, blocks)
+    val blockTable = UIUtils.listingTable(blockHeader, blockRow, blocks,
+      id = Some("rdd-storage-by-block-table"))
 
     val content =
       <div class="row-fluid">
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala b/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
index 83489ca067..6ced6052d2 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
@@ -31,7 +31,7 @@ private[ui] class StoragePage(parent: StorageTab) extends WebUIPage("") {
 
   def render(request: HttpServletRequest): Seq[Node] = {
     val rdds = listener.rddInfoList
-    val content = UIUtils.listingTable(rddHeader, rddRow, rdds)
+    val content = UIUtils.listingTable(rddHeader, rddRow, rdds, id = Some("storage-by-rdd-table"))
     UIUtils.headerSparkPage("Storage", content, parent)
   }
 
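
The three ids introduced above (storage-by-rdd-table, rdd-storage-by-worker-table, rdd-storage-by-block-table) exist solely to give the new Selenium suite below stable CSS hooks, e.g. cssSelector("#storage-by-rdd-table td"), so the assertions target a specific table rather than whatever table happens to appear first on the page.
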
diff --git a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
new file mode 100644
index 0000000000..bacf6a16fc
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.ui
+
+import org.apache.spark.api.java.StorageLevels
+import org.apache.spark.{SparkException, SparkConf, SparkContext}
+import org.openqa.selenium.WebDriver
+import org.openqa.selenium.htmlunit.HtmlUnitDriver
+import org.scalatest._
+import org.scalatest.concurrent.Eventually._
+import org.scalatest.selenium.WebBrowser
+import org.scalatest.time.SpanSugar._
+
+import org.apache.spark.LocalSparkContext._
+
+/**
+ * Selenium tests for the Spark Web UI.  These tests are not run by default
+ * because they're slow.
+ */
+@DoNotDiscover
+class UISeleniumSuite extends FunSuite with WebBrowser with Matchers {
+  implicit val webDriver: WebDriver = new HtmlUnitDriver
+
+  /**
+   * Create a test SparkContext with the SparkUI enabled.
+   * It is safe to `get` the SparkUI directly from the SparkContext returned here.
+   */
+  private def newSparkContext(): SparkContext = {
+    val conf = new SparkConf()
+      .setMaster("local")
+      .setAppName("test")
+      .set("spark.ui.enabled", "true")
+    val sc = new SparkContext(conf)
+    assert(sc.ui.isDefined)
+    sc
+  }
+
+  test("effects of unpersist() / persist() should be reflected") {
+    // Regression test for SPARK-2527
+    withSpark(newSparkContext()) { sc =>
+      val ui = sc.ui.get
+      val rdd = sc.parallelize(Seq(1, 2, 3))
+      rdd.persist(StorageLevels.DISK_ONLY).count()
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to (ui.appUIAddress.stripSuffix("/") + "/storage")
+        val tableRowText = findAll(cssSelector("#storage-by-rdd-table td")).map(_.text).toSeq
+        tableRowText should contain (StorageLevels.DISK_ONLY.description)
+      }
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to (ui.appUIAddress.stripSuffix("/") + "/storage/rdd/?id=0")
+        val tableRowText = findAll(cssSelector("#rdd-storage-by-block-table td")).map(_.text).toSeq
+        tableRowText should contain (StorageLevels.DISK_ONLY.description)
+      }
+
+      rdd.unpersist()
+      rdd.persist(StorageLevels.MEMORY_ONLY).count()
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to (ui.appUIAddress.stripSuffix("/") + "/storage")
+        val tableRowText = findAll(cssSelector("#storage-by-rdd-table td")).map(_.text).toSeq
+        tableRowText should contain (StorageLevels.MEMORY_ONLY.description)
+      }
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to (ui.appUIAddress.stripSuffix("/") + "/storage/rdd/?id=0")
+        val tableRowText = findAll(cssSelector("#rdd-storage-by-block-table td")).map(_.text).toSeq
+        tableRowText should contain (StorageLevels.MEMORY_ONLY.description)
+      }
+    }
+  }
+
+  test("failed stages should not appear to be active") {
+    withSpark(newSparkContext()) { sc =>
+      // Regression test for SPARK-3021
+      intercept[SparkException] {
+        sc.parallelize(1 to 10).map { x => throw new Exception() }.collect()
+      }
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to sc.ui.get.appUIAddress
+        find(id("active")).get.text should be("Active Stages (0)")
+        find(id("failed")).get.text should be("Failed Stages (1)")
+      }
+
+      // Regression test for SPARK-2105
+      class NotSerializable
+      val unserializableObject = new NotSerializable
+      intercept[SparkException] {
+        sc.parallelize(1 to 10).map { x => unserializableObject }.collect()
+      }
+      eventually(timeout(5 seconds), interval(50 milliseconds)) {
+        go to sc.ui.get.appUIAddress
+        find(id("active")).get.text should be("Active Stages (0)")
+        // The failure occurs before the stage becomes active, hence we should still show only one
+        // failed stage, not two:
+        find(id("failed")).get.text should be("Failed Stages (1)")
+      }
+    }
+  }
+}
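
The suite leans on ScalaTest's Eventually to absorb the lag between a Spark job finishing and the listener-driven UI catching up: the block is re-run every `interval` until it passes or the `timeout` expires, and the last failure is rethrown if it never does. A self-contained sketch of the pattern (the counter is illustrative, not from the patch):

    import org.scalatest.concurrent.Eventually._
    import org.scalatest.time.SpanSugar._

    var attempts = 0
    eventually(timeout(5 seconds), interval(50 milliseconds)) {
      attempts += 1
      // Throws, and is therefore retried, until the condition finally holds.
      assert(attempts >= 3)
    }
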
diff --git a/pom.xml b/pom.xml
index 2faf0c7dcf..2ebe1b8da5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -313,6 +313,12 @@
         <version>1.3.9</version>
       </dependency>
       <dependency>
+        <groupId>org.seleniumhq.selenium</groupId>
+        <artifactId>selenium-java</artifactId>
+        <version>2.42.2</version>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
         <version>${slf4j.version}</version>
@@ -520,7 +526,7 @@
       <dependency>
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest_${scala.binary.version}</artifactId>
-        <version>2.1.5</version>
+        <version>2.2.1</version>
         <scope>test</scope>
       </dependency>
       <dependency>
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
index 6dc5942023..f134d73450 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
@@ -23,7 +23,7 @@ import scala.collection.immutable.HashSet
 import org.scalatest.FunSuite
 import org.scalatest.Matchers._
 
-import org.scalautils.TripleEqualsSupport.Spread
+import org.scalactic.TripleEqualsSupport.Spread
 
 import org.apache.spark.sql.catalyst.types._
 
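
This last import fix goes hand in hand with the ScalaTest 2.1.5 to 2.2.1 bump above: ScalaTest 2.2 renamed its companion library from ScalaUtils to Scalactic, moving Spread with it, so the old org.scalautils path no longer compiles. A minimal sketch of where Spread surfaces in matcher code (the values are illustrative):

    import org.scalatest.Matchers._
    import org.scalactic.TripleEqualsSupport.Spread

    // `x +- tolerance` builds a Spread, the type behind approximate matches.
    val spread: Spread[Double] = 2.0 +- 0.1
    2.05 should be (spread)
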