author     Jacek Laskowski <jacek@japila.pl>   2016-11-02 09:21:26 +0000
committer  Sean Owen <sowen@cloudera.com>      2016-11-02 09:21:26 +0000
commit     70a5db7bbd192a4bc68bcfdc475ab221adf2fcdd (patch)
tree       d2e51c677e7a0fc1af79d892e111550c4fc9536f
parent     98ede49496d0d7b4724085083d4f24436b92a7bf (diff)
[SPARK-18204][WEBUI] Remove SparkUI.appUIAddress
## What changes were proposed in this pull request?

Removing the `appUIAddress` attribute since it is no longer in use.

## How was this patch tested?

Local build

Author: Jacek Laskowski <jacek@japila.pl>

Closes #15603 from jaceklaskowski/sparkui-fixes.
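For context, the removed `SparkUI` helpers duplicated what the parent `WebUI` already provides through `webUrl`. A minimal sketch of that relationship follows; this is not Spark's actual class, and only `publicHostName` and `boundPort` are taken from the diff below, the rest is illustrative:

```scala
// Minimal sketch, not Spark's actual WebUI: shows how a single inherited
// webUrl accessor subsumes the removed appUIHostPort/appUIAddress pair.
abstract class WebUISketch {
  protected def publicHostName: String // used by the removed appUIHostPort
  def boundPort: Int                   // -1 before bind(), per WebUI below

  // Formerly SparkUI.appUIHostPort ("host:port", no scheme) plus
  // SparkUI.appUIAddress ("http://host:port"); now one accessor.
  def webUrl: String = s"http://$publicHostName:$boundPort"
}
```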
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/SparkUI.scala | 13
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/WebUI.scala | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/jobs/AllJobsPage.scala | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala | 16
-rw-r--r--  core/src/test/scala/org/apache/spark/ui/UISuite.scala | 13
-rw-r--r--  mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala | 2
-rw-r--r--  mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackend.scala | 2
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/UISeleniumSuite.scala | 12
-rw-r--r--  yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala | 2
-rw-r--r--  yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala | 2
11 files changed, 36 insertions(+), 44 deletions(-)
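Every call site follows the same one-line substitution, repeated throughout the hunks below. A hedged before/after sketch (in Spark, `sc.ui` is `Option[SparkUI]`; modeled here with the sketch type above, and the empty-string fallback matches the diff):

```scala
// Call-site migration pattern repeated across the patch:
//   before: sc.ui.map(_.appUIAddress).getOrElse("")
//   after:  sc.ui.map(_.webUrl).getOrElse("")
def uiUrlOrEmpty(ui: Option[WebUISketch]): String =
  ui.map(_.webUrl).getOrElse("")
```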
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
index 04d40e2907..368cd30a2e 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
@@ -93,7 +93,7 @@ private[spark] class StandaloneSchedulerBackend(
val javaOpts = sparkJavaOpts ++ extraJavaOpts
val command = Command("org.apache.spark.executor.CoarseGrainedExecutorBackend",
args, sc.executorEnvs, classPathEntries ++ testingClassPath, libraryPathEntries, javaOpts)
- val appUIAddress = sc.ui.map(_.appUIAddress).getOrElse("")
+ val webUrl = sc.ui.map(_.webUrl).getOrElse("")
val coresPerExecutor = conf.getOption("spark.executor.cores").map(_.toInt)
// If we're using dynamic allocation, set our initial executor limit to 0 for now.
// ExecutorAllocationManager will send the real initial limit to the Master later.
@@ -103,8 +103,8 @@ private[spark] class StandaloneSchedulerBackend(
} else {
None
}
- val appDesc = new ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
- appUIAddress, sc.eventLogDir, sc.eventLogCodec, coresPerExecutor, initialExecutorLimit)
+ val appDesc = ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
+ webUrl, sc.eventLogDir, sc.eventLogCodec, coresPerExecutor, initialExecutorLimit)
client = new StandaloneAppClient(sc.env.rpcEnv, masters, appDesc, this, conf)
client.start()
launcherBackend.setState(SparkAppHandle.State.SUBMITTED)
diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index f631a047a7..b828532aba 100644
--- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -82,7 +82,7 @@ private[spark] class SparkUI private (
initialize()
def getSparkUser: String = {
- environmentListener.systemProperties.toMap.get("user.name").getOrElse("<unknown>")
+ environmentListener.systemProperties.toMap.getOrElse("user.name", "<unknown>")
}
def getAppName: String = appName
@@ -94,16 +94,9 @@ private[spark] class SparkUI private (
/** Stop the server behind this web interface. Only valid after bind(). */
override def stop() {
super.stop()
- logInfo("Stopped Spark web UI at %s".format(appUIAddress))
+ logInfo(s"Stopped Spark web UI at $webUrl")
}
- /**
- * Return the application UI host:port. This does not include the scheme (http://).
- */
- private[spark] def appUIHostPort = publicHostName + ":" + boundPort
-
- private[spark] def appUIAddress = s"http://$appUIHostPort"
-
def getSparkUI(appId: String): Option[SparkUI] = {
if (appId == this.appId) Some(this) else None
}
@@ -136,7 +129,7 @@ private[spark] class SparkUI private (
private[spark] abstract class SparkUITab(parent: SparkUI, prefix: String)
extends WebUITab(parent, prefix) {
- def appName: String = parent.getAppName
+ def appName: String = parent.appName
}
diff --git a/core/src/main/scala/org/apache/spark/ui/WebUI.scala b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
index a05e0efb7a..8c80155867 100644
--- a/core/src/main/scala/org/apache/spark/ui/WebUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
@@ -56,8 +56,8 @@ private[spark] abstract class WebUI(
private val className = Utils.getFormattedClassName(this)
def getBasePath: String = basePath
- def getTabs: Seq[WebUITab] = tabs.toSeq
- def getHandlers: Seq[ServletContextHandler] = handlers.toSeq
+ def getTabs: Seq[WebUITab] = tabs
+ def getHandlers: Seq[ServletContextHandler] = handlers
def getSecurityManager: SecurityManager = securityManager
/** Attach a tab to this UI, along with all of its attached pages. */
@@ -133,7 +133,7 @@ private[spark] abstract class WebUI(
def initialize(): Unit
/** Bind to the HTTP server behind this web interface. */
- def bind() {
+ def bind(): Unit = {
assert(!serverInfo.isDefined, s"Attempted to bind $className more than once!")
try {
val host = Option(conf.getenv("SPARK_LOCAL_IP")).getOrElse("0.0.0.0")
@@ -156,7 +156,7 @@ private[spark] abstract class WebUI(
def boundPort: Int = serverInfo.map(_.boundPort).getOrElse(-1)
/** Stop the server behind this web interface. Only valid after bind(). */
- def stop() {
+ def stop(): Unit = {
assert(serverInfo.isDefined,
s"Attempted to stop $className before binding to a server!")
serverInfo.get.stop()
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/AllJobsPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/AllJobsPage.scala
index 173fc3cf31..50e8e2d19e 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/AllJobsPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/AllJobsPage.scala
@@ -289,8 +289,8 @@ private[ui] class AllJobsPage(parent: JobsTab) extends WebUIPage("") {
val startTime = listener.startTime
val endTime = listener.endTime
val activeJobs = listener.activeJobs.values.toSeq
- val completedJobs = listener.completedJobs.reverse.toSeq
- val failedJobs = listener.failedJobs.reverse.toSeq
+ val completedJobs = listener.completedJobs.reverse
+ val failedJobs = listener.failedJobs.reverse
val activeJobsTable =
jobsTable(request, "active", "activeJob", activeJobs, killEnabled = parent.killEnabled)
diff --git a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
index e5d408a167..f4786e3931 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -473,7 +473,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
sc.parallelize(1 to 10).map{x => Thread.sleep(10000); x}.countAsync()
eventually(timeout(5 seconds), interval(50 milliseconds)) {
val url = new URL(
- sc.ui.get.appUIAddress.stripSuffix("/") + "/stages/stage/kill/?id=0")
+ sc.ui.get.webUrl.stripSuffix("/") + "/stages/stage/kill/?id=0")
// SPARK-6846: should be POST only but YARN AM doesn't proxy POST
getResponseCode(url, "GET") should be (200)
getResponseCode(url, "POST") should be (200)
@@ -486,7 +486,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
sc.parallelize(1 to 10).map{x => Thread.sleep(10000); x}.countAsync()
eventually(timeout(5 seconds), interval(50 milliseconds)) {
val url = new URL(
- sc.ui.get.appUIAddress.stripSuffix("/") + "/jobs/job/kill/?id=0")
+ sc.ui.get.webUrl.stripSuffix("/") + "/jobs/job/kill/?id=0")
// SPARK-6846: should be POST only but YARN AM doesn't proxy POST
getResponseCode(url, "GET") should be (200)
getResponseCode(url, "POST") should be (200)
@@ -620,7 +620,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
test("live UI json application list") {
withSpark(newSparkContext()) { sc =>
val appListRawJson = HistoryServerSuite.getUrl(new URL(
- sc.ui.get.appUIAddress + "/api/v1/applications"))
+ sc.ui.get.webUrl + "/api/v1/applications"))
val appListJsonAst = JsonMethods.parse(appListRawJson)
appListJsonAst.children.length should be (1)
val attempts = (appListJsonAst \ "attempts").children
@@ -640,7 +640,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
sc.parallelize(Seq(1, 2, 3)).map(identity).groupBy(identity).map(identity).groupBy(identity)
rdd.count()
- val stage0 = Source.fromURL(sc.ui.get.appUIAddress +
+ val stage0 = Source.fromURL(sc.ui.get.webUrl +
"/stages/stage/?id=0&attempt=0&expandDagViz=true").mkString
assert(stage0.contains("digraph G {\n subgraph clusterstage_0 {\n " +
"label=&quot;Stage 0&quot;;\n subgraph "))
@@ -651,7 +651,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
assert(stage0.contains("{\n label=&quot;groupBy&quot;;\n " +
"2 [label=&quot;MapPartitionsRDD [2]"))
- val stage1 = Source.fromURL(sc.ui.get.appUIAddress +
+ val stage1 = Source.fromURL(sc.ui.get.webUrl +
"/stages/stage/?id=1&attempt=0&expandDagViz=true").mkString
assert(stage1.contains("digraph G {\n subgraph clusterstage_1 {\n " +
"label=&quot;Stage 1&quot;;\n subgraph "))
@@ -662,7 +662,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
assert(stage1.contains("{\n label=&quot;groupBy&quot;;\n " +
"5 [label=&quot;MapPartitionsRDD [5]"))
- val stage2 = Source.fromURL(sc.ui.get.appUIAddress +
+ val stage2 = Source.fromURL(sc.ui.get.webUrl +
"/stages/stage/?id=2&attempt=0&expandDagViz=true").mkString
assert(stage2.contains("digraph G {\n subgraph clusterstage_2 {\n " +
"label=&quot;Stage 2&quot;;\n subgraph "))
@@ -687,7 +687,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
}
def goToUi(ui: SparkUI, path: String): Unit = {
- go to (ui.appUIAddress.stripSuffix("/") + path)
+ go to (ui.webUrl.stripSuffix("/") + path)
}
def parseDate(json: JValue): Long = {
@@ -699,6 +699,6 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
}
def apiUrl(ui: SparkUI, path: String): URL = {
- new URL(ui.appUIAddress + "/api/v1/applications/" + ui.sc.get.applicationId + "/" + path)
+ new URL(ui.webUrl + "/api/v1/applications/" + ui.sc.get.applicationId + "/" + path)
}
}
diff --git a/core/src/test/scala/org/apache/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index 4abcfb7e51..68c7657cb3 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -66,7 +66,7 @@ class UISuite extends SparkFunSuite {
withSpark(newSparkContext()) { sc =>
// test if the ui is visible, and all the expected tabs are visible
eventually(timeout(10 seconds), interval(50 milliseconds)) {
- val html = Source.fromURL(sc.ui.get.appUIAddress).mkString
+ val html = Source.fromURL(sc.ui.get.webUrl).mkString
assert(!html.contains("random data that should not be present"))
assert(html.toLowerCase.contains("stages"))
assert(html.toLowerCase.contains("storage"))
@@ -176,19 +176,18 @@ class UISuite extends SparkFunSuite {
}
}
- test("verify appUIAddress contains the scheme") {
+ test("verify webUrl contains the scheme") {
withSpark(newSparkContext()) { sc =>
val ui = sc.ui.get
- val uiAddress = ui.appUIAddress
- val uiHostPort = ui.appUIHostPort
- assert(uiAddress.equals("http://" + uiHostPort))
+ val uiAddress = ui.webUrl
+ assert(uiAddress.startsWith("http://") || uiAddress.startsWith("https://"))
}
}
- test("verify appUIAddress contains the port") {
+ test("verify webUrl contains the port") {
withSpark(newSparkContext()) { sc =>
val ui = sc.ui.get
- val splitUIAddress = ui.appUIAddress.split(':')
+ val splitUIAddress = ui.webUrl.split(':')
val boundPort = ui.boundPort
assert(splitUIAddress(2).toInt == boundPort)
}
diff --git a/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala b/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala
index 5063c1fe98..842c05e7bf 100644
--- a/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala
+++ b/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala
@@ -158,7 +158,7 @@ private[spark] class MesosCoarseGrainedSchedulerBackend(
sc.sparkUser,
sc.appName,
sc.conf,
- sc.conf.getOption("spark.mesos.driver.webui.url").orElse(sc.ui.map(_.appUIAddress)),
+ sc.conf.getOption("spark.mesos.driver.webui.url").orElse(sc.ui.map(_.webUrl)),
None,
None,
sc.conf.getOption("spark.mesos.driver.frameworkId")
diff --git a/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackend.scala b/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackend.scala
index 09a252f3c7..c1aa00151e 100644
--- a/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackend.scala
+++ b/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackend.scala
@@ -77,7 +77,7 @@ private[spark] class MesosFineGrainedSchedulerBackend(
sc.sparkUser,
sc.appName,
sc.conf,
- sc.conf.getOption("spark.mesos.driver.webui.url").orElse(sc.ui.map(_.appUIAddress)),
+ sc.conf.getOption("spark.mesos.driver.webui.url").orElse(sc.ui.map(_.webUrl)),
Option.empty,
Option.empty,
sc.conf.getOption("spark.mesos.driver.frameworkId")
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/UISeleniumSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/UISeleniumSuite.scala
index 454c3dffa3..e7cec999c2 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/UISeleniumSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/UISeleniumSuite.scala
@@ -92,13 +92,13 @@ class UISeleniumSuite
val sparkUI = ssc.sparkContext.ui.get
eventually(timeout(10 seconds), interval(50 milliseconds)) {
- go to (sparkUI.appUIAddress.stripSuffix("/"))
+ go to (sparkUI.webUrl.stripSuffix("/"))
find(cssSelector( """ul li a[href*="streaming"]""")) should not be (None)
}
eventually(timeout(10 seconds), interval(50 milliseconds)) {
// check whether streaming page exists
- go to (sparkUI.appUIAddress.stripSuffix("/") + "/streaming")
+ go to (sparkUI.webUrl.stripSuffix("/") + "/streaming")
val h3Text = findAll(cssSelector("h3")).map(_.text).toSeq
h3Text should contain("Streaming Statistics")
@@ -180,23 +180,23 @@ class UISeleniumSuite
jobDetails should contain("Completed Stages:")
// Check a batch page without id
- go to (sparkUI.appUIAddress.stripSuffix("/") + "/streaming/batch/")
+ go to (sparkUI.webUrl.stripSuffix("/") + "/streaming/batch/")
webDriver.getPageSource should include ("Missing id parameter")
// Check a non-exist batch
- go to (sparkUI.appUIAddress.stripSuffix("/") + "/streaming/batch/?id=12345")
+ go to (sparkUI.webUrl.stripSuffix("/") + "/streaming/batch/?id=12345")
webDriver.getPageSource should include ("does not exist")
}
ssc.stop(false)
eventually(timeout(10 seconds), interval(50 milliseconds)) {
- go to (sparkUI.appUIAddress.stripSuffix("/"))
+ go to (sparkUI.webUrl.stripSuffix("/"))
find(cssSelector( """ul li a[href*="streaming"]""")) should be(None)
}
eventually(timeout(10 seconds), interval(50 milliseconds)) {
- go to (sparkUI.appUIAddress.stripSuffix("/") + "/streaming")
+ go to (sparkUI.webUrl.stripSuffix("/") + "/streaming")
val h3Text = findAll(cssSelector("h3")).map(_.text).toSeq
h3Text should not contain("Streaming Statistics")
}
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index aabae140af..f2b9dfb4d1 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -406,7 +406,7 @@ private[spark] class ApplicationMaster(
sc.getConf.get("spark.driver.host"),
sc.getConf.get("spark.driver.port"),
isClusterMode = true)
- registerAM(sc.getConf, rpcEnv, driverRef, sc.ui.map(_.appUIAddress).getOrElse(""),
+ registerAM(sc.getConf, rpcEnv, driverRef, sc.ui.map(_.webUrl).getOrElse(""),
securityMgr)
} else {
// Sanity check; should never happen in normal operation, since sc should only be null
diff --git a/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala b/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala
index d8b36c5fea..60da356ad1 100644
--- a/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala
+++ b/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala
@@ -44,7 +44,7 @@ private[spark] class YarnClientSchedulerBackend(
val driverHost = conf.get("spark.driver.host")
val driverPort = conf.get("spark.driver.port")
val hostport = driverHost + ":" + driverPort
- sc.ui.foreach { ui => conf.set("spark.driver.appUIAddress", ui.appUIAddress) }
+ sc.ui.foreach { ui => conf.set("spark.driver.appUIAddress", ui.webUrl) }
val argsArrayBuf = new ArrayBuffer[String]()
argsArrayBuf += ("--arg", hostport)