author     Jacek Laskowski <jacek@japila.pl>  2016-11-02 09:21:26 +0000
committer  Sean Owen <sowen@cloudera.com>     2016-11-02 09:21:26 +0000
commit     70a5db7bbd192a4bc68bcfdc475ab221adf2fcdd
tree       d2e51c677e7a0fc1af79d892e111550c4fc9536f
parent     98ede49496d0d7b4724085083d4f24436b92a7bf
[SPARK-18204][WEBUI] Remove SparkUI.appUIAddress
## What changes were proposed in this pull request?

Removing `appUIAddress` attribute since it is no longer in use.

## How was this patch tested?

Local build

Author: Jacek Laskowski <jacek@japila.pl>

Closes #15603 from jaceklaskowski/sparkui-fixes.
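The caller-side impact is a one-line rename: the `http://host:port` string that `SparkUI.appUIAddress` used to return is already available from the inherited `WebUI.webUrl`, so internal callers such as `StandaloneSchedulerBackend` simply switch accessors. A minimal sketch of the substitution (Spark-internal code, since `sc.ui` is `private[spark]`; the empty-string fallback covers the case where the UI is disabled):

```scala
// Before this patch: SparkUI carried its own duplicate accessor.
// val appUIAddress = sc.ui.map(_.appUIAddress).getOrElse("")

// After this patch: reuse the inherited WebUI.webUrl, which yields the
// same http://host:port value; "" when no UI was started.
val webUrl = sc.ui.map(_.webUrl).getOrElse("")
```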
Diffstat (limited to 'core/src/main')
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala   6
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/SparkUI.scala                                     13
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/WebUI.scala                                        8
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/jobs/AllJobsPage.scala                             4
4 files changed, 12 insertions, 19 deletions
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
index 04d40e2907..368cd30a2e 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
@@ -93,7 +93,7 @@ private[spark] class StandaloneSchedulerBackend(
val javaOpts = sparkJavaOpts ++ extraJavaOpts
val command = Command("org.apache.spark.executor.CoarseGrainedExecutorBackend",
args, sc.executorEnvs, classPathEntries ++ testingClassPath, libraryPathEntries, javaOpts)
- val appUIAddress = sc.ui.map(_.appUIAddress).getOrElse("")
+ val webUrl = sc.ui.map(_.webUrl).getOrElse("")
val coresPerExecutor = conf.getOption("spark.executor.cores").map(_.toInt)
// If we're using dynamic allocation, set our initial executor limit to 0 for now.
// ExecutorAllocationManager will send the real initial limit to the Master later.
@@ -103,8 +103,8 @@ private[spark] class StandaloneSchedulerBackend(
} else {
None
}
- val appDesc = new ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
- appUIAddress, sc.eventLogDir, sc.eventLogCodec, coresPerExecutor, initialExecutorLimit)
+ val appDesc = ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
+ webUrl, sc.eventLogDir, sc.eventLogCodec, coresPerExecutor, initialExecutorLimit)
client = new StandaloneAppClient(sc.env.rpcEnv, masters, appDesc, this, conf)
client.start()
launcherBackend.setState(SparkAppHandle.State.SUBMITTED)
diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index f631a047a7..b828532aba 100644
--- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -82,7 +82,7 @@ private[spark] class SparkUI private (
initialize()
def getSparkUser: String = {
- environmentListener.systemProperties.toMap.get("user.name").getOrElse("<unknown>")
+ environmentListener.systemProperties.toMap.getOrElse("user.name", "<unknown>")
}
def getAppName: String = appName
@@ -94,16 +94,9 @@ private[spark] class SparkUI private (
/** Stop the server behind this web interface. Only valid after bind(). */
override def stop() {
super.stop()
- logInfo("Stopped Spark web UI at %s".format(appUIAddress))
+ logInfo(s"Stopped Spark web UI at $webUrl")
}
- /**
- * Return the application UI host:port. This does not include the scheme (http://).
- */
- private[spark] def appUIHostPort = publicHostName + ":" + boundPort
-
- private[spark] def appUIAddress = s"http://$appUIHostPort"
-
def getSparkUI(appId: String): Option[SparkUI] = {
if (appId == this.appId) Some(this) else None
}
@@ -136,7 +129,7 @@ private[spark] class SparkUI private (
private[spark] abstract class SparkUITab(parent: SparkUI, prefix: String)
extends WebUITab(parent, prefix) {
- def appName: String = parent.getAppName
+ def appName: String = parent.appName
}
diff --git a/core/src/main/scala/org/apache/spark/ui/WebUI.scala b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
index a05e0efb7a..8c80155867 100644
--- a/core/src/main/scala/org/apache/spark/ui/WebUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
@@ -56,8 +56,8 @@ private[spark] abstract class WebUI(
private val className = Utils.getFormattedClassName(this)
def getBasePath: String = basePath
- def getTabs: Seq[WebUITab] = tabs.toSeq
- def getHandlers: Seq[ServletContextHandler] = handlers.toSeq
+ def getTabs: Seq[WebUITab] = tabs
+ def getHandlers: Seq[ServletContextHandler] = handlers
def getSecurityManager: SecurityManager = securityManager
/** Attach a tab to this UI, along with all of its attached pages. */
@@ -133,7 +133,7 @@ private[spark] abstract class WebUI(
def initialize(): Unit
/** Bind to the HTTP server behind this web interface. */
- def bind() {
+ def bind(): Unit = {
assert(!serverInfo.isDefined, s"Attempted to bind $className more than once!")
try {
val host = Option(conf.getenv("SPARK_LOCAL_IP")).getOrElse("0.0.0.0")
@@ -156,7 +156,7 @@ private[spark] abstract class WebUI(
def boundPort: Int = serverInfo.map(_.boundPort).getOrElse(-1)
/** Stop the server behind this web interface. Only valid after bind(). */
- def stop() {
+ def stop(): Unit = {
assert(serverInfo.isDefined,
s"Attempted to stop $className before binding to a server!")
serverInfo.get.stop()
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/AllJobsPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/AllJobsPage.scala
index 173fc3cf31..50e8e2d19e 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/AllJobsPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/AllJobsPage.scala
@@ -289,8 +289,8 @@ private[ui] class AllJobsPage(parent: JobsTab) extends WebUIPage("") {
val startTime = listener.startTime
val endTime = listener.endTime
val activeJobs = listener.activeJobs.values.toSeq
- val completedJobs = listener.completedJobs.reverse.toSeq
- val failedJobs = listener.failedJobs.reverse.toSeq
+ val completedJobs = listener.completedJobs.reverse
+ val failedJobs = listener.failedJobs.reverse
val activeJobsTable =
jobsTable(request, "active", "activeJob", activeJobs, killEnabled = parent.killEnabled)