From 2e44031fafdb8cf486573b98e4faa6b31ffb90a4 Mon Sep 17 00:00:00 2001 From: Devaraj K Date: Thu, 25 Feb 2016 12:18:43 +0000 Subject: [SPARK-13117][WEB UI] WebUI should use the local ip not 0.0.0.0 Fixed the HTTP server host name/IP issue, i.e., the HTTP server now uses the configured host name/IP instead of always binding to '0.0.0.0'. Author: Devaraj K Closes #11133 from devaraj-kavali/SPARK-13117. --- core/src/main/scala/org/apache/spark/ui/WebUI.scala | 2 +- .../test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) (limited to 'core') diff --git a/core/src/main/scala/org/apache/spark/ui/WebUI.scala b/core/src/main/scala/org/apache/spark/ui/WebUI.scala index fe4949b9f6..e515916f31 100644 --- a/core/src/main/scala/org/apache/spark/ui/WebUI.scala +++ b/core/src/main/scala/org/apache/spark/ui/WebUI.scala @@ -134,7 +134,7 @@ private[spark] abstract class WebUI( def bind() { assert(!serverInfo.isDefined, "Attempted to bind %s more than once!".format(className)) try { - serverInfo = Some(startJettyServer("0.0.0.0", port, sslOptions, handlers, conf, name)) + serverInfo = Some(startJettyServer(publicHostName, port, sslOptions, handlers, conf, name)) logInfo("Started %s at http://%s:%d".format(className, publicHostName, boundPort)) } catch { case e: Exception => diff --git a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala index f416ace5c2..972b552f7a 100644 --- a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala +++ b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala @@ -26,6 +26,7 @@ import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSui import org.apache.spark.scheduler.{SparkListener, SparkListenerExecutorAdded} import org.apache.spark.scheduler.cluster.ExecutorInfo import org.apache.spark.util.SparkConfWithEnv +import org.apache.spark.util.Utils class 
LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext { @@ -53,7 +54,7 @@ class LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext { } test("verify that log urls reflect SPARK_PUBLIC_DNS (SPARK-6175)") { - val SPARK_PUBLIC_DNS = "public_dns" + val SPARK_PUBLIC_DNS = Utils.localHostNameForURI() val conf = new SparkConfWithEnv(Map("SPARK_PUBLIC_DNS" -> SPARK_PUBLIC_DNS)).set( "spark.extraListeners", classOf[SaveExecutorInfo].getName) sc = new SparkContext("local-cluster[2,1,1024]", "test", conf) -- cgit v1.2.3