author     Devaraj K <devaraj@apache.org>    2016-02-25 12:18:43 +0000
committer  Sean Owen <sowen@cloudera.com>    2016-02-25 12:18:43 +0000
commit     2e44031fafdb8cf486573b98e4faa6b31ffb90a4 (patch)
tree       b0103fc8d179b77db53df6701b7c49c54c17cbe5
parent     2b2c8c33236677c916541f956f7b94bba014a9ce (diff)
[SPARK-13117][WEB UI] WebUI should use the local ip not 0.0.0.0
Fixed the HTTP server host name/IP issue, i.e. the HTTP server now binds to the configured host name/IP instead of always binding to '0.0.0.0'.

Author: Devaraj K <devaraj@apache.org>

Closes #11133 from devaraj-kavali/SPARK-13117.
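For context, a minimal sketch of what the change means for a driver application; the idea that the UI's bind host follows SPARK_PUBLIC_DNS / spark.driver.host is an assumption about the surrounding WebUI code, and the address below is purely illustrative:

    import org.apache.spark.{SparkConf, SparkContext}

    // With this patch the driver's web UI (default port 4040) listens on the host Spark
    // resolves for the driver rather than on the wildcard address 0.0.0.0.
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("webui-bind-sketch")
      .set("spark.driver.host", "192.168.1.10") // illustrative address, not part of this patch

    val sc = new SparkContext(conf)
    // The UI is now reachable only via the configured host/IP, not every local interface.
    sc.stop()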
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/WebUI.scala                        2
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala   3
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/ui/WebUI.scala b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
index fe4949b9f6..e515916f31 100644
--- a/core/src/main/scala/org/apache/spark/ui/WebUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
@@ -134,7 +134,7 @@ private[spark] abstract class WebUI(
   def bind() {
     assert(!serverInfo.isDefined, "Attempted to bind %s more than once!".format(className))
     try {
-      serverInfo = Some(startJettyServer("0.0.0.0", port, sslOptions, handlers, conf, name))
+      serverInfo = Some(startJettyServer(publicHostName, port, sslOptions, handlers, conf, name))
       logInfo("Started %s at http://%s:%d".format(className, publicHostName, boundPort))
     } catch {
       case e: Exception =>
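The hunk above only changes the call site; how publicHostName itself is resolved is not shown here. A hedged sketch of the typical resolution order (SPARK_PUBLIC_DNS environment variable, then spark.driver.host, then the machine's own host name) — the helper name and the InetAddress fallback are stand-ins for Spark's private Utils.localHostName(), used only to keep the sketch self-contained:

    import java.net.InetAddress
    import org.apache.spark.SparkConf

    // Assumed shape of the value handed to startJettyServer (not part of this diff):
    // prefer SPARK_PUBLIC_DNS, fall back to spark.driver.host, then the local host name.
    def resolvePublicHostName(conf: SparkConf): String =
      Option(conf.getenv("SPARK_PUBLIC_DNS"))
        .getOrElse(conf.get("spark.driver.host", InetAddress.getLocalHost.getHostName))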
diff --git a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
index f416ace5c2..972b552f7a 100644
--- a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
@@ -26,6 +26,7 @@ import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
 import org.apache.spark.scheduler.{SparkListener, SparkListenerExecutorAdded}
 import org.apache.spark.scheduler.cluster.ExecutorInfo
 import org.apache.spark.util.SparkConfWithEnv
+import org.apache.spark.util.Utils
 
 class LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext {
@@ -53,7 +54,7 @@ class LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext {
   }
 
   test("verify that log urls reflect SPARK_PUBLIC_DNS (SPARK-6175)") {
-    val SPARK_PUBLIC_DNS = "public_dns"
+    val SPARK_PUBLIC_DNS = Utils.localHostNameForURI()
     val conf = new SparkConfWithEnv(Map("SPARK_PUBLIC_DNS" -> SPARK_PUBLIC_DNS)).set(
       "spark.extraListeners", classOf[SaveExecutorInfo].getName)
     sc = new SparkContext("local-cluster[2,1,1024]", "test", conf)
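Why the test value changes: once the server really binds to the SPARK_PUBLIC_DNS value, an unresolvable placeholder like "public_dns" would make the bind fail, so the test switches to the real local host name via Utils.localHostNameForURI(). A small illustrative check of that idea, using plain java.net rather than Spark's API:

    import java.net.{InetSocketAddress, ServerSocket}

    // Binding to an unresolvable name fails, while the machine's own host name works.
    def canBind(host: String): Boolean =
      try {
        val socket = new ServerSocket()
        socket.bind(new InetSocketAddress(host, 0)) // port 0 = any free port
        socket.close()
        true
      } catch {
        case _: java.io.IOException => false
      }

    // canBind("public_dns")                                  // almost certainly false
    // canBind(java.net.InetAddress.getLocalHost.getHostName) // expected true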