about summary refs log tree commit diff
path: root/core
diff options
context:
space:
mode:
authorMatei Zaharia <matei@databricks.com>2013-12-29 22:19:33 -0500
committerMatei Zaharia <matei@databricks.com>2013-12-29 22:19:33 -0500
commit994f080f8ae3372366e6004600ba791c8a372ff0 (patch)
tree2b4ef5363c5a881dd98e98ca9eecd3c3d5f57371 /core
parenteaa8a68ff08304f713f4f75d39c61c020e0e691d (diff)
downloadspark-994f080f8ae3372366e6004600ba791c8a372ff0.tar.gz
spark-994f080f8ae3372366e6004600ba791c8a372ff0.tar.bz2
spark-994f080f8ae3372366e6004600ba791c8a372ff0.zip
Properly show Spark properties on web UI, and change app name property
Diffstat (limited to 'core')
-rw-r--r--core/src/main/scala/org/apache/spark/SparkConf.scala | 2
-rw-r--r--core/src/main/scala/org/apache/spark/SparkContext.scala | 4
-rw-r--r--core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala | 13
-rw-r--r--core/src/test/scala/org/apache/spark/SparkConfSuite.scala | 2
4 files changed, 12 insertions(+), 9 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 8cecaff5dd..ae52de409e 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -54,7 +54,7 @@ class SparkConf(loadDefaults: Boolean) extends Serializable with Cloneable {
/** Set a name for your application. Shown in the Spark web UI. */
def setAppName(name: String): SparkConf = {
if (name != null) {
- settings("spark.appName") = name
+ settings("spark.app.name") = name
}
this
}
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 6f54fa7a5a..810ed1860b 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -112,7 +112,7 @@ class SparkContext(
if (!conf.contains("spark.master")) {
throw new SparkException("A master URL must be set in your configuration")
}
- if (!conf.contains("spark.appName")) {
+ if (!conf.contains("spark.app.name")) {
throw new SparkException("An application name must be set in your configuration")
}
@@ -127,7 +127,7 @@ class SparkContext(
}
val master = conf.get("spark.master")
- val appName = conf.get("spark.appName")
+ val appName = conf.get("spark.app.name")
val isLocal = (master == "local" || master.startsWith("local["))
diff --git a/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
index 91fa00a66c..6b4602f928 100644
--- a/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
@@ -48,12 +48,15 @@ private[spark] class EnvironmentUI(sc: SparkContext) {
def jvmTable =
UIUtils.listingTable(Seq("Name", "Value"), jvmRow, jvmInformation, fixedWidth = true)
- val properties = System.getProperties.iterator.toSeq
- val classPathProperty = properties.find { case (k, v) =>
- k.contains("java.class.path")
+ val sparkProperties = sc.conf.getAll.sorted
+
+ val systemProperties = System.getProperties.iterator.toSeq
+ val classPathProperty = systemProperties.find { case (k, v) =>
+ k == "java.class.path"
}.getOrElse(("", ""))
- val sparkProperties = properties.filter(_._1.startsWith("spark")).sorted
- val otherProperties = properties.diff(sparkProperties :+ classPathProperty).sorted
+ val otherProperties = systemProperties.filter { case (k, v) =>
+ k != "java.class.path" && !k.startsWith("spark.")
+ }.sorted
val propertyHeaders = Seq("Name", "Value")
def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index aaf0b80fe9..77c7b829b3 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -46,7 +46,7 @@ class SparkConfSuite extends FunSuite with LocalSparkContext {
conf.setExecutorEnv(Seq(("VAR2", "value2"), ("VAR3", "value3")))
assert(conf.get("spark.master") === "local[3]")
- assert(conf.get("spark.appName") === "My app")
+ assert(conf.get("spark.app.name") === "My app")
assert(conf.get("spark.home") === "/path")
assert(conf.get("spark.jars") === "a.jar,b.jar")
assert(conf.get("spark.executorEnv.VAR1") === "value1")