path: root/sql/core
author     Sean Owen <sowen@cloudera.com>            2016-08-13 15:40:43 -0700
committer  Reynold Xin <rxin@databricks.com>         2016-08-13 15:40:43 -0700
commit     cdaa562c9a09e2e83e6df4e84d911ce1428a7a7c (patch)
tree       723a678a14fe4ba4f5f9a12bd3488c5484416bad /sql/core
parent     67f025d90e6ba8c039ff45e26d34f20d24b92e6a (diff)
download   spark-cdaa562c9a09e2e83e6df4e84d911ce1428a7a7c.tar.gz
           spark-cdaa562c9a09e2e83e6df4e84d911ce1428a7a7c.tar.bz2
           spark-cdaa562c9a09e2e83e6df4e84d911ce1428a7a7c.zip
[SPARK-16966][SQL][CORE] App Name is a randomUUID even when "spark.app.name" exists
## What changes were proposed in this pull request?

Don't override an app name specified in `SparkConf` with a random app name. Only set a random name if the conf still has no app name after the builder's options have been applied. See also https://github.com/apache/spark/pull/14602. This is similar to Sherry302's original proposal in https://github.com/apache/spark/pull/14556.

## How was this patch tested?

Jenkins tests, with a new test case reproducing the bug.

Author: Sean Owen <sowen@cloudera.com>

Closes #14630 from srowen/SPARK-16966.2.
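As a user-facing illustration of the behavior this change restores, here is a minimal sketch (not part of the patch; the object name, master URL, and the system-property shortcut standing in for `spark-submit --name` are assumptions): an app name that reaches `SparkConf` from outside the builder's options is preserved instead of being replaced by a random UUID.

```scala
import org.apache.spark.sql.SparkSession

object SubmittedAppNameSketch {
  def main(args: Array[String]): Unit = {
    // Simulate what spark-submit --name or spark-defaults.conf would do:
    // spark.app.name is present as a system property, but is never passed
    // through the builder's own options.
    sys.props("spark.app.name") = "my-submitted-app"

    val spark = SparkSession.builder()
      .master("local[1]")
      .getOrCreate()

    // Prints "my-submitted-app"; before this fix the builder could have
    // overwritten it with a random UUID.
    println(spark.sparkContext.conf.get("spark.app.name"))

    spark.stop()
  }
}
```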
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala             | 11
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala |  1
2 files changed, 8 insertions(+), 4 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 2ade36d075..362bf45d03 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -816,16 +816,19 @@ object SparkSession {
         // No active nor global default session. Create a new one.
         val sparkContext = userSuppliedContext.getOrElse {
           // set app name if not given
-          if (!options.contains("spark.app.name")) {
-            options += "spark.app.name" -> java.util.UUID.randomUUID().toString
-          }
-
+          val randomAppName = java.util.UUID.randomUUID().toString
           val sparkConf = new SparkConf()
           options.foreach { case (k, v) => sparkConf.set(k, v) }
+          if (!sparkConf.contains("spark.app.name")) {
+            sparkConf.setAppName(randomAppName)
+          }
           val sc = SparkContext.getOrCreate(sparkConf)
           // maybe this is an existing SparkContext, update its SparkConf which maybe used
           // by SparkSession
           options.foreach { case (k, v) => sc.conf.set(k, v) }
+          if (!sc.conf.contains("spark.app.name")) {
+            sc.conf.setAppName(randomAppName)
+          }
           sc
         }
         session = new SparkSession(sparkContext)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
index 418345b9ee..386d13d07a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
@@ -100,6 +100,7 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
     assert(session.conf.get("key2") == "value2")
     assert(session.sparkContext.conf.get("key1") == "value1")
     assert(session.sparkContext.conf.get("key2") == "value2")
+    assert(session.sparkContext.conf.get("spark.app.name") == "test")
     session.stop()
   }
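The second code path touched by the patch, where a `SparkContext` already exists before the builder runs, can be sketched as below. This is a hedged illustration rather than the suite's verbatim source; the object name and config values are illustrative.

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession

object ExistingContextAppNameSketch {
  def main(args: Array[String]): Unit = {
    // A SparkContext created up front with an explicit app name.
    val conf = new SparkConf().setMaster("local").setAppName("test").set("key1", "value1")
    val sc = new SparkContext(conf)

    // The builder reuses the existing context; its options are copied onto
    // sc.conf, but spark.app.name is only set when it is still missing, so
    // "test" survives instead of being replaced by a random UUID.
    val session = SparkSession.builder().config("key2", "value2").getOrCreate()
    assert(session.sparkContext.conf.get("spark.app.name") == "test")

    session.stop()
  }
}
```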