author    Andrew Or <andrew@databricks.com>  2016-05-10 12:07:47 -0700
committer Andrew Or <andrew@databricks.com>  2016-05-10 12:07:47 -0700
commit    69641066ae1d35c33b082451cef636a7f2e646d9 (patch)
tree      13c43b7b7e3e8662e89cfa0a7c238daa5f0bb3bc /sql
parent    db3b4a20150ff7fb1caaf62ab3d2a2f1e632af36 (diff)
[SPARK-15037][HOTFIX] Don't create 2 SparkSessions in constructor
## What changes were proposed in this pull request?

After #12907, `TestSparkSession` creates a Spark session in one of its constructors just to get the `SparkContext` from it. This ends up creating 2 `SparkSession`s from one call, which is definitely not what we want.

## How was this patch tested?

Jenkins.

Author: Andrew Or <andrew@databricks.com>

Closes #13031 from andrewor14/sql-test.
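For context, a hedged sketch of what the constructor chain looks like with this fix applied. Only the zero-arg constructor appears in the diff below; the body of the `SparkConf`-taking constructor is an assumption inferred from the surrounding code, not something this patch shows.

```scala
package org.apache.spark.sql.test

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession

// Sketch only: the SparkConf-taking constructor is assumed to wrap the conf
// in a single local SparkContext; its exact body is not part of this diff.
private[sql] class TestSparkSession(sc: SparkContext) extends SparkSession(sc) {

  def this(sparkConf: SparkConf) {
    // Assumed: build exactly one SparkContext from the given conf,
    // instead of spinning up a throwaway SparkSession to extract one.
    this(new SparkContext("local[2]", "test-sql-context",
      sparkConf.set("spark.sql.testkey", "true")))
  }

  def this() {
    // After this hotfix: delegate to the SparkConf constructor above.
    this(new SparkConf)
  }
}
```

With this shape, `new TestSparkSession()` delegates straight down to a single `SparkContext`, so no intermediate `SparkSession` is ever built.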
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala | 12
1 file changed, 1 insertion(+), 11 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala
index 785e3452a8..2f247ca3e8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala
@@ -31,17 +31,7 @@ private[sql] class TestSparkSession(sc: SparkContext) extends SparkSession(sc) {
}
def this() {
- this {
- val conf = new SparkConf()
- conf.set("spark.sql.testkey", "true")
-
- val spark = SparkSession.builder
- .master("local[2]")
- .appName("test-sql-context")
- .config(conf)
- .getOrCreate()
- spark.sparkContext
- }
+ this(new SparkConf)
}
@transient