author    Allen <yufan_1990@163.com>  2016-05-01 15:39:14 +0100
committer Sean Owen <sowen@cloudera.com>  2016-05-01 15:39:14 +0100
commit    cdf9e9753df4e7f2fa4e972d1bfded4e22943c27 (patch)
tree      47e942be554ec9077877cb269b06bbeb860541bc /core/src/test
parent    90787de864b58a1079c23e6581381ca8ffe7685f (diff)
[SPARK-14505][CORE] Fix bug: after creating two SparkContext objects in the same JVM, the first one can no longer run any task
After attempting to create two SparkContext objects in the same JVM (the second one cannot be created successfully), using the first one to run a job throws an exception like the one below: ![image](https://cloud.githubusercontent.com/assets/7162889/14402832/0c8da2a6-fe73-11e5-8aba-68ee3ddaf605.png) Author: Allen <yufan_1990@163.com> Closes #12273 from the-sea/context-create-bug.
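For context, here is a minimal, self-contained sketch of the scenario this change guards against. It is illustrative only: the app name, master URL, and the toy reduce job are hypothetical and not taken from the original report.

```scala
import org.apache.spark.{SparkConf, SparkContext, SparkException}

object MultipleContextsRepro {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("repro")                                 // hypothetical app name
      .setMaster("local")
      .set("spark.driver.allowMultipleContexts", "false")  // disallow a second context

    // The first context starts normally.
    val sc = new SparkContext(conf)

    // A second context in the same JVM is rejected with a SparkException.
    try {
      new SparkContext(conf)
    } catch {
      case e: SparkException => println(s"Expected failure: ${e.getMessage}")
    }

    // Before this fix, the failed construction could clobber shared driver
    // state (SparkEnv), so this job on the *first* context would then fail.
    val sum = sc.parallelize(1 to 10).reduce(_ + _)
    println(s"Sum computed on the first context: $sum")

    sc.stop()
  }
}
```

The test change below captures the same idea by asserting that `SparkEnv.get` is unchanged after the failed second construction.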
Diffstat (limited to 'core/src/test')
-rw-r--r-- core/src/test/scala/org/apache/spark/SparkContextSuite.scala | 4
1 file changed, 4 insertions, 0 deletions
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 841fd02ae8..a759f364fe 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -39,8 +39,12 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
     val conf = new SparkConf().setAppName("test").setMaster("local")
       .set("spark.driver.allowMultipleContexts", "false")
     sc = new SparkContext(conf)
+    val envBefore = SparkEnv.get
     // A SparkContext is already running, so we shouldn't be able to create a second one
     intercept[SparkException] { new SparkContext(conf) }
+    val envAfter = SparkEnv.get
+    // SparkEnv and other context variables should be the same
+    assert(envBefore == envAfter)
     // After stopping the running context, we should be able to create a new one
     resetSparkContext()
     sc = new SparkContext(conf)