aboutsummaryrefslogtreecommitdiff
path: root/core
diff options
context:
space:
mode:
authorAllen <yufan_1990@163.com>2016-05-01 15:39:14 +0100
committerSean Owen <sowen@cloudera.com>2016-05-01 15:39:14 +0100
commitcdf9e9753df4e7f2fa4e972d1bfded4e22943c27 (patch)
tree47e942be554ec9077877cb269b06bbeb860541bc /core
parent90787de864b58a1079c23e6581381ca8ffe7685f (diff)
downloadspark-cdf9e9753df4e7f2fa4e972d1bfded4e22943c27.tar.gz
spark-cdf9e9753df4e7f2fa4e972d1bfded4e22943c27.tar.bz2
spark-cdf9e9753df4e7f2fa4e972d1bfded4e22943c27.zip
[SPARK-14505][CORE] Fix bug: after creating two SparkContext objects in the same JVM, the first one cannot run any task!
After creating two SparkContext objects in the same JVM (the second one cannot be created successfully!), using the first one to run a job will throw an exception like the one below: ![image](https://cloud.githubusercontent.com/assets/7162889/14402832/0c8da2a6-fe73-11e5-8aba-68ee3ddaf605.png) Author: Allen <yufan_1990@163.com> Closes #12273 from the-sea/context-create-bug.
Diffstat (limited to 'core')
-rw-r--r--core/src/main/scala/org/apache/spark/SparkContext.scala27
-rw-r--r--core/src/test/scala/org/apache/spark/SparkContextSuite.scala4
2 files changed, 16 insertions, 15 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index ed4408cc5a..2cb3ed0296 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -2216,21 +2216,7 @@ object SparkContext extends Logging {
sc: SparkContext,
allowMultipleContexts: Boolean): Unit = {
SPARK_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
- contextBeingConstructed.foreach { otherContext =>
- if (otherContext ne sc) { // checks for reference equality
- // Since otherContext might point to a partially-constructed context, guard against
- // its creationSite field being null:
- val otherContextCreationSite =
- Option(otherContext.creationSite).map(_.longForm).getOrElse("unknown location")
- val warnMsg = "Another SparkContext is being constructed (or threw an exception in its" +
- " constructor). This may indicate an error, since only one SparkContext may be" +
- " running in this JVM (see SPARK-2243)." +
- s" The other SparkContext was created at:\n$otherContextCreationSite"
- logWarning(warnMsg)
- }
-
- if (activeContext.get() != null) {
- val ctx = activeContext.get()
+ Option(activeContext.get()).filter(_ ne sc).foreach { ctx =>
val errMsg = "Only one SparkContext may be running in this JVM (see SPARK-2243)." +
" To ignore this error, set spark.driver.allowMultipleContexts = true. " +
s"The currently running SparkContext was created at:\n${ctx.creationSite.longForm}"
@@ -2241,6 +2227,17 @@ object SparkContext extends Logging {
throw exception
}
}
+
+ contextBeingConstructed.filter(_ ne sc).foreach { otherContext =>
+ // Since otherContext might point to a partially-constructed context, guard against
+ // its creationSite field being null:
+ val otherContextCreationSite =
+ Option(otherContext.creationSite).map(_.longForm).getOrElse("unknown location")
+ val warnMsg = "Another SparkContext is being constructed (or threw an exception in its" +
+ " constructor). This may indicate an error, since only one SparkContext may be" +
+ " running in this JVM (see SPARK-2243)." +
+ s" The other SparkContext was created at:\n$otherContextCreationSite"
+ logWarning(warnMsg)
}
}
}
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 841fd02ae8..a759f364fe 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -39,8 +39,12 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
val conf = new SparkConf().setAppName("test").setMaster("local")
.set("spark.driver.allowMultipleContexts", "false")
sc = new SparkContext(conf)
+ val envBefore = SparkEnv.get
// A SparkContext is already running, so we shouldn't be able to create a second one
intercept[SparkException] { new SparkContext(conf) }
+ val envAfter = SparkEnv.get
+ // SparkEnv and other context variables should be the same
+ assert(envBefore == envAfter)
// After stopping the running context, we should be able to create a new one
resetSparkContext()
sc = new SparkContext(conf)