author     Andrew Or <andrew@databricks.com>  2014-12-10 12:48:24 -0800
committer  Andrew Or <andrew@databricks.com>  2014-12-10 12:48:24 -0800
commit     faa8fd8178642ef8fce14186abc45a189042efd4 (patch)
tree       6b58ebc7819c34ddf8e00e29f671fe3e980e5a97 /core
parent     56212831c6436e287a19908e82c26117cbcb16b0 (diff)
[SPARK-4215] Allow requesting / killing executors only in YARN mode
Currently this doesn't do anything in other modes, so we might as well just
disable it rather than having the user mistakenly rely on it.

Author: Andrew Or <andrew@databricks.com>

Closes #3615 from andrewor14/dynamic-allocation-yarn-only and squashes the
following commits:

ce6487a [Andrew Or] Allow requesting / killing executors only in YARN mode
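For context (not part of the commit), a minimal sketch of the configuration path this patch guards. The master URL and app name are illustrative; any master string containing "yarn" passes the new check:

    // Sketch only: how an application opts in to dynamic allocation after this patch.
    import org.apache.spark.{SparkConf, SparkContext}

    val conf = new SparkConf()
      .setMaster("yarn-client")                        // any master containing "yarn" passes the new assert
      .setAppName("dynamic-allocation-example")        // illustrative name
      .set("spark.dynamicAllocation.enabled", "true")  // creates the ExecutorAllocationManager

    val sc = new SparkContext(conf)  // on e.g. "local", construction now fails fast in the assert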
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala                   | 10 +++++++++-
-rw-r--r--  core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala |  1 +
2 files changed, 10 insertions(+), 1 deletion(-)
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index aded7c12e2..8e5378ecc0 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -357,8 +357,12 @@ class SparkContext(config: SparkConf) extends Logging {
   }
 
   // Optionally scale number of executors dynamically based on workload. Exposed for testing.
+  private val dynamicAllocationEnabled = conf.getBoolean("spark.dynamicAllocation.enabled", false)
+  private val dynamicAllocationTesting = conf.getBoolean("spark.dynamicAllocation.testing", false)
   private[spark] val executorAllocationManager: Option[ExecutorAllocationManager] =
-    if (conf.getBoolean("spark.dynamicAllocation.enabled", false)) {
+    if (dynamicAllocationEnabled) {
+      assert(master.contains("yarn") || dynamicAllocationTesting,
+        "Dynamic allocation of executors is currently only supported in YARN mode")
       Some(new ExecutorAllocationManager(this))
     } else {
       None
@@ -989,6 +993,8 @@ class SparkContext(config: SparkConf) extends Logging {
    */
   @DeveloperApi
   def requestExecutors(numAdditionalExecutors: Int): Boolean = {
+    assert(master.contains("yarn") || dynamicAllocationTesting,
+      "Requesting executors is currently only supported in YARN mode")
     schedulerBackend match {
       case b: CoarseGrainedSchedulerBackend =>
         b.requestExecutors(numAdditionalExecutors)
@@ -1005,6 +1011,8 @@ class SparkContext(config: SparkConf) extends Logging {
    */
   @DeveloperApi
   def killExecutors(executorIds: Seq[String]): Boolean = {
+    assert(master.contains("yarn") || dynamicAllocationTesting,
+      "Killing executors is currently only supported in YARN mode")
     schedulerBackend match {
       case b: CoarseGrainedSchedulerBackend =>
         b.killExecutors(executorIds)
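For illustration (not part of the diff), a hypothetical caller of the two @DeveloperApi methods guarded above, assuming the YARN-backed SparkContext from the earlier sketch; the executor IDs are made up:

    // Hypothetical use of the guarded developer API on a YARN-backed SparkContext:
    val granted = sc.requestExecutors(2)           // ask the cluster manager for two more executors
    val released = sc.killExecutors(Seq("1", "2")) // release executors by ID (IDs illustrative)
    // Per the Boolean signatures above, both report whether the scheduler backend
    // accepted the request; on a non-YARN master the new asserts fail first.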
diff --git a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
index ce804f94f3..c817f6dced 100644
--- a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
@@ -35,6 +35,7 @@ class ExecutorAllocationManagerSuite extends FunSuite with LocalSparkContext {
.setMaster("local")
.setAppName("test-executor-allocation-manager")
.set("spark.dynamicAllocation.enabled", "true")
+ .set("spark.dynamicAllocation.testing", "true")
intercept[SparkException] { new SparkContext(conf) }
SparkEnv.get.stop() // cleanup the created environment
SparkContext.clearActiveContext()
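A note on the test change: the suite runs with a "local" master, so without the new spark.dynamicAllocation.testing escape hatch the added assert would abort SparkContext construction before the SparkException the test intercepts. A sketch of that failure mode (this check is not in the suite; Scala's Predef.assert throws java.lang.AssertionError):

    // Not in the suite: without the testing flag, the new assert fires on a non-YARN master.
    val badConf = new SparkConf()
      .setMaster("local")
      .setAppName("test-executor-allocation-manager")
      .set("spark.dynamicAllocation.enabled", "true")
    intercept[AssertionError] { new SparkContext(badConf) }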