aboutsummaryrefslogtreecommitdiff
path: root/core/src/test
diff options
context:
space:
mode:
authorHemant Bhanawat <hemant@snappydata.io>2016-04-27 10:59:23 -0700
committerMarcelo Vanzin <vanzin@cloudera.com>2016-04-27 10:59:23 -0700
commite4d439c831a7fe3dbfeff6ce029c3ce1f9420ab8 (patch)
tree53939ca7c100561dd3b20c2eb5f9ddf128e6c395 /core/src/test
parent607f50341c8d86f0034f3aae69a55f25d55a012e (diff)
downloadspark-e4d439c831a7fe3dbfeff6ce029c3ce1f9420ab8.tar.gz
spark-e4d439c831a7fe3dbfeff6ce029c3ce1f9420ab8.tar.bz2
spark-e4d439c831a7fe3dbfeff6ce029c3ce1f9420ab8.zip
[SPARK-14729][SCHEDULER] Refactored YARN scheduler creation code to use newly added ExternalClusterManager
## What changes were proposed in this pull request? With the addition of ExternalClusterManager(ECM) interface in PR #11723, any cluster manager can now be integrated with Spark. It was suggested in ExternalClusterManager PR that one of the existing cluster managers should start using the new interface to ensure that the API is correct. Ideally, all the existing cluster managers should eventually use the ECM interface but as a first step yarn will now use the ECM interface. This PR refactors YARN code from SparkContext.createTaskScheduler function into YarnClusterManager that implements ECM interface. ## How was this patch tested? Since this is refactoring, no new tests have been added. Existing tests have been run. Basic manual testing with YARN was done too. Author: Hemant Bhanawat <hemant@snappydata.io> Closes #12641 from hbhanawat/yarnClusterMgr.
Diffstat (limited to 'core/src/test')
-rw-r--r--core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala20
1 files changed, 0 insertions, 20 deletions
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
index 49c2bf6bca..213d70f4e5 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
@@ -129,26 +129,6 @@ class SparkContextSchedulerCreationSuite
}
}
- def testYarn(master: String, deployMode: String, expectedClassName: String) {
- try {
- val sched = createTaskScheduler(master, deployMode)
- assert(sched.getClass === Utils.classForName(expectedClassName))
- } catch {
- case e: SparkException =>
- assert(e.getMessage.contains("YARN mode not available"))
- logWarning("YARN not available, could not test actual YARN scheduler creation")
- case e: Throwable => fail(e)
- }
- }
-
- test("yarn-cluster") {
- testYarn("yarn", "cluster", "org.apache.spark.scheduler.cluster.YarnClusterScheduler")
- }
-
- test("yarn-client") {
- testYarn("yarn", "client", "org.apache.spark.scheduler.cluster.YarnScheduler")
- }
-
def testMesos(master: String, expectedClass: Class[_], coarse: Boolean) {
val conf = new SparkConf().set("spark.mesos.coarse", coarse.toString)
try {