author    Matei Zaharia <matei@eecs.berkeley.edu>  2013-09-07 00:34:12 -0400
committer Matei Zaharia <matei@eecs.berkeley.edu>  2013-09-08 00:29:11 -0700
commit    651a96adf7b53085bd810e153f8eabf52eed1994 (patch)
tree      70e9c70470c93c4630de0f958eaed4b98706d2ba /core/src/test
parent    98fb69822cf780160bca51abeaab7c82e49fab54 (diff)
More fair scheduler docs and property names.
Also changed uses of "job" terminology to "application" when they referred to an entire Spark program, to avoid confusion.
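For context, a minimal usage sketch of the renamed properties (not part of this diff). The allocation-file path and the pool name "pool1" are placeholder assumptions; the calls themselves mirror what the test changes below exercise.

import org.apache.spark.SparkContext

object FairPoolExample {
  def main(args: Array[String]) {
    // Enable the fair scheduler and point it at an allocation file,
    // using the property names introduced by this commit.
    System.setProperty("spark.scheduler.mode", "FAIR")
    System.setProperty("spark.scheduler.allocation.file", "/path/to/fairscheduler.xml") // placeholder path

    val sc = new SparkContext("local", "FairPoolExample")
    try {
      // Jobs submitted from this thread go to the named pool
      // (previously spark.scheduler.cluster.fair.pool).
      sc.setLocalProperty("spark.scheduler.pool", "pool1") // "pool1" assumed to be defined in the XML
      val sum = sc.parallelize(1 to 100).reduce(_ + _)
      println("sum = " + sum)
    } finally {
      sc.stop()
    }
  }
}

Because the pool name is a thread-local property, each submitting thread can target a different pool, which is what LocalSchedulerSuite exercises below.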
Diffstat (limited to 'core/src/test')
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala     | 4
2 files changed, 5 insertions, 5 deletions
diff --git a/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala
index 92ad9f09b2..2b0d90e748 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala
@@ -166,7 +166,7 @@ class ClusterSchedulerSuite extends FunSuite with LocalSparkContext with Logging
val taskSet = new TaskSet(tasks.toArray,0,0,0,null)
val xmlPath = getClass.getClassLoader.getResource("fairscheduler.xml").getFile()
- System.setProperty("spark.fairscheduler.allocation.file", xmlPath)
+ System.setProperty("spark.scheduler.allocation.file", xmlPath)
val rootPool = new Pool("", SchedulingMode.FAIR, 0, 0)
val schedulableBuilder = new FairSchedulableBuilder(rootPool)
schedulableBuilder.buildPools()
@@ -183,9 +183,9 @@ class ClusterSchedulerSuite extends FunSuite with LocalSparkContext with Logging
assert(rootPool.getSchedulableByName("3").weight === 1)
val properties1 = new Properties()
- properties1.setProperty("spark.scheduler.cluster.fair.pool","1")
+ properties1.setProperty("spark.scheduler.pool","1")
val properties2 = new Properties()
- properties2.setProperty("spark.scheduler.cluster.fair.pool","2")
+ properties2.setProperty("spark.scheduler.pool","2")
val taskSetManager10 = createDummyTaskSetManager(1, 0, 1, clusterScheduler, taskSet)
val taskSetManager11 = createDummyTaskSetManager(1, 1, 1, clusterScheduler, taskSet)
diff --git a/core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala
index ca9c590a7d..af76c843e8 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala
@@ -73,7 +73,7 @@ class LocalSchedulerSuite extends FunSuite with LocalSparkContext {
TaskThreadInfo.threadToStarted(threadIndex) = new CountDownLatch(1)
new Thread {
if (poolName != null) {
- sc.setLocalProperty("spark.scheduler.cluster.fair.pool", poolName)
+ sc.setLocalProperty("spark.scheduler.pool", poolName)
}
override def run() {
val ans = nums.map(number => {
@@ -152,7 +152,7 @@ class LocalSchedulerSuite extends FunSuite with LocalSparkContext {
val sem = new Semaphore(0)
System.setProperty("spark.scheduler.mode", "FAIR")
val xmlPath = getClass.getClassLoader.getResource("fairscheduler.xml").getFile()
- System.setProperty("spark.fairscheduler.allocation.file", xmlPath)
+ System.setProperty("spark.scheduler.allocation.file", xmlPath)
createThread(10,"1",sc,sem)
TaskThreadInfo.threadToStarted(10).await()