about summary refs log tree commit diff
path: root/core
diff options
context:
space:
mode:
authorPatrick Wendell <pwendell@gmail.com>2013-08-01 13:37:22 -0700
committerPatrick Wendell <pwendell@gmail.com>2013-08-01 15:00:42 -0700
commitb10199413a1e9193c605dc17de276c78b578e545 (patch)
treeaba4486571f1effebe4ea0478b564cd69be87ba7 /core
parentcfcd77b5da7d8c453b525c1862cba158eddc5170 (diff)
downloadspark-b10199413a1e9193c605dc17de276c78b578e545.tar.gz
spark-b10199413a1e9193c605dc17de276c78b578e545.tar.bz2
spark-b10199413a1e9193c605dc17de276c78b578e545.zip
Slight refactoring to SparkContext functions
Diffstat (limited to 'core')
-rw-r--r--core/src/main/scala/spark/SparkContext.scala13
-rw-r--r--core/src/main/scala/spark/ui/jobs/PoolTable.scala4
2 files changed, 10 insertions, 7 deletions
diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/spark/SparkContext.scala
index 2e2a699708..97e1aaf49e 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/spark/SparkContext.scala
@@ -581,21 +581,24 @@ class SparkContext(
* Return pools for fair scheduler
* TODO(xiajunluan): We should take nested pools into account
*/
- def getPools: ArrayBuffer[Schedulable] = {
+ def getAllPools: ArrayBuffer[Schedulable] = {
taskScheduler.rootPool.schedulableQueue
}
/**
+ * Return the pool associated with the given name, if one exists
+ */
+ def getPoolForName(pool: String): Option[Schedulable] = {
+ taskScheduler.rootPool.schedulableNameToSchedulable.get(pool)
+ }
+
+ /**
* Return current scheduling mode
*/
def getSchedulingMode: SchedulingMode.SchedulingMode = {
taskScheduler.schedulingMode
}
- def getPoolNameToPool: HashMap[String, Schedulable] = {
- taskScheduler.rootPool.schedulableNameToSchedulable
- }
-
/**
* Clear the job's list of files added by `addFile` so that they do not get downloaded to
* any new nodes.
diff --git a/core/src/main/scala/spark/ui/jobs/PoolTable.scala b/core/src/main/scala/spark/ui/jobs/PoolTable.scala
index 8788ed8bc1..29061199df 100644
--- a/core/src/main/scala/spark/ui/jobs/PoolTable.scala
+++ b/core/src/main/scala/spark/ui/jobs/PoolTable.scala
@@ -38,7 +38,7 @@ private[spark] class FIFOSource() extends PoolSource {
*/
private[spark] class FairSource(sc: SparkContext) extends PoolSource {
def getPools: Seq[Schedulable] = {
- sc.getPools.toSeq
+ sc.getAllPools.toSeq
}
}
@@ -48,7 +48,7 @@ private[spark] class FairSource(sc: SparkContext) extends PoolSource {
private[spark] class PoolDetailSource(sc: SparkContext, poolName: String) extends PoolSource {
def getPools: Seq[Schedulable] = {
val pools = HashSet[Schedulable]()
- pools += sc.getPoolNameToPool(poolName)
+ pools += sc.getPoolForName(poolName).get
pools.toSeq
}
}