author     Imran Rashid <imran@quantifind.com>    2013-01-30 18:51:14 -0800
committer  Imran Rashid <imran@quantifind.com>    2013-01-30 18:51:14 -0800
commit     c1df24d0850b0ac89f35f1a47ce6b2fb5b95df0a (patch)
tree       29824d494fcaa786df0d8dd79aa8c01565c69296
parent     b92259ba570c0fc5d8a5c5acd59d4399398e380e (diff)
rename Slaves --> Executor
-rw-r--r--  core/src/main/scala/spark/SparkContext.scala            | 6
-rw-r--r--  core/src/main/scala/spark/storage/BlockManagerUI.scala  | 4
2 files changed, 5 insertions, 5 deletions
diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/spark/SparkContext.scala
index a09eca1dd0..39e3555de8 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/spark/SparkContext.scala
@@ -468,7 +468,7 @@ class SparkContext(
    * Return a map from the slave to the max memory available for caching and the remaining
    * memory available for caching.
    */
-  def getSlavesMemoryStatus: Map[String, (Long, Long)] = {
+  def getExecutorMemoryStatus: Map[String, (Long, Long)] = {
     env.blockManager.master.getMemoryStatus.map { case(blockManagerId, mem) =>
       (blockManagerId.ip + ":" + blockManagerId.port, mem)
     }
@@ -479,13 +479,13 @@ class SparkContext(
    * they take, etc.
    */
   def getRDDStorageInfo : Array[RDDInfo] = {
-    StorageUtils.rddInfoFromStorageStatus(getSlavesStorageStatus, this)
+    StorageUtils.rddInfoFromStorageStatus(getExecutorStorageStatus, this)
   }

   /**
    * Return information about blocks stored in all of the slaves
    */
-  def getSlavesStorageStatus : Array[StorageStatus] = {
+  def getExecutorStorageStatus : Array[StorageStatus] = {
     env.blockManager.master.getStorageStatus
   }
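
For reference, a minimal driver-side sketch of how the renamed accessors might be called. This is illustrative only: `sc` is a hypothetical, already-constructed SparkContext, and the import is an assumption based on the package paths shown in this diff.

import spark.storage.StorageStatus

// Per-executor memory status, keyed by "ip:port" as built in the hunk above;
// the tuple is (max memory for caching, remaining memory for caching).
val memoryStatus: Map[String, (Long, Long)] = sc.getExecutorMemoryStatus
memoryStatus.foreach { case (hostPort, (maxMem, remainingMem)) =>
  println(hostPort + ": " + remainingMem + " of " + maxMem + " bytes free for caching")
}

// Raw per-executor storage status, the same data the BlockManagerUI consumes below.
val storageStatus: Array[StorageStatus] = sc.getExecutorStorageStatus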
diff --git a/core/src/main/scala/spark/storage/BlockManagerUI.scala b/core/src/main/scala/spark/storage/BlockManagerUI.scala
index 52f6d1b657..9e6721ec17 100644
--- a/core/src/main/scala/spark/storage/BlockManagerUI.scala
+++ b/core/src/main/scala/spark/storage/BlockManagerUI.scala
@@ -45,7 +45,7 @@ class BlockManagerUI(val actorSystem: ActorSystem, blockManagerMaster: ActorRef,
       path("") {
         completeWith {
           // Request the current storage status from the Master
-          val storageStatusList = sc.getSlavesStorageStatus
+          val storageStatusList = sc.getExecutorStorageStatus
           // Calculate macro-level statistics
           val maxMem = storageStatusList.map(_.maxMem).reduce(_+_)
           val remainingMem = storageStatusList.map(_.memRemaining).reduce(_+_)
@@ -60,7 +60,7 @@ class BlockManagerUI(val actorSystem: ActorSystem, blockManagerMaster: ActorRef,
         parameter("id") { id =>
           completeWith {
             val prefix = "rdd_" + id.toString
-            val storageStatusList = sc.getSlavesStorageStatus
+            val storageStatusList = sc.getExecutorStorageStatus
             val filteredStorageStatusList = StorageUtils.
               filterStorageStatusByPrefix(storageStatusList, prefix)
             val rddInfo = StorageUtils.rddInfoFromStorageStatus(filteredStorageStatusList, sc).head
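
The macro-level statistics in the first BlockManagerUI hunk boil down to two reductions over the executor storage statuses. A standalone sketch, reusing the hypothetical `sc` from the earlier example:

// Aggregate caching memory across all executors (illustrative only).
val statuses = sc.getExecutorStorageStatus
val maxMem = statuses.map(_.maxMem).reduce(_ + _)              // total memory available for caching
val remainingMem = statuses.map(_.memRemaining).reduce(_ + _)  // total memory still free
println("In use: " + (maxMem - remainingMem) + " of " + maxMem + " bytes")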