From 4c6ade1ad5e083a44b1907bcae3a8bdc738c5331 Mon Sep 17 00:00:00 2001
From: Patrick Wendell
Date: Thu, 15 Aug 2013 15:52:28 -0700
Subject: Rename `memoryBytesToString` and `memoryMegabytesToString`

These are used all over the place now and they are not specific
to memory at all.

memoryBytesToString     --> bytesToString
memoryMegabytesToString --> megabytesToString
---
 core/src/main/scala/spark/Utils.scala                   | 10 +++++-----
 core/src/main/scala/spark/deploy/master/Master.scala    |  2 +-
 .../main/scala/spark/deploy/master/ui/IndexPage.scala   | 10 +++++-----
 core/src/main/scala/spark/deploy/worker/Worker.scala    |  2 +-
 .../main/scala/spark/deploy/worker/ui/IndexPage.scala   |  6 +++---
 .../main/scala/spark/deploy/worker/ui/WorkerWebUI.scala |  4 ++--
 core/src/main/scala/spark/scheduler/SparkListener.scala |  2 +-
 .../scheduler/cluster/SparkDeploySchedulerBackend.scala |  2 +-
 .../main/scala/spark/storage/BlockFetcherIterator.scala |  4 ++--
 .../scala/spark/storage/BlockManagerMasterActor.scala   | 14 +++++++-------
 core/src/main/scala/spark/storage/DiskStore.scala       |  4 ++--
 core/src/main/scala/spark/storage/MemoryStore.scala     |  6 +++---
 core/src/main/scala/spark/storage/StorageUtils.scala    |  4 ++--
 core/src/main/scala/spark/ui/exec/ExecutorsUI.scala     | 10 +++++-----
 core/src/main/scala/spark/ui/jobs/StagePage.scala       | 10 +++++-----
 core/src/main/scala/spark/ui/jobs/StageTable.scala      |  4 ++--
 core/src/main/scala/spark/ui/storage/IndexPage.scala    |  4 ++--
 core/src/main/scala/spark/ui/storage/RDDPage.scala      | 14 +++++++-------
 core/src/test/scala/spark/UtilsSuite.scala              | 16 ++++++++--------
 19 files changed, 64 insertions(+), 64 deletions(-)

diff --git a/core/src/main/scala/spark/Utils.scala b/core/src/main/scala/spark/Utils.scala
index 673f9a810d..885a7391d6 100644
--- a/core/src/main/scala/spark/Utils.scala
+++ b/core/src/main/scala/spark/Utils.scala
@@ -521,9 +521,9 @@ private object Utils extends Logging {
   }
 
   /**
-   * Convert a memory quantity in bytes to a human-readable string such as "4.0 MB".
+   * Convert a quantity in bytes to a human-readable string such as "4.0 MB".
    */
-  def memoryBytesToString(size: Long): String = {
+  def bytesToString(size: Long): String = {
     val TB = 1L << 40
     val GB = 1L << 30
     val MB = 1L << 20
@@ -566,10 +566,10 @@ private object Utils extends Logging {
   }
 
   /**
-   * Convert a memory quantity in megabytes to a human-readable string such as "4.0 MB".
+   * Convert a quantity in megabytes to a human-readable string such as "4.0 MB".
    */
-  def memoryMegabytesToString(megabytes: Long): String = {
-    memoryBytesToString(megabytes * 1024L * 1024L)
+  def megabytesToString(megabytes: Long): String = {
+    bytesToString(megabytes * 1024L * 1024L)
   }
 
   /**
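For reference, the renamed helper picks the largest binary unit in which the quantity is still at least 2.0 and prints one decimal place, which is why 1500 bytes stays "1500.0 B" rather than becoming "1.5 KB". A minimal standalone sketch of that behavior (not the verbatim method body, which falls between the two hunks above), with expected outputs taken from UtilsSuite at the end of this patch:

    // Sketch only: reproduces the behavior the renamed Utils.bytesToString keeps.
    def bytesToString(size: Long): String = {
      val TB = 1L << 40
      val GB = 1L << 30
      val MB = 1L << 20
      val KB = 1L << 10
      // Use the largest unit in which the value is still >= 2, else plain bytes.
      val (value, unit) =
        if (size >= 2 * TB) (size.toDouble / TB, "TB")
        else if (size >= 2 * GB) (size.toDouble / GB, "GB")
        else if (size >= 2 * MB) (size.toDouble / MB, "MB")
        else if (size >= 2 * KB) (size.toDouble / KB, "KB")
        else (size.toDouble, "B")
      "%.1f %s".formatLocal(java.util.Locale.US, value, unit)
    }

    bytesToString(1500)        // "1500.0 B"
    bytesToString(2306867)     // "2.2 MB"
    bytesToString(5368709120L) // "5.0 GB"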
diff --git a/core/src/main/scala/spark/deploy/master/Master.scala b/core/src/main/scala/spark/deploy/master/Master.scala
index 152cb2887a..04af5e149c 100644
--- a/core/src/main/scala/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/spark/deploy/master/Master.scala
@@ -96,7 +96,7 @@ private[spark] class Master(host: String, port: Int, webUiPort: Int) extends Act
   override def receive = {
     case RegisterWorker(id, host, workerPort, cores, memory, worker_webUiPort, publicAddress) => {
       logInfo("Registering worker %s:%d with %d cores, %s RAM".format(
-        host, workerPort, cores, Utils.memoryMegabytesToString(memory)))
+        host, workerPort, cores, Utils.megabytesToString(memory)))
       if (idToWorker.contains(id)) {
         sender ! RegisterWorkerFailed("Duplicate worker ID")
       } else {
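At this call site only the helper's name changes; the log output stays the same. A hypothetical rendering with invented host, port, core, and memory values:

    // Invented example values; memory is in megabytes, so 4096 prints as "4.0 GB".
    "Registering worker %s:%d with %d cores, %s RAM".format(
      "10.0.0.1", 7078, 8, Utils.megabytesToString(4096))
    // => "Registering worker 10.0.0.1:7078 with 8 cores, 4.0 GB RAM"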
diff --git a/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala b/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala
index 4443d88056..1d293ae11c 100644
--- a/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala
+++ b/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala
@@ -70,8 +70,8 @@ private[spark] class IndexPage(parent: MasterWebUI) {
             <li><strong>Cores:</strong> {state.workers.map(_.cores).sum} Total,
               {state.workers.map(_.coresUsed).sum} Used</li>
             <li><strong>Memory:</strong>
-              {Utils.memoryMegabytesToString(state.workers.map(_.memory).sum)} Total,
-              {Utils.memoryMegabytesToString(state.workers.map(_.memoryUsed).sum)} Used</li>
+              {Utils.megabytesToString(state.workers.map(_.memory).sum)} Total,
+              {Utils.megabytesToString(state.workers.map(_.memoryUsed).sum)} Used</li>
             <li><strong>Applications:</strong>
               {state.activeApps.size} Running,
               {state.completedApps.size} Completed</li>
@@ -116,8 +116,8 @@ private[spark] class IndexPage(parent: MasterWebUI) {
       <td>{worker.state}</td>
       <td>{worker.cores} ({worker.coresUsed} Used)</td>
       <td>
-        {Utils.memoryMegabytesToString(worker.memory)}
-        ({Utils.memoryMegabytesToString(worker.memoryUsed)} Used)
+        {Utils.megabytesToString(worker.memory)}
+        ({Utils.megabytesToString(worker.memoryUsed)} Used)
       </td>
     </tr>
   }
@@ -135,7 +135,7 @@ private[spark] class IndexPage(parent: MasterWebUI) {
       <td>
         {app.coresGranted}
       </td>
-      <td>{Utils.memoryMegabytesToString(app.desc.memoryPerSlave)}</td>
+      <td>{Utils.megabytesToString(app.desc.memoryPerSlave)}</td>
       <td>{DeployWebUI.formatDate(app.submitDate)}</td>
       <td>{app.desc.user}</td>
diff --git a/core/src/main/scala/spark/deploy/worker/Worker.scala b/core/src/main/scala/spark/deploy/worker/Worker.scala
index 0b5013b864..d4b58fc34e 100644
--- a/core/src/main/scala/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/spark/deploy/worker/Worker.scala
@@ -96,7 +96,7 @@ private[spark] class Worker(
 
   override def preStart() {
     logInfo("Starting Spark worker %s:%d with %d cores, %s RAM".format(
-      host, port, cores, Utils.memoryMegabytesToString(memory)))
+      host, port, cores, Utils.megabytesToString(memory)))
     sparkHome = new File(Option(System.getenv("SPARK_HOME")).getOrElse("."))
     logInfo("Spark home: " + sparkHome)
     createWorkDir()
diff --git a/core/src/main/scala/spark/deploy/worker/ui/IndexPage.scala b/core/src/main/scala/spark/deploy/worker/ui/IndexPage.scala
index 700eb22d96..b67059068b 100644
--- a/core/src/main/scala/spark/deploy/worker/ui/IndexPage.scala
+++ b/core/src/main/scala/spark/deploy/worker/ui/IndexPage.scala
@@ -65,8 +65,8 @@ private[spark] class IndexPage(parent: WorkerWebUI) {
             <li><strong>Master URL:</strong> {workerState.masterUrl}</li>
             <li><strong>Cores:</strong> {workerState.cores} ({workerState.coresUsed} Used)</li>
-            <li><strong>Memory:</strong> {Utils.memoryMegabytesToString(workerState.memory)}
-              ({Utils.memoryMegabytesToString(workerState.memoryUsed)} Used)</li>
+            <li><strong>Memory:</strong> {Utils.megabytesToString(workerState.memory)}
+              ({Utils.megabytesToString(workerState.memoryUsed)} Used)</li>
           </ul>
           <p><a href={workerState.masterWebUiUrl}>Back to Master</a></p>
@@ -97,7 +97,7 @@ private[spark] class IndexPage(parent: WorkerWebUI) {
       <td>{executor.execId}</td>
       <td>{executor.cores}</td>
-      <td>{Utils.memoryMegabytesToString(executor.memory)}</td>
+      <td>{Utils.megabytesToString(executor.memory)}</td>
diff --git a/core/src/main/scala/spark/ui/jobs/StagePage.scala b/core/src/main/scala/spark/ui/jobs/StagePage.scala
--- a/core/src/main/scala/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/spark/ui/jobs/StagePage.scala
@@ -104,7 +104,7 @@ private[spark] class StagePage(parent: JobProgressUI) {
         ms => parent.formatDuration(ms.toLong))
 
       def getQuantileCols(data: Seq[Double]) =
-        Distribution(data).get.getQuantiles().map(d => Utils.memoryBytesToString(d.toLong))
+        Distribution(data).get.getQuantiles().map(d => Utils.bytesToString(d.toLong))
 
       val shuffleReadSizes = validTasks.map {
         case(info, metrics, exception) =>
@@ -159,11 +159,11 @@ private[spark] class StagePage(parent: JobProgressUI) {
       <td>{dateFmt.format(new Date(info.launchTime))}</td>
       {if (shuffleRead) {
         <td>{metrics.flatMap{m => m.shuffleReadMetrics}.map{s =>
-          Utils.memoryBytesToString(s.remoteBytesRead)}.getOrElse("")}</td>
+          Utils.bytesToString(s.remoteBytesRead)}.getOrElse("")}</td>
       }}
       {if (shuffleWrite) {
         <td>{metrics.flatMap{m => m.shuffleWriteMetrics}.map{s =>
-          Utils.memoryBytesToString(s.shuffleBytesWritten)}.getOrElse("")}</td>
+          Utils.bytesToString(s.shuffleBytesWritten)}.getOrElse("")}</td>
       }}
       <td>{if (gcTime > 0) parent.formatDuration(gcTime) else ""}</td>
diff --git a/core/src/main/scala/spark/ui/jobs/StageTable.scala b/core/src/main/scala/spark/ui/jobs/StageTable.scala
index 19b07cceda..bdf8d91958 100644
--- a/core/src/main/scala/spark/ui/jobs/StageTable.scala
+++ b/core/src/main/scala/spark/ui/jobs/StageTable.scala
@@ -71,11 +71,11 @@ private[spark] class StageTable(val stages: Seq[Stage], val parent: JobProgressU
 
     val shuffleRead = listener.stageToShuffleRead.getOrElse(s.id, 0L) match {
       case 0 => ""
-      case b => Utils.memoryBytesToString(b)
+      case b => Utils.bytesToString(b)
     }
     val shuffleWrite = listener.stageToShuffleWrite.getOrElse(s.id, 0L) match {
       case 0 => ""
-      case b => Utils.memoryBytesToString(b)
+      case b => Utils.bytesToString(b)
     }
     val startedTasks = listener.stageToTasksActive.getOrElse(s.id, HashSet[TaskInfo]()).size
diff --git a/core/src/main/scala/spark/ui/storage/IndexPage.scala b/core/src/main/scala/spark/ui/storage/IndexPage.scala
index f76192eba8..0751f9e8f9 100644
--- a/core/src/main/scala/spark/ui/storage/IndexPage.scala
+++ b/core/src/main/scala/spark/ui/storage/IndexPage.scala
@@ -58,8 +58,8 @@ private[spark] class IndexPage(parent: BlockManagerUI) {
       <td>{rdd.numCachedPartitions}</td>
       <td>{rdd.numCachedPartitions / rdd.numPartitions.toDouble}</td>
-      <td>{Utils.memoryBytesToString(rdd.memSize)}</td>
-      <td>{Utils.memoryBytesToString(rdd.diskSize)}</td>
+      <td>{Utils.bytesToString(rdd.memSize)}</td>
+      <td>{Utils.bytesToString(rdd.diskSize)}</td>
     </tr>
   }
 }
diff --git a/core/src/main/scala/spark/ui/storage/RDDPage.scala b/core/src/main/scala/spark/ui/storage/RDDPage.scala
index 40f94b42a6..b0071994fe 100644
--- a/core/src/main/scala/spark/ui/storage/RDDPage.scala
+++ b/core/src/main/scala/spark/ui/storage/RDDPage.scala
@@ -71,11 +71,11 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
             <li>
              <strong>Memory Size:</strong>
-              {Utils.memoryBytesToString(rddInfo.memSize)}
+              {Utils.bytesToString(rddInfo.memSize)}
             </li>
             <li>
               <strong>Disk Size:</strong>
-              {Utils.memoryBytesToString(rddInfo.diskSize)}
+              {Utils.bytesToString(rddInfo.diskSize)}
             </li>
           </ul>
@@ -106,10 +106,10 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
       <td>
         {block.storageLevel.description}
       </td>
       <td>
-        {Utils.memoryBytesToString(block.memSize)}
+        {Utils.bytesToString(block.memSize)}
       </td>
       <td>
-        {Utils.memoryBytesToString(block.diskSize)}
+        {Utils.bytesToString(block.diskSize)}
       </td>
       <td>
         {locations.map(l => <span>{l}<br/></span>)}
       </td>
@@ -122,10 +122,10 @@ private[spark] class RDDPage(parent: BlockManagerUI) {
       <td>{status.blockManagerId.host + ":" + status.blockManagerId.port}</td>
       <td>
-        {Utils.memoryBytesToString(status.memUsed(prefix))}
-        ({Utils.memoryBytesToString(status.memRemaining)} Remaining)
+        {Utils.bytesToString(status.memUsed(prefix))}
+        ({Utils.bytesToString(status.memRemaining)} Remaining)
       </td>
-      <td>{Utils.memoryBytesToString(status.diskUsed(prefix))}</td>
+      <td>{Utils.bytesToString(status.diskUsed(prefix))}</td>
     </tr>
   }
 }
diff --git a/core/src/test/scala/spark/UtilsSuite.scala b/core/src/test/scala/spark/UtilsSuite.scala
index 31c3b25c50..98a6c1a1c9 100644
--- a/core/src/test/scala/spark/UtilsSuite.scala
+++ b/core/src/test/scala/spark/UtilsSuite.scala
@@ -26,14 +26,14 @@ import scala.util.Random
 
 class UtilsSuite extends FunSuite {
 
-  test("memoryBytesToString") {
-    assert(Utils.memoryBytesToString(10) === "10.0 B")
-    assert(Utils.memoryBytesToString(1500) === "1500.0 B")
-    assert(Utils.memoryBytesToString(2000000) === "1953.1 KB")
-    assert(Utils.memoryBytesToString(2097152) === "2.0 MB")
-    assert(Utils.memoryBytesToString(2306867) === "2.2 MB")
-    assert(Utils.memoryBytesToString(5368709120L) === "5.0 GB")
-    assert(Utils.memoryBytesToString(5L * 1024L * 1024L * 1024L * 1024L) === "5.0 TB")
+  test("bytesToString") {
+    assert(Utils.bytesToString(10) === "10.0 B")
+    assert(Utils.bytesToString(1500) === "1500.0 B")
+    assert(Utils.bytesToString(2000000) === "1953.1 KB")
+    assert(Utils.bytesToString(2097152) === "2.0 MB")
+    assert(Utils.bytesToString(2306867) === "2.2 MB")
+    assert(Utils.bytesToString(5368709120L) === "5.0 GB")
+    assert(Utils.bytesToString(5L * 1024L * 1024L * 1024L * 1024L) === "5.0 TB")
   }
 
   test("copyStream") {
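Because megabytesToString still delegates to bytesToString, the two helpers render equal quantities identically after the rename. A quick sanity check using the renamed API (values invented):

    // 3 GiB expressed two ways; both print "3.0 GB".
    Utils.bytesToString(3L * 1024 * 1024 * 1024) // "3.0 GB"
    Utils.megabytesToString(3L * 1024)           // "3.0 GB"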