diff options
author | Josh Rosen <joshrosen@databricks.com> | 2016-09-11 21:51:22 -0700 |
---|---|---|
committer | Shixiong Zhu <shixiong@databricks.com> | 2016-09-11 21:51:22 -0700 |
commit | 72eec70bdbf6fb67c977463db5d8d95dd3040ae8 (patch) | |
tree | fa064bf46fae4e25be130a44d29f0a56527a6713 /core/src/main | |
parent | 767d48076971f6f1e2c93ee540a9b2e5e465631b (diff) | |
download | spark-72eec70bdbf6fb67c977463db5d8d95dd3040ae8.tar.gz spark-72eec70bdbf6fb67c977463db5d8d95dd3040ae8.tar.bz2 spark-72eec70bdbf6fb67c977463db5d8d95dd3040ae8.zip |
[SPARK-17486] Remove unused TaskMetricsUIData.updatedBlockStatuses field
The `TaskMetricsUIData.updatedBlockStatuses` field is assigned to but never read, increasing the memory consumption of the web UI. We should remove this field.
Author: Josh Rosen <joshrosen@databricks.com>
Closes #15038 from JoshRosen/remove-updated-block-statuses-from-TaskMetricsUIData.
Diffstat (limited to 'core/src/main')
-rw-r--r-- | core/src/main/scala/org/apache/spark/ui/jobs/UIData.scala | 3 |
1 file changed, 0 insertions, 3 deletions
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/UIData.scala b/core/src/main/scala/org/apache/spark/ui/jobs/UIData.scala
index 66b88129ee..74bca9931a 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/UIData.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/UIData.scala
@@ -23,7 +23,6 @@ import scala.collection.mutable.{HashMap, LinkedHashMap}
 import org.apache.spark.JobExecutionStatus
 import org.apache.spark.executor.{ShuffleReadMetrics, ShuffleWriteMetrics, TaskMetrics}
 import org.apache.spark.scheduler.{AccumulableInfo, TaskInfo}
-import org.apache.spark.storage.{BlockId, BlockStatus}
 import org.apache.spark.util.AccumulatorContext
 import org.apache.spark.util.collection.OpenHashSet
@@ -145,7 +144,6 @@
       memoryBytesSpilled = m.memoryBytesSpilled,
       diskBytesSpilled = m.diskBytesSpilled,
       peakExecutionMemory = m.peakExecutionMemory,
-      updatedBlockStatuses = m.updatedBlockStatuses.toList,
       inputMetrics = InputMetricsUIData(m.inputMetrics.bytesRead, m.inputMetrics.recordsRead),
       outputMetrics = OutputMetricsUIData(m.outputMetrics.bytesWritten,
         m.outputMetrics.recordsWritten),
@@ -193,7 +191,6 @@
       memoryBytesSpilled: Long,
       diskBytesSpilled: Long,
       peakExecutionMemory: Long,
-      updatedBlockStatuses: Seq[(BlockId, BlockStatus)],
       inputMetrics: InputMetricsUIData,
       outputMetrics: OutputMetricsUIData,
       shuffleReadMetrics: ShuffleReadMetricsUIData,