Commit metadata
author    : Sandeep Singh <sandeep@techaddict.me>   2016-05-03 11:38:43 -0700
committer : Reynold Xin <rxin@databricks.com>       2016-05-03 11:38:43 -0700
commit    : ca813330c716bed76ac0034c12f56665960a1105 (patch)
tree      : 26c14bffca2c6e365a9cf26ed7696252c12c6c67 /sql/core/src
parent    : b545d752195f6dcba4c512b8a1d5bf5b74279dc8 (diff)
download  : spark-ca813330c716bed76ac0034c12f56665960a1105.tar.gz
            spark-ca813330c716bed76ac0034c12f56665960a1105.tar.bz2
            spark-ca813330c716bed76ac0034c12f56665960a1105.zip
[SPARK-15087][CORE][SQL] Remove AccumulatorV2.localValue and keep only value
## What changes were proposed in this pull request?
Remove AccumulatorV2.localValue and keep only value
## How was this patch tested?
existing tests
Author: Sandeep Singh <sandeep@techaddict.me>
Closes #12865 from techaddict/SPARK-15087.
Diffstat (limited to 'sql/core/src')
4 files changed, 5 insertions, 5 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/metric/SQLMetrics.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/metric/SQLMetrics.scala
index 0f68aaaee1..f82e0b8bca 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/metric/SQLMetrics.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/metric/SQLMetrics.scala
@@ -34,7 +34,7 @@ class SQLMetric(val metricType: String, initValue: Long = 0L) extends Accumulato
   override def copyAndReset(): SQLMetric = new SQLMetric(metricType, initValue)

   override def merge(other: AccumulatorV2[Long, Long]): Unit = other match {
-    case o: SQLMetric => _value += o.localValue
+    case o: SQLMetric => _value += o.value
     case _ => throw new UnsupportedOperationException(
       s"Cannot merge ${this.getClass.getName} with ${other.getClass.getName}")
   }
@@ -45,7 +45,7 @@ class SQLMetric(val metricType: String, initValue: Long = 0L) extends Accumulato

   def +=(v: Long): Unit = _value += v

-  override def localValue: Long = _value
+  override def value: Long = _value

   // Provide special identifier as metadata so we can tell that this is a `SQLMetric` later
   private[spark] override def toInfo(update: Option[Any], value: Option[Any]): AccumulableInfo = {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLListener.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLListener.scala
index 9118593c0e..29c54111ea 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLListener.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLListener.scala
@@ -164,7 +164,7 @@ private[sql] class SQLListener(conf: SparkConf) extends SparkListener with Loggi
         taskEnd.taskInfo.taskId,
         taskEnd.stageId,
         taskEnd.stageAttemptId,
-        taskEnd.taskMetrics.accumulators().map(a => a.toInfo(Some(a.localValue), None)),
+        taskEnd.taskMetrics.accumulators().map(a => a.toInfo(Some(a.value), None)),
         finishTask = true)
     }
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
index 8de4d8bbd4..d41e88a0aa 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
@@ -302,7 +302,7 @@ class SQLMetricsSuite extends SparkFunSuite with SharedSQLContext {
   test("metrics can be loaded by history server") {
     val metric = SQLMetrics.createMetric(sparkContext, "zanzibar")
     metric += 10L
-    val metricInfo = metric.toInfo(Some(metric.localValue), None)
+    val metricInfo = metric.toInfo(Some(metric.value), None)
     metricInfo.update match {
       case Some(v: Long) => assert(v === 10L)
       case Some(v) => fail(s"metric value was not a Long: ${v.getClass.getName}")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala
index 964787015a..5e08658e5e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLListenerSuite.scala
@@ -366,7 +366,7 @@ class SQLListenerSuite extends SparkFunSuite with SharedSQLContext {
     // The listener should only track the ones that are actually SQL metrics.
     val sqlMetric = SQLMetrics.createMetric(sparkContext, "beach umbrella")
     val nonSqlMetric = sparkContext.accumulator[Int](0, "baseball")
-    val sqlMetricInfo = sqlMetric.toInfo(Some(sqlMetric.localValue), None)
+    val sqlMetricInfo = sqlMetric.toInfo(Some(sqlMetric.value), None)
     val nonSqlMetricInfo = nonSqlMetric.toInfo(Some(nonSqlMetric.localValue), None)
     val taskInfo = createTaskInfo(0, 0)
     taskInfo.accumulables ++= Seq(sqlMetricInfo, nonSqlMetricInfo)