author     Reynold Xin <rxin@databricks.com>   2016-06-10 11:08:39 -0700
committer  Reynold Xin <rxin@databricks.com>   2016-06-10 11:08:39 -0700
commit     254bc8c34e70241508bdfc8ff42a65491f5280cd (patch)
tree       a4d52308e6460396fce0e44ed092b1acb79d1eb8 /sql/core
parent     0ec279ffdf92853965e327a9f0f6956cacb7a23e (diff)
[SPARK-15866] Rename listAccumulator to collectionAccumulator
## What changes were proposed in this pull request?

SparkContext.listAccumulator, by Spark's convention, makes it sound like "list" is a verb and that the method should return a list of accumulators. This patch renames the method to collectionAccumulator and the class to CollectionAccumulator.

## How was this patch tested?

Updated test cases to reflect the new names.

Author: Reynold Xin <rxin@databricks.com>

Closes #13594 from rxin/SPARK-15866.
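For illustration only, a minimal sketch of the renamed API after this change; it is not part of the commit's diff, and the application name, accumulator name, and sample data are assumptions.

```scala
import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical local SparkContext for the example.
val sc = new SparkContext(new SparkConf().setAppName("collection-acc-demo").setMaster("local[*]"))

// The renamed API: collectionAccumulator replaces listAccumulator.
val acc = sc.collectionAccumulator[String]("seenValues")

// Values added on executors are collected back to the driver.
sc.parallelize(Seq("a", "b", "c")).foreach(v => acc.add(v))

// acc.value is a java.util.List[String] on the driver.
println(acc.value)

sc.stop()
```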
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryTableScanExec.scala | 8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryTableScanExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryTableScanExec.scala
index a1c2f0a8fb..ff0733102a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryTableScanExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryTableScanExec.scala
@@ -35,7 +35,7 @@ import org.apache.spark.sql.execution.{LeafExecNode, SparkPlan}
import org.apache.spark.sql.execution.metric.SQLMetrics
import org.apache.spark.sql.types.UserDefinedType
import org.apache.spark.storage.StorageLevel
-import org.apache.spark.util.{AccumulatorContext, ListAccumulator, LongAccumulator}
+import org.apache.spark.util.{AccumulatorContext, CollectionAccumulator, LongAccumulator}
private[sql] object InMemoryRelation {
@@ -67,16 +67,16 @@ private[sql] case class InMemoryRelation(
tableName: Option[String])(
@transient private[sql] var _cachedColumnBuffers: RDD[CachedBatch] = null,
@transient private[sql] var _statistics: Statistics = null,
- private[sql] var _batchStats: ListAccumulator[InternalRow] = null)
+ private[sql] var _batchStats: CollectionAccumulator[InternalRow] = null)
extends logical.LeafNode with MultiInstanceRelation {
override protected def innerChildren: Seq[QueryPlan[_]] = Seq(child)
override def producedAttributes: AttributeSet = outputSet
- private[sql] val batchStats: ListAccumulator[InternalRow] =
+ private[sql] val batchStats: CollectionAccumulator[InternalRow] =
if (_batchStats == null) {
- child.sqlContext.sparkContext.listAccumulator[InternalRow]
+ child.sqlContext.sparkContext.collectionAccumulator[InternalRow]
} else {
_batchStats
}
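For readers outside the diff context, a minimal standalone sketch of the fall-back pattern the second hunk updates: reuse a CollectionAccumulator passed into the constructor if present, otherwise register a fresh one on the SparkContext. The class and field names below are hypothetical simplifications, not the actual InMemoryRelation code.

```scala
import org.apache.spark.SparkContext
import org.apache.spark.util.CollectionAccumulator

// Hypothetical simplified holder mirroring the pattern in the diff.
class CachedStats(sc: SparkContext,
                  existing: CollectionAccumulator[Long] = null) {
  // Reuse the accumulator handed in (e.g. when the relation is copied);
  // otherwise register a new one with the SparkContext.
  val stats: CollectionAccumulator[Long] =
    if (existing == null) sc.collectionAccumulator[Long]("batchStats")
    else existing
}
```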