aboutsummaryrefslogtreecommitdiff
path: root/core
diff options
context:
space:
mode:
authorReynold Xin <rxin@databricks.com>2016-06-10 11:08:39 -0700
committerReynold Xin <rxin@databricks.com>2016-06-10 11:08:39 -0700
commit254bc8c34e70241508bdfc8ff42a65491f5280cd (patch)
treea4d52308e6460396fce0e44ed092b1acb79d1eb8 /core
parent0ec279ffdf92853965e327a9f0f6956cacb7a23e (diff)
downloadspark-254bc8c34e70241508bdfc8ff42a65491f5280cd.tar.gz
spark-254bc8c34e70241508bdfc8ff42a65491f5280cd.tar.bz2
spark-254bc8c34e70241508bdfc8ff42a65491f5280cd.zip
[SPARK-15866] Rename listAccumulator to collectionAccumulator
## What changes were proposed in this pull request? SparkContext.listAccumulator, by Spark's convention, makes it sound like "list" is a verb and the method should return a list of accumulators. This patch renames the method and the class to collection accumulator. ## How was this patch tested? Updated test case to reflect the names. Author: Reynold Xin <rxin@databricks.com> Closes #13594 from rxin/SPARK-15866.
Diffstat (limited to 'core')
-rw-r--r--core/src/main/scala/org/apache/spark/SparkContext.scala16
-rw-r--r--core/src/main/scala/org/apache/spark/util/AccumulatorV2.scala15
-rw-r--r--core/src/test/scala/org/apache/spark/util/AccumulatorV2Suite.scala2
3 files changed, 19 insertions, 14 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 33b11ed2e6..230fabd211 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1340,21 +1340,21 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
}
/**
- * Create and register a list accumulator, which starts with empty list and accumulates inputs
- * by adding them into the inner list.
+ * Create and register a [[CollectionAccumulator]], which starts with empty list and accumulates
+ * inputs by adding them into the list.
*/
- def listAccumulator[T]: ListAccumulator[T] = {
- val acc = new ListAccumulator[T]
+ def collectionAccumulator[T]: CollectionAccumulator[T] = {
+ val acc = new CollectionAccumulator[T]
register(acc)
acc
}
/**
- * Create and register a list accumulator, which starts with empty list and accumulates inputs
- * by adding them into the inner list.
+ * Create and register a [[CollectionAccumulator]], which starts with empty list and accumulates
+ * inputs by adding them into the list.
*/
- def listAccumulator[T](name: String): ListAccumulator[T] = {
- val acc = new ListAccumulator[T]
+ def collectionAccumulator[T](name: String): CollectionAccumulator[T] = {
+ val acc = new CollectionAccumulator[T]
register(acc, name)
acc
}
diff --git a/core/src/main/scala/org/apache/spark/util/AccumulatorV2.scala b/core/src/main/scala/org/apache/spark/util/AccumulatorV2.scala
index 0b9a47c112..044dd69cc9 100644
--- a/core/src/main/scala/org/apache/spark/util/AccumulatorV2.scala
+++ b/core/src/main/scala/org/apache/spark/util/AccumulatorV2.scala
@@ -415,15 +415,20 @@ class DoubleAccumulator extends AccumulatorV2[jl.Double, jl.Double] {
}
-class ListAccumulator[T] extends AccumulatorV2[T, java.util.List[T]] {
+/**
+ * An [[AccumulatorV2 accumulator]] for collecting a list of elements.
+ *
+ * @since 2.0.0
+ */
+class CollectionAccumulator[T] extends AccumulatorV2[T, java.util.List[T]] {
private val _list: java.util.List[T] = new ArrayList[T]
override def isZero: Boolean = _list.isEmpty
- override def copyAndReset(): ListAccumulator[T] = new ListAccumulator
+ override def copyAndReset(): CollectionAccumulator[T] = new CollectionAccumulator
- override def copy(): ListAccumulator[T] = {
- val newAcc = new ListAccumulator[T]
+ override def copy(): CollectionAccumulator[T] = {
+ val newAcc = new CollectionAccumulator[T]
newAcc._list.addAll(_list)
newAcc
}
@@ -433,7 +438,7 @@ class ListAccumulator[T] extends AccumulatorV2[T, java.util.List[T]] {
override def add(v: T): Unit = _list.add(v)
override def merge(other: AccumulatorV2[T, java.util.List[T]]): Unit = other match {
- case o: ListAccumulator[T] => _list.addAll(o.value)
+ case o: CollectionAccumulator[T] => _list.addAll(o.value)
case _ => throw new UnsupportedOperationException(
s"Cannot merge ${this.getClass.getName} with ${other.getClass.getName}")
}
diff --git a/core/src/test/scala/org/apache/spark/util/AccumulatorV2Suite.scala b/core/src/test/scala/org/apache/spark/util/AccumulatorV2Suite.scala
index 439da1306f..a04644d57e 100644
--- a/core/src/test/scala/org/apache/spark/util/AccumulatorV2Suite.scala
+++ b/core/src/test/scala/org/apache/spark/util/AccumulatorV2Suite.scala
@@ -88,7 +88,7 @@ class AccumulatorV2Suite extends SparkFunSuite {
}
test("ListAccumulator") {
- val acc = new ListAccumulator[Double]
+ val acc = new CollectionAccumulator[Double]
assert(acc.value.isEmpty)
assert(acc.isZero)