author     WeichenXu <WeichenXu123@outlook.com>    2016-05-18 11:48:46 +0100
committer  Sean Owen <sowen@cloudera.com>          2016-05-18 11:48:46 +0100
commit  2f9047b5eb969e0198b8a73e392642ca852ba786 (patch)
tree    152fe58ada0fa73a5a5e151b4d0ce188c65be0b5 /mllib/src/test
parent  33814f887aea339c99e14ce7f14ca6fcc6875015 (diff)
[SPARK-15322][MLLIB][CORE][SQL] Update deprecated accumulator usage to AccumulatorV2 in the Spark project
## What changes were proposed in this pull request?

I used IntelliJ IDEA to search for usages of the deprecated `SparkContext.accumulator` across the whole Spark project and updated the code, except for test code that exercises the deprecated accumulator API itself.

## How was this patch tested?

Existing unit tests.

Author: WeichenXu <WeichenXu123@outlook.com>

Closes #13112 from WeichenXu123/update_accuV2_in_mllib.
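For readers unfamiliar with the API change, the following is a minimal, self-contained sketch of the migration this patch performs; the object name, app name, and sample data are illustrative and not taken from the patch:

```scala
import org.apache.spark.{SparkConf, SparkContext}

// Illustrative sketch only: shows the deprecated accumulator API next to
// the AccumulatorV2-based replacement used throughout this patch.
object AccumulatorMigrationSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("accumulator-v2-sketch")
    val sc = new SparkContext(conf)
    val rdd = sc.parallelize(1 to 100, 4)

    // Deprecated Spark 1.x style (what the patch removes):
    //   val acc = sc.accumulator(0L)
    //   rdd.foreach { i => acc += i }

    // AccumulatorV2 style (what the patch switches to):
    val acc = sc.longAccumulator("sum")
    rdd.foreach { i => acc.add(i.toLong) } // updated on executors
    println(acc.value)                     // merged result read back on the driver

    sc.stop()
  }
}
```

`SparkContext.longAccumulator` returns a `LongAccumulator` that is already registered with the context and merged on the driver, which is why the tests in this commit only read the accumulator after the `foreach` completes.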
Diffstat (limited to 'mllib/src/test')
-rw-r--r--  mllib/src/test/scala/org/apache/spark/ml/util/StopwatchSuite.scala  8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/mllib/src/test/scala/org/apache/spark/ml/util/StopwatchSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/util/StopwatchSuite.scala
index 9e6bc7193c..141249a427 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/util/StopwatchSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/util/StopwatchSuite.scala
@@ -60,9 +60,9 @@ class StopwatchSuite extends SparkFunSuite with MLlibTestSparkContext {
test("DistributedStopwatch on executors") {
val sw = new DistributedStopwatch(sc, "sw")
val rdd = sc.parallelize(0 until 4, 4)
- val acc = sc.accumulator(0L)
+ val acc = sc.longAccumulator
rdd.foreach { i =>
- acc += checkStopwatch(sw)
+ acc.add(checkStopwatch(sw))
}
assert(!sw.isRunning)
val elapsed = sw.elapsed()
@@ -88,12 +88,12 @@ class StopwatchSuite extends SparkFunSuite with MLlibTestSparkContext {
     assert(sw.toString ===
       s"{\n  local: ${localElapsed}ms,\n  spark: ${sparkElapsed}ms\n}")
     val rdd = sc.parallelize(0 until 4, 4)
-    val acc = sc.accumulator(0L)
+    val acc = sc.longAccumulator
     rdd.foreach { i =>
       sw("local").start()
       val duration = checkStopwatch(sw("spark"))
       sw("local").stop()
-      acc += duration
+      acc.add(duration)
     }
     val localElapsed2 = sw("local").elapsed()
     assert(localElapsed2 === localElapsed)
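Beyond the built-in `longAccumulator` used in this diff, the AccumulatorV2 API referenced in the commit title can also be subclassed directly. A hedged sketch of that contract follows; the class and names are illustrative and not part of this patch:

```scala
import org.apache.spark.util.AccumulatorV2

// Illustrative only: a custom AccumulatorV2 that tracks the maximum Long seen.
class MaxLongAccumulator extends AccumulatorV2[Long, Long] {
  private var _max: Long = Long.MinValue

  override def isZero: Boolean = _max == Long.MinValue

  override def copy(): MaxLongAccumulator = {
    val acc = new MaxLongAccumulator
    acc._max = _max
    acc
  }

  override def reset(): Unit = { _max = Long.MinValue }

  // Called on executors for each value added locally.
  override def add(v: Long): Unit = { _max = math.max(_max, v) }

  // Called on the driver to merge per-task copies back together.
  override def merge(other: AccumulatorV2[Long, Long]): Unit = {
    _max = math.max(_max, other.value)
  }

  override def value: Long = _max
}

// Usage sketch: register on the driver before referencing it in a closure.
//   val maxAcc = new MaxLongAccumulator
//   sc.register(maxAcc, "maxDuration")
//   rdd.foreach { i => maxAcc.add(i.toLong) }
//   println(maxAcc.value)
```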