commit 27209252f09ff73c58e60c6df8aaba73b308088c
Author:    Xin Ren <iamshrek@126.com>      2016-08-30 11:24:55 +0100
Committer: Sean Owen <sowen@cloudera.com>  2016-08-30 11:24:55 +0100
Tree:      23eb0a48d34fab230d8a48a0f0303299ace47c0d
Parent:    d4eee9932edf1a489d7fe9120a0f003150834df6
[MINOR][MLLIB][SQL] Clean up unused variables and unused imports
## What changes were proposed in this pull request?

Clean up unused variables and unused import statements, remove unnecessary `return` and `toArray` calls, and make some more style improvements noticed while walking through the code examples.

## How was this patch tested?

Tested manually on a local laptop.

Author: Xin Ren <iamshrek@126.com>

Closes #14836 from keypointt/codeWalkThroughML.
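As a hypothetical illustration of the kinds of cleanups the message describes (the method names below are invented for this sketch, not taken from the patch), an explicit `return` and a redundant `toArray` can both be dropped without changing behavior:

```scala
// Hypothetical example, not from this patch: a method with an
// unnecessary `return` and a redundant `toArray` conversion.
def squares(xs: Seq[Int]): Seq[Int] = {
  return xs.map(x => x * x).toArray.toSeq
}

// After cleanup: the last expression is already the method's result,
// so the mapped Seq can be returned directly without materializing
// an intermediate Array.
def squaresCleaned(xs: Seq[Int]): Seq[Int] = xs.map(x => x * x)
```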
Diffstat (limited to 'core/src/test/scala/org/apache')
 core/src/test/scala/org/apache/spark/AccumulatorSuite.scala | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index 6cbd5ae5d4..6d03ee091e 100644
--- a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -100,7 +100,9 @@ class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContext
val acc: Accumulator[Int] = sc.accumulator(0)
val d = sc.parallelize(1 to 20)
- an [Exception] should be thrownBy {d.foreach{x => acc.value = x}}
+ intercept[SparkException] {
+ d.foreach(x => acc.value = x)
+ }
}
test ("add value to collection accumulators") {
@@ -171,7 +173,7 @@ class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContext
d.foreach {
x => acc.localValue ++= x
}
- acc.value should be ( (0 to maxI).toSet)
+ acc.value should be ((0 to maxI).toSet)
resetSparkContext()
}
}
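For context on the first hunk: ScalaTest's `intercept[T]` both asserts that the block throws a `T` (or a subtype) and returns the thrown exception, so it pins the expected exception type more precisely than `an [Exception] should be thrownBy`, which matches any `Exception`. A minimal sketch of the pattern, assuming plain ScalaTest 3.x (`AnyFunSuite`) rather than Spark's own `SparkFunSuite` harness, with an invented test case:

```scala
import org.scalatest.funsuite.AnyFunSuite

class InterceptSketchSuite extends AnyFunSuite {
  test("intercept pins the exception type and returns the exception") {
    // Fails the test unless the block throws IllegalArgumentException
    // (or a subtype), and returns the caught exception so further
    // assertions can be made on it.
    val e = intercept[IllegalArgumentException] {
      require(false, "value not readable here")
    }
    assert(e.getMessage.contains("value not readable here"))
  }
}
```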