about summary refs log tree commit diff
path: root/core/src/test/scala/org/apache
diff options
context:
space:
mode:
Diffstat (limited to 'core/src/test/scala/org/apache')
-rw-r--r--  core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala      | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/PartitioningSuite.scala          | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala          | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala | 4
4 files changed, 9 insertions, 9 deletions
diff --git a/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala b/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
index 939f12f94f..b9d18119b5 100644
--- a/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
@@ -30,11 +30,11 @@ class ImplicitOrderingSuite extends SparkFunSuite with LocalSparkContext {
// Infer orderings after basic maps to particular types
val basicMapExpectations = ImplicitOrderingSuite.basicMapExpectations(rdd)
- basicMapExpectations.map({case (met, explain) => assert(met, explain)})
+ basicMapExpectations.foreach { case (met, explain) => assert(met, explain) }
// Infer orderings for other RDD methods
val otherRDDMethodExpectations = ImplicitOrderingSuite.otherRDDMethodExpectations(rdd)
- otherRDDMethodExpectations.map({case (met, explain) => assert(met, explain)})
+ otherRDDMethodExpectations.foreach { case (met, explain) => assert(met, explain) }
}
}
diff --git a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
index c5d4968ef7..34c017806f 100644
--- a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
@@ -71,9 +71,9 @@ class PartitioningSuite extends SparkFunSuite with SharedSparkContext with Priva
val partitionSizes = List(1, 2, 10, 100, 500, 1000, 1500)
val partitioners = partitionSizes.map(p => (p, new RangePartitioner(p, rdd)))
val decoratedRangeBounds = PrivateMethod[Array[Int]]('rangeBounds)
- partitioners.map { case (numPartitions, partitioner) =>
+ partitioners.foreach { case (numPartitions, partitioner) =>
val rangeBounds = partitioner.invokePrivate(decoratedRangeBounds())
- 1.to(1000).map { element => {
+ for (element <- 1 to 1000) {
val partition = partitioner.getPartition(element)
if (numPartitions > 1) {
if (partition < rangeBounds.size) {
@@ -85,7 +85,7 @@ class PartitioningSuite extends SparkFunSuite with SharedSparkContext with Priva
} else {
assert(partition === 0)
}
- }}
+ }
}
}
diff --git a/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala
index f8d523fa2c..59b90974ae 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala
@@ -96,7 +96,7 @@ class PipedRDDSuite extends SparkFunSuite with SharedSparkContext {
val piped = nums.pipe(Seq("cat"),
Map[String, String](),
(f: String => Unit) => {
- bl.value.map(f(_)); f("\u0001")
+ bl.value.foreach(f); f("\u0001")
},
(i: Int, f: String => Unit) => f(i + "_"))
@@ -117,7 +117,7 @@ class PipedRDDSuite extends SparkFunSuite with SharedSparkContext {
pipe(Seq("cat"),
Map[String, String](),
(f: String => Unit) => {
- bl.value.map(f(_)); f("\u0001")
+ bl.value.foreach(f); f("\u0001")
},
(i: Tuple2[String, Iterable[String]], f: String => Unit) => {
for (e <- i._2) {
diff --git a/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
index 25fc15dd54..fd9add7690 100644
--- a/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
@@ -171,8 +171,8 @@ class TimeStampedHashMapSuite extends SparkFunSuite {
})
test(name + " - threading safety test") {
- threads.map(_.start)
- threads.map(_.join)
+ threads.foreach(_.start())
+ threads.foreach(_.join())
assert(!error)
}
}