author     Josh Rosen <joshrosen@databricks.com>      2015-12-05 08:15:30 +0800
committer  Reynold Xin <rxin@databricks.com>          2015-12-05 08:15:30 +0800
commit     b7204e1d41271d2e8443484371770936664350b1 (patch)
tree       3b09d003dce3b482282e3ae21b893fe57e607128 /core
parent     d64806b37373c5cc4fd158a9f5005743bd00bf28 (diff)
[SPARK-12112][BUILD] Upgrade to SBT 0.13.9
We should upgrade to SBT 0.13.9, since it is a requirement for using SBT's new Maven-style resolution features (which will be done in a separate patch, because it's blocked by some binary compatibility issues in the POM reader plugin).

I also upgraded Scalastyle to version 0.8.0, which was necessary to fix a Scala 2.10.5 compatibility issue (see https://github.com/scalastyle/scalastyle/issues/156). The newer Scalastyle is slightly stricter about whitespace surrounding tokens, so I fixed the new style violations.

Author: Josh Rosen <joshrosen@databricks.com>

Closes #10112 from JoshRosen/upgrade-to-sbt-0.13.9.
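The version bumps themselves live in the build definition rather than under core/, which is why they do not appear in the diffstat below. As a rough sketch of what such an upgrade typically touches in a standard sbt layout (these lines are illustrative, not the actual patch):

    # project/build.properties -- pins the sbt launcher version
    sbt.version=0.13.9

    // project/plugins.sbt -- bumps the Scalastyle sbt plugin
    addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.8.0")

The style fixes in the core/ diff below all follow the same pattern: the stricter checker requires spaces around operator tokens, so pairs written as 1->true become 1 -> true.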
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala            |  2 +-
-rw-r--r--  core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala                   | 10 +++++-----
-rw-r--r--  core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala |  8 ++++----
3 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
index ccffb36652..220b20bf7c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
@@ -45,7 +45,7 @@ private[deploy] object JsonProtocol {
("id" -> obj.id) ~
("name" -> obj.desc.name) ~
("cores" -> obj.desc.maxCores) ~
- ("user" -> obj.desc.user) ~
+ ("user" -> obj.desc.user) ~
("memoryperslave" -> obj.desc.memoryPerExecutorMB) ~
("submitdate" -> obj.submitDate.toString) ~
("state" -> obj.state.toString) ~
diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index 46ed5c04f4..007a71f87c 100644
--- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -101,21 +101,21 @@ class RDDSuite extends SparkFunSuite with SharedSparkContext {
   }
 
   test("SparkContext.union creates UnionRDD if at least one RDD has no partitioner") {
-    val rddWithPartitioner = sc.parallelize(Seq(1->true)).partitionBy(new HashPartitioner(1))
-    val rddWithNoPartitioner = sc.parallelize(Seq(2->true))
+    val rddWithPartitioner = sc.parallelize(Seq(1 -> true)).partitionBy(new HashPartitioner(1))
+    val rddWithNoPartitioner = sc.parallelize(Seq(2 -> true))
     val unionRdd = sc.union(rddWithNoPartitioner, rddWithPartitioner)
     assert(unionRdd.isInstanceOf[UnionRDD[_]])
   }
 
   test("SparkContext.union creates PartitionAwareUnionRDD if all RDDs have partitioners") {
-    val rddWithPartitioner = sc.parallelize(Seq(1->true)).partitionBy(new HashPartitioner(1))
+    val rddWithPartitioner = sc.parallelize(Seq(1 -> true)).partitionBy(new HashPartitioner(1))
     val unionRdd = sc.union(rddWithPartitioner, rddWithPartitioner)
     assert(unionRdd.isInstanceOf[PartitionerAwareUnionRDD[_]])
   }
 
   test("PartitionAwareUnionRDD raises exception if at least one RDD has no partitioner") {
-    val rddWithPartitioner = sc.parallelize(Seq(1->true)).partitionBy(new HashPartitioner(1))
-    val rddWithNoPartitioner = sc.parallelize(Seq(2->true))
+    val rddWithPartitioner = sc.parallelize(Seq(1 -> true)).partitionBy(new HashPartitioner(1))
+    val rddWithNoPartitioner = sc.parallelize(Seq(2 -> true))
     intercept[IllegalArgumentException] {
       new PartitionerAwareUnionRDD(sc, Seq(rddWithNoPartitioner, rddWithPartitioner))
     }
diff --git a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
index e428414cf6..f81fe31131 100644
--- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
@@ -144,10 +144,10 @@ class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
check(mutable.Map("one" -> 1, "two" -> 2))
check(mutable.HashMap(1 -> "one", 2 -> "two"))
check(mutable.HashMap("one" -> 1, "two" -> 2))
- check(List(Some(mutable.HashMap(1->1, 2->2)), None, Some(mutable.HashMap(3->4))))
+ check(List(Some(mutable.HashMap(1 -> 1, 2 -> 2)), None, Some(mutable.HashMap(3 -> 4))))
check(List(
mutable.HashMap("one" -> 1, "two" -> 2),
- mutable.HashMap(1->"one", 2->"two", 3->"three")))
+ mutable.HashMap(1 -> "one", 2 -> "two", 3 -> "three")))
}
test("Bug: SPARK-10251") {
@@ -174,10 +174,10 @@ class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
check(mutable.Map("one" -> 1, "two" -> 2))
check(mutable.HashMap(1 -> "one", 2 -> "two"))
check(mutable.HashMap("one" -> 1, "two" -> 2))
- check(List(Some(mutable.HashMap(1->1, 2->2)), None, Some(mutable.HashMap(3->4))))
+ check(List(Some(mutable.HashMap(1 -> 1, 2 -> 2)), None, Some(mutable.HashMap(3 -> 4))))
check(List(
mutable.HashMap("one" -> 1, "two" -> 2),
- mutable.HashMap(1->"one", 2->"two", 3->"three")))
+ mutable.HashMap(1 -> "one", 2 -> "two", 3 -> "three")))
}
test("ranges") {