commit    f14922cff84b1e0984ba4597d764615184126bdc (patch)
tree      fca9887aaa9c8175530cfef897bee52ef9a95020 /core
parent    b3b9ad23cffc1c6d83168487093e4c03d49e1c2c (diff)
author    Kousuke Saruta <sarutak@oss.nttdata.co.jp>  2016-01-12 19:24:50 -0800
committer Reynold Xin <rxin@databricks.com>           2016-01-12 19:24:50 -0800
[SPARK-12692][BUILD][CORE] Scala style: Fix the style violation (Space before ",")
Fix the style violation (space before "," and ":"). This PR is a follow-up for #10643.

Author: Kousuke Saruta <sarutak@oss.nttdata.co.jp>

Closes #10719 from sarutak/SPARK-12692-followup-core.
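For context, the rule being tightened here simply disallows whitespace immediately before a comma or a colon. A minimal sketch of the before/after shape (illustrative example, not part of this patch):

    object StyleDemo {
      // Violates the rule (space before ':' and before ','):
      //   def add(x : Int , y : Int) : Int = x + y
      // Compliant after the fix:
      def add(x: Int, y: Int): Int = x + y
    }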
Diffstat (limited to 'core')
 core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala                                  | 2 +-
 core/src/main/scala/org/apache/spark/rdd/RDD.scala                                           | 2 +-
 core/src/main/scala/org/apache/spark/status/api/v1/api.scala                                 | 2 +-
 core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala                                | 2 +-
 core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackendSuite.scala | 2 +-
 5 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
index 18e8cddbc4..57108dcedc 100644
--- a/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
@@ -50,7 +50,7 @@ class CartesianRDD[T: ClassTag, U: ClassTag](
     sc: SparkContext,
     var rdd1 : RDD[T],
     var rdd2 : RDD[U])
-  extends RDD[Pair[T, U]](sc, Nil)
+  extends RDD[(T, U)](sc, Nil)
   with Serializable {
 
   val numPartitionsInRdd2 = rdd2.partitions.length
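The only non-whitespace change above swaps the deprecated Pair alias for plain tuple syntax. On the Scala versions Spark supported at the time, Predef.Pair is just an alias for Tuple2 (deprecated since Scala 2.11, removed later), so RDD[Pair[T, U]] and RDD[(T, U)] denote the same type. A small standalone sketch of the equivalence (example code, not part of this patch):

    object PairAliasDemo extends App {
      // Pair[A, B] is Predef's alias for Tuple2[A, B]; both annotations
      // below name the same type, so the assignment needs no conversion.
      val p: Pair[Int, String] = (1, "one") // deprecation warning on 2.11
      val t: (Int, String) = p
      assert(p == t)
    }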
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index 53e01a0dbf..9dad794414 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -95,7 +95,7 @@ abstract class RDD[T: ClassTag](
   /** Construct an RDD with just a one-to-one dependency on one parent */
   def this(@transient oneParent: RDD[_]) =
-    this(oneParent.context , List(new OneToOneDependency(oneParent)))
+    this(oneParent.context, List(new OneToOneDependency(oneParent)))
 
   private[spark] def conf = sc.conf
 
   // =======================================================================
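This auxiliary constructor is the path narrow transformations take: the child RDD records a single OneToOneDependency on its parent, so each output partition maps to exactly one parent partition. A hedged usage sketch (standalone example, not part of this patch):

    import org.apache.spark.{SparkConf, SparkContext}

    object OneToOneDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("demo").setMaster("local[2]"))
        val parent = sc.parallelize(1 to 4, numSlices = 2)
        val child = parent.map(_ * 2) // built via the one-parent constructor above
        // The child carries exactly one narrow dependency back to its parent.
        assert(child.dependencies.head.rdd == parent)
        sc.stop()
      }
    }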
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/api.scala b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
index 5feb1dc2e5..9cd52d6c2b 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
@@ -115,7 +115,7 @@ class StageData private[spark](
     val status: StageStatus,
     val stageId: Int,
     val attemptId: Int,
-    val numActiveTasks: Int ,
+    val numActiveTasks: Int,
     val numCompleteTasks: Int,
     val numFailedTasks: Int,
diff --git a/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
index 4e72b89bfc..76451788d2 100644
--- a/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
@@ -178,7 +178,7 @@ class DoubleRDDSuite extends SparkFunSuite with SharedSparkContext {
test("WorksWithOutOfRangeWithInfiniteBuckets") {
// Verify that out of range works with two buckets
val rdd = sc.parallelize(Seq(10.01, -0.01, Double.NaN))
- val buckets = Array(-1.0/0.0 , 0.0, 1.0/0.0)
+ val buckets = Array(-1.0/0.0, 0.0, 1.0/0.0)
val histogramResults = rdd.histogram(buckets)
val expectedHistogramResults = Array(1, 1)
assert(histogramResults === expectedHistogramResults)
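A side note on the test data: -1.0/0.0 and 1.0/0.0 evaluate to Double.NegativeInfinity and Double.PositiveInfinity, so the two buckets cover the whole real line, while Double.NaN compares false against every bound and lands in neither bucket, which is why the expected counts are (1, 1). A standalone sketch of the same behavior (example code, not part of this patch):

    import org.apache.spark.{SparkConf, SparkContext}

    object InfiniteBucketsDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("hist").setMaster("local[2]"))
        val rdd = sc.parallelize(Seq(10.01, -0.01, Double.NaN))
        // Two buckets: [-Inf, 0.0) and [0.0, +Inf].
        val counts = rdd.histogram(Array(Double.NegativeInfinity, 0.0, Double.PositiveInfinity))
        println(counts.mkString(", ")) // 1, 1 -- NaN falls in no bucket
        sc.stop()
      }
    }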
diff --git a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackendSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackendSuite.scala
index 504e5780f3..e111e2e9f6 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackendSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackendSuite.scala
@@ -76,7 +76,7 @@ class MesosSchedulerBackendSuite extends SparkFunSuite with LocalSparkContext wi
test("check spark-class location correctly") {
val conf = new SparkConf
- conf.set("spark.mesos.executor.home" , "/mesos-home")
+ conf.set("spark.mesos.executor.home", "/mesos-home")
val listenerBus = mock[LiveListenerBus]
listenerBus.post(