author    Dongjoon Hyun <dongjoon@apache.org>       2016-03-10 15:57:22 -0800
committer Andrew Or <andrew@databricks.com>         2016-03-10 15:57:22 -0800
commit    91fed8e9c57764eca9463d129ecd68196db7f566 (patch)
tree      b06c678dc15258af92116019760e6b9c98d81c2d /core
parent    81d48532d954a8aea28d7e1fb3aa32a78c708b63 (diff)
[SPARK-3854][BUILD] Scala style: require spaces before `{`.
## What changes were proposed in this pull request?

Since the opening curly brace, `{`, has many usages as discussed in [SPARK-3854](https://issues.apache.org/jira/browse/SPARK-3854), this PR adds a ScalaStyle rule to prevent the `){` pattern for the majority case shown below and fixes the code accordingly. If we enforce this in ScalaStyle from now on, it will improve Scala code quality and reduce review time.

```
// Correct:
if (true) {
  println("Wow!")
}

// Incorrect:
if (true){
  println("Wow!")
}
```

IntelliJ also shows new warnings based on this.

## How was this patch tested?

Pass the Jenkins ScalaStyle test.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #11637 from dongjoon-hyun/SPARK-3854.
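For reference, ScalaStyle rules of this kind are declared in `scalastyle-config.xml` via a `RegexChecker`. Below is a minimal sketch; the `customId`, message wording, and exact regex are illustrative assumptions, not necessarily what this PR merged:

```xml
<!-- Sketch: flag a ')' immediately followed by '{', e.g. "if (true){". -->
<check customId="SpaceBeforeLeftBrace" level="error"
       class="org.scalastyle.file.RegexChecker" enabled="true">
  <parameters>
    <!-- Regex matching the forbidden "){" pattern. -->
    <parameter name="regex">\)\{</parameter>
  </parameters>
  <customMessage>Put a single space between ')' and '{'.</customMessage>
</check>
```

Note that a plain regex check like this also matches `){` inside string literals and comments, so occasional `// scalastyle:off` / `// scalastyle:on` escapes may be needed around intentional occurrences.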
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/RDD.scala                              | 2 +-
-rw-r--r--  core/src/main/scala/org/apache/spark/rpc/RpcTimeout.scala                       | 2 +-
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala            | 2 +-
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala          | 2 +-
-rw-r--r--  core/src/main/scala/org/apache/spark/status/api/v1/OneStageResource.scala       | 2 +-
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala               | 2 +-
-rw-r--r--  core/src/test/scala/org/apache/spark/AccumulatorSuite.scala                     | 2 +-
-rw-r--r--  core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala                | 2 +-
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala | 2 +-
9 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index 8bf4489e1a..8782fcda16 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -722,7 +722,7 @@ abstract class RDD[T: ClassTag](
* An example of pipe the RDD data of groupBy() in a streaming way,
* instead of constructing a huge String to concat all the elements:
 * def printRDDElement(record:(String, Seq[String]), f:String=>Unit) =
- * for (e <- record._2){f(e)}
+ * for (e <- record._2) {f(e)}
* @param separateWorkingDir Use separate working directories for each task.
* @return the result RDD
*/
diff --git a/core/src/main/scala/org/apache/spark/rpc/RpcTimeout.scala b/core/src/main/scala/org/apache/spark/rpc/RpcTimeout.scala
index 8b4ebf34ba..2950df62bf 100644
--- a/core/src/main/scala/org/apache/spark/rpc/RpcTimeout.scala
+++ b/core/src/main/scala/org/apache/spark/rpc/RpcTimeout.scala
@@ -119,7 +119,7 @@ private[spark] object RpcTimeout {
// Find the first set property or use the default value with the first property
val itr = timeoutPropList.iterator
var foundProp: Option[(String, String)] = None
- while (itr.hasNext && foundProp.isEmpty){
+ while (itr.hasNext && foundProp.isEmpty) {
val propKey = itr.next()
conf.getOption(propKey).foreach { prop => foundProp = Some(propKey, prop) }
}
diff --git a/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
index 8235b10245..def0aac720 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
@@ -173,7 +173,7 @@ object InputFormatInfo {
for (inputSplit <- formats) {
val splits = inputSplit.findPreferredLocations()
- for (split <- splits){
+ for (split <- splits) {
val location = split.hostLocation
val set = nodeToSplit.getOrElseUpdate(location, new HashSet[SplitInfo])
set += split
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
index 8b2f4973ef..36df032c25 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
@@ -623,7 +623,7 @@ private[spark] object TaskSchedulerImpl {
val containerList: ArrayBuffer[T] = map.getOrElse(key, null)
assert(containerList != null)
// Get the index'th entry for this host - if present
- if (index < containerList.size){
+ if (index < containerList.size) {
retval += containerList.apply(index)
found = true
}
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/OneStageResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/OneStageResource.scala
index f9812f06cf..3e6d2942d0 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/OneStageResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/OneStageResource.scala
@@ -33,7 +33,7 @@ private[v1] class OneStageResource(ui: SparkUI) {
@GET
@Path("")
def stageData(@PathParam("stageId") stageId: Int): Seq[StageData] = {
- withStage(stageId){ stageAttempts =>
+ withStage(stageId) { stageAttempts =>
stageAttempts.map { stage =>
AllStagesResource.stageUiToStageData(stage.status, stage.info, stage.ui,
includeDetails = true)
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
index 69ac37511e..cae7c9ed95 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
@@ -44,7 +44,7 @@ class BlockManagerId private (
def executorId: String = executorId_
- if (null != host_){
+ if (null != host_) {
Utils.checkHost(host_, "Expected hostname")
assert (port_ > 0)
}
diff --git a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index 4ff8ae57ab..61ab24051e 100644
--- a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -57,7 +57,7 @@ class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContex
}
}
- test ("basic accumulation"){
+ test ("basic accumulation") {
sc = new SparkContext("local", "test")
val acc : Accumulator[Int] = sc.accumulator(0)
diff --git a/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala b/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
index 4399f25626..939f12f94f 100644
--- a/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
@@ -21,7 +21,7 @@ import org.apache.spark.rdd.RDD
class ImplicitOrderingSuite extends SparkFunSuite with LocalSparkContext {
// Tests that PairRDDFunctions grabs an implicit Ordering in various cases where it should.
- test("basic inference of Orderings"){
+ test("basic inference of Orderings") {
sc = new SparkContext("local", "test")
val rdd = sc.parallelize(1 to 10)
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala
index de6680c610..e24188781f 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala
@@ -476,7 +476,7 @@ class ApplicationCacheSuite extends SparkFunSuite with Logging with MockitoSugar
when(request.getRequestURI()).thenReturn("http://localhost:18080/history/local-123/jobs/job/")
when(request.getQueryString()).thenReturn("id=2")
val resp = mock[HttpServletResponse]
- when(resp.encodeRedirectURL(any())).thenAnswer(new Answer[String](){
+ when(resp.encodeRedirectURL(any())).thenAnswer(new Answer[String]() {
override def answer(invocationOnMock: InvocationOnMock): String = {
invocationOnMock.getArguments()(0).asInstanceOf[String]
}