author     Sandeep <sandeep@techaddict.me>          2014-04-24 15:07:23 -0700
committer  Reynold Xin <rxin@apache.org>            2014-04-24 15:07:23 -0700
commit     a03ac222d84025a1036750e1179136a13f75dea7 (patch)
tree       cc7f041b35b7804b7d62520f279cc6e53e40d73c /core
parent     c5c1916dd1b77e22759d58b5b361c56672983e3e (diff)
Fix Scala Style
Any comments are welcome

Author: Sandeep <sandeep@techaddict.me>

Closes #531 from techaddict/stylefix-1 and squashes the following commits:

7492730 [Sandeep] Pass 4
98b2428 [Sandeep] fix rxin suggestions
b5e2e6f [Sandeep] Pass 3
05932d7 [Sandeep] fix if else styling 2
08690e5 [Sandeep] fix if else styling
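For reference, below is a minimal Scala sketch (not part of the patch) of the two conventions this commit enforces: a multi-line conditional gets every branch body in braces, with "} else {" and "} else if (...) {" kept on a single line, while a short conditional that fits on one line may stay unbraced. The object and method names are illustrative only, loosely echoing the LogPage and RDDSuite hunks in the diff.

object StyleSketch {
  // Multi-branch conditional: one branch per line, each body braced,
  // closing brace and the next keyword sharing a line.
  def clampOffset(getOffset: Long, logLength: Long): Long = {
    if (getOffset < 0) {
      0L
    } else if (getOffset > logLength) {
      logLength
    } else {
      getOffset
    }
  }

  // A conditional short enough to fit on one line may stay unbraced,
  // as in the RDDSuite hunk at the end of this diff.
  def pick(i: Int, a: Int, b: Int): Int = if (i < 4) a else b
}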
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/Accumulators.scala                |  7
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala |  3
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/Master.scala        |  3
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala  |  8
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala    | 16
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockManager.scala        |  8
-rw-r--r--  core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala   | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/util/FileLogger.scala             |  4
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala                  |  3
-rw-r--r--  core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala                |  4
10 files changed, 40 insertions(+), 28 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/Accumulators.scala b/core/src/main/scala/org/apache/spark/Accumulators.scala
index d5f3e3f6ec..6d652faae1 100644
--- a/core/src/main/scala/org/apache/spark/Accumulators.scala
+++ b/core/src/main/scala/org/apache/spark/Accumulators.scala
@@ -104,8 +104,11 @@ class Accumulable[R, T] (
* Set the accumulator's value; only allowed on master.
*/
def value_= (newValue: R) {
- if (!deserialized) value_ = newValue
- else throw new UnsupportedOperationException("Can't assign accumulator value in task")
+ if (!deserialized) {
+ value_ = newValue
+ } else {
+ throw new UnsupportedOperationException("Can't assign accumulator value in task")
+ }
}
/**
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index cc976565cc..c3e8c6b8c6 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -66,8 +66,7 @@ private[spark] class SparkSubmitArguments(args: Array[String]) {
if (k.startsWith("spark")) {
defaultProperties(k) = v
if (verbose) SparkSubmit.printStream.println(s"Adding default property: $k=$v")
- }
- else {
+ } else {
SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
}
}
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index 81f990bfa6..fdb633bd33 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -237,8 +237,7 @@ private[spark] class Master(
if (waitingDrivers.contains(d)) {
waitingDrivers -= d
self ! DriverStateChanged(driverId, DriverState.KILLED, None)
- }
- else {
+ } else {
// We just notify the worker to kill the driver here. The final bookkeeping occurs
// on the return path when the worker submits a state change back to the master
// to notify it that the driver was successfully killed.
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala
index f918b42c83..662d37871e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala
@@ -91,9 +91,11 @@ private[spark] class DriverRunner(
}
val state =
- if (killed) { DriverState.KILLED }
- else if (finalException.isDefined) { DriverState.ERROR }
- else {
+ if (killed) {
+ DriverState.KILLED
+ } else if (finalException.isDefined) {
+ DriverState.ERROR
+ } else {
finalExitCode match {
case Some(0) => DriverState.FINISHED
case _ => DriverState.FAILED
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
index fec1207948..8381f59672 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
@@ -89,8 +89,7 @@ private[spark] class LogPage(parent: WorkerWebUI) extends WebUIPage("logPage") {
Previous {Utils.bytesToString(math.min(byteLength, startByte))}
</button>
</a>
- }
- else {
+ } else {
<button type="button" class="btn btn-default" disabled="disabled">
Previous 0 B
</button>
@@ -104,8 +103,7 @@ private[spark] class LogPage(parent: WorkerWebUI) extends WebUIPage("logPage") {
Next {Utils.bytesToString(math.min(byteLength, logLength - endByte))}
</button>
</a>
- }
- else {
+ } else {
<button type="button" class="btn btn-default" disabled="disabled">
Next 0 B
</button>
@@ -137,9 +135,13 @@ private[spark] class LogPage(parent: WorkerWebUI) extends WebUIPage("logPage") {
val logLength = file.length()
val getOffset = offset.getOrElse(logLength - defaultBytes)
val startByte =
- if (getOffset < 0) 0L
- else if (getOffset > logLength) logLength
- else getOffset
+ if (getOffset < 0) {
+ 0L
+ } else if (getOffset > logLength) {
+ logLength
+ } else {
+ getOffset
+ }
val logPageLength = math.min(byteLength, maxBytes)
val endByte = math.min(startByte + logPageLength, logLength)
(startByte, endByte)
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
index ccd5c5320a..02ba5ecf52 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
@@ -281,7 +281,9 @@ private[spark] class BlockManager(
val onDiskSize = status.diskSize
master.updateBlockInfo(
blockManagerId, blockId, storageLevel, inMemSize, onDiskSize, inTachyonSize)
- } else true
+ } else {
+ true
+ }
}
/**
@@ -676,7 +678,7 @@ private[spark] class BlockManager(
tachyonStore.putValues(blockId, iterator, level, false)
case ArrayBufferValues(array) =>
tachyonStore.putValues(blockId, array, level, false)
- case ByteBufferValues(bytes) =>
+ case ByteBufferValues(bytes) =>
bytes.rewind()
tachyonStore.putBytes(blockId, bytes, level)
}
@@ -695,7 +697,7 @@ private[spark] class BlockManager(
diskStore.putValues(blockId, iterator, level, askForBytes)
case ArrayBufferValues(array) =>
diskStore.putValues(blockId, array, level, askForBytes)
- case ByteBufferValues(bytes) =>
+ case ByteBufferValues(bytes) =>
bytes.rewind()
diskStore.putBytes(blockId, bytes, level)
}
diff --git a/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
index b9f4a5d720..1b2b1932e0 100644
--- a/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
+++ b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
@@ -43,8 +43,11 @@ private[spark] class BoundedPriorityQueue[A](maxSize: Int)(implicit ord: Orderin
}
override def +=(elem: A): this.type = {
- if (size < maxSize) underlying.offer(elem)
- else maybeReplaceLowest(elem)
+ if (size < maxSize) {
+ underlying.offer(elem)
+ } else {
+ maybeReplaceLowest(elem)
+ }
this
}
@@ -59,7 +62,8 @@ private[spark] class BoundedPriorityQueue[A](maxSize: Int)(implicit ord: Orderin
if (head != null && ord.gt(a, head)) {
underlying.poll()
underlying.offer(a)
- } else false
+ } else {
+ false
+ }
}
}
-
diff --git a/core/src/main/scala/org/apache/spark/util/FileLogger.scala b/core/src/main/scala/org/apache/spark/util/FileLogger.scala
index 7d47b2a72a..1ed3b70bb2 100644
--- a/core/src/main/scala/org/apache/spark/util/FileLogger.scala
+++ b/core/src/main/scala/org/apache/spark/util/FileLogger.scala
@@ -113,7 +113,9 @@ private[spark] class FileLogger(
* @param withTime Whether to prepend message with a timestamp
*/
def log(msg: String, withTime: Boolean = false) {
- val writeInfo = if (!withTime) msg else {
+ val writeInfo = if (!withTime) {
+ msg
+ } else {
val date = new Date(System.currentTimeMillis())
dateFormat.get.format(date) + ": " + msg
}
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index a3af4e7b91..d333e2a88c 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -811,8 +811,7 @@ private[spark] object Utils extends Logging {
} else {
el.getMethodName
}
- }
- else {
+ } else {
firstUserLine = el.getLineNumber
firstUserFile = el.getFileName
firstUserClass = el.getClassName
diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index d7c90346d8..2676558bfc 100644
--- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -381,8 +381,8 @@ class RDDSuite extends FunSuite with SharedSparkContext {
val prng42 = new Random(42)
val prng43 = new Random(43)
Array(1, 2, 3, 4, 5, 6).filter{i =>
- if (i < 4) 0 == prng42.nextInt(3)
- else 0 == prng43.nextInt(3)}
+ if (i < 4) 0 == prng42.nextInt(3) else 0 == prng43.nextInt(3)
+ }
}
assert(sample.size === checkSample.size)
for (i <- 0 until sample.size) assert(sample(i) === checkSample(i))