-rw-r--r--  core/src/main/scala/org/apache/spark/network/nio/BlockMessage.scala  | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/network/nio/Connection.scala  | 5
-rw-r--r--  core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala  | 5
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala  | 2
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala  | 2
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala  | 4
-rw-r--r--  scalastyle-config.xml  | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/types/UTF8String.scala  | 2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala  | 2
9 files changed, 13 insertions, 15 deletions
diff --git a/core/src/main/scala/org/apache/spark/network/nio/BlockMessage.scala b/core/src/main/scala/org/apache/spark/network/nio/BlockMessage.scala
index 1a92a799d0..67a3761029 100644
--- a/core/src/main/scala/org/apache/spark/network/nio/BlockMessage.scala
+++ b/core/src/main/scala/org/apache/spark/network/nio/BlockMessage.scala
@@ -155,7 +155,7 @@ private[nio] class BlockMessage() {
   override def toString: String = {
     "BlockMessage [type = " + typ + ", id = " + id + ", level = " + level +
-      ", data = " + (if (data != null) data.remaining.toString  else "null") + "]"
+      ", data = " + (if (data != null) data.remaining.toString else "null") + "]"
   }
 }
diff --git a/core/src/main/scala/org/apache/spark/network/nio/Connection.scala b/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
index 6b898bd4bf..1499da07bb 100644
--- a/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
+++ b/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
@@ -326,15 +326,14 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
   // MUST be called within the selector loop
   def connect() {
-    try{
+    try {
       channel.register(selector, SelectionKey.OP_CONNECT)
       channel.connect(address)
       logInfo("Initiating connection to [" + address + "]")
     } catch {
-      case e: Exception => {
+      case e: Exception =>
         logError("Error connecting to " + address, e)
         callOnExceptionCallbacks(e)
-      }
     }
   }
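
A note on the change above: in Scala, a case arm in a catch block already delimits its own body, so the braces after => are redundant even for multi-statement handlers. A minimal self-contained sketch of the brace-free form (riskyConnect and the println are hypothetical stand-ins, not the real members):

    def riskyConnect(connect: () => Unit): Unit =
      try {
        connect()
      } catch {
        case e: Exception =>               // the case arm delimits its body,
          println("connect failed: " + e)  // so no braces are needed here
          throw e
      }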
diff --git a/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala b/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
index 497871ed6d..c0bca2c4bc 100644
--- a/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
+++ b/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
@@ -635,12 +635,11 @@ private[nio] class ConnectionManager(
           val message = securityMsgResp.toBufferMessage
           if (message == null) throw new IOException("Error creating security message")
           sendSecurityMessage(waitingConn.getRemoteConnectionManagerId(), message)
-        }  catch {
-          case e: Exception => {
+        } catch {
+          case e: Exception =>
             logError("Error handling sasl client authentication", e)
             waitingConn.close()
             throw new IOException("Error evaluating sasl response: ", e)
-          }
         }
       }
     }
diff --git a/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
index 7598ff617b..9e3880714a 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
@@ -86,7 +86,7 @@ class PartitionerAwareUnionRDD[T: ClassTag](
     }
     val location = if (locations.isEmpty) {
       None
-    }  else {
+    } else {
       // Find the location that maximum number of parent partitions prefer
       Some(locations.groupBy(x => x).maxBy(_._2.length)._1)
     }
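
For context on the line kept above: groupBy(x => x) buckets equal hosts together, and maxBy(_._2.length) picks the bucket with the most entries, so the expression returns the host preferred by the largest number of parent partitions. A standalone illustration (hostA and hostB are made-up values):

    val locations = Seq("hostA", "hostB", "hostA")
    val preferred = locations.groupBy(x => x).maxBy(_._2.length)._1
    // preferred == "hostA"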
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
index ee710fc1ed..a6d1398fc2 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
@@ -83,7 +83,7 @@ class Node (
   def predict(features: Vector) : Double = {
     if (isLeaf) {
       predict.predict
-    } else{
+    } else {
       if (split.get.featureType == Continuous) {
         if (features(split.get.feature) <= split.get.threshold) {
           leftNode.get.predict(features)
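
The method touched here is a recursive tree walk: a leaf returns its stored prediction, and a continuous split sends the example left when the feature value is at or below the threshold. A simplified standalone sketch of that traversal (SimpleNode is hypothetical; the real Node also handles categorical splits):

    case class SimpleNode(
        prediction: Double,
        feature: Int = -1,
        threshold: Double = 0.0,
        left: Option[SimpleNode] = None,
        right: Option[SimpleNode] = None) {
      def isLeaf: Boolean = left.isEmpty && right.isEmpty
      def predict(features: Array[Double]): Double =
        if (isLeaf) prediction
        else if (features(feature) <= threshold) left.get.predict(features)
        else right.get.predict(features)
    }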
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
index 966811a5a3..b1014ab7c6 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
@@ -119,7 +119,7 @@ object LogisticRegressionSuite {
       }
       // Preventing the overflow when we compute the probability
       val maxMargin = margins.max
-      if (maxMargin > 0) for (i <-0 until nClasses) margins(i) -= maxMargin
+      if (maxMargin > 0) for (i <- 0 until nClasses) margins(i) -= maxMargin

       // Computing the probabilities for each class from the margins.
       val norm = {
@@ -130,7 +130,7 @@ object LogisticRegressionSuite {
         }
         temp
       }
-      for (i <-0 until nClasses) probs(i) /= norm
+      for (i <- 0 until nClasses) probs(i) /= norm

       // Compute the cumulative probability so we can generate a random number and assign a label.
       for (i <- 1 until nClasses) probs(i) += probs(i - 1)
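
The maxMargin subtraction in the first hunk is the standard softmax stabilization: class probabilities are unchanged when every margin is shifted by the same constant, so subtracting the maximum keeps math.exp from overflowing. A standalone sketch of the idea:

    val margins = Array(2.0, 1000.0, 3.0)                       // raw per-class margins
    val maxMargin = margins.max
    val expShifted = margins.map(m => math.exp(m - maxMargin))  // largest exponent is 0
    val total = expShifted.sum
    val probs = expShifted.map(_ / total)                       // same result as unshifted softmax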
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index 68d980b610..68c8ce3b7e 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -144,12 +144,12 @@
  <check level="error" class="org.scalastyle.scalariform.SpaceAfterCommentStartChecker" enabled="true"></check>
  <check level="error" class="org.scalastyle.scalariform.EnsureSingleSpaceBeforeTokenChecker" enabled="true">
   <parameters>
-    <parameter name="tokens">ARROW, EQUALS</parameter>
+    <parameter name="tokens">ARROW, EQUALS, ELSE, TRY, CATCH, FINALLY, LARROW, RARROW</parameter>
   </parameters>
  </check>
  <check level="error" class="org.scalastyle.scalariform.EnsureSingleSpaceAfterTokenChecker" enabled="true">
   <parameters>
-    <parameter name="tokens">ARROW, EQUALS, COMMA, COLON, IF, WHILE, FOR</parameter>
+    <parameter name="tokens">ARROW, EQUALS, COMMA, COLON, IF, ELSE, DO, WHILE, FOR, MATCH, TRY, CATCH, FINALLY, LARROW, RARROW</parameter>
   </parameters>
  </check>
  <check level="error" class="org.scalastyle.scalariform.NotImplementedErrorUsage" enabled="true"></check>
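
With these parameters, the first checker requires exactly one space before its listed tokens and the second exactly one space after its own list (LARROW is the <- of for-comprehensions). Hypothetical snippets the updated rules would now reject:

    try{ doWork() }                 // no space after TRY
    if (done) a else{ b }           // no space after ELSE
    for (i <-0 until n) sum += i    // no space after LARROW

This is exactly the class of violation the Scala fixes in this commit clean up.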
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UTF8String.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UTF8String.scala
index bc9c37bf2d..f5d8fcced3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UTF8String.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UTF8String.scala
@@ -203,7 +203,7 @@ object UTF8String {
   def apply(s: String): UTF8String = {
     if (s != null) {
       new UTF8String().set(s)
-    } else{
+    } else {
       null
     }
   }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
index 0bdb68e8ac..2d8d950038 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
@@ -262,7 +262,7 @@ private[sql] class JDBCRDD(
   }

   private def escapeSql(value: String): String =
-    if (value == null) null  else StringUtils.replace(value, "'", "''")
+    if (value == null) null else StringUtils.replace(value, "'", "''")

   /**
    * Turns a single Filter into a String representing a SQL expression.
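
escapeSql applies the standard SQL convention of doubling any embedded single quotes, passing null inputs through unchanged; StringUtils.replace is the Apache Commons Lang helper that replaces every occurrence. For example:

    escapeSql("O'Brien")   // returns "O''Brien", safe inside a quoted SQL literal
    escapeSql(null)        // returns null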