author     Reynold Xin <rxin@databricks.com>    2015-05-29 13:38:37 -0700
committer  Reynold Xin <rxin@databricks.com>    2015-05-29 13:38:37 -0700
commit     94f62a4979e4bc5f7bf4f5852d76977e097209e6 (patch)
tree       8bdc73d6326fab1519a192fa1b8d07c583608059 /mllib
parent     6181937f315480543d28e542d43269cfa591e9d0 (diff)
[SPARK-7940] Enforce whitespace checking for DO, TRY, CATCH, FINALLY, MATCH, LARROW, RARROW in style checker.
Author: Reynold Xin <rxin@databricks.com>

Closes #6491 from rxin/more-whitespace and squashes the following commits:

f6e63dc [Reynold Xin] [SPARK-7940] Enforce whitespace checking for DO, TRY, CATCH, FINALLY, MATCH, LARROW, RARROW in style checker.
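For context, the tokens named in the title are the style checker's token names for the do, try, catch, finally, and match keywords plus the left arrow (<-) and right arrow (=>). The following is a minimal, hypothetical Scala sketch (not code from this patch; the object and method names are invented) of the single-space convention the checker is meant to enforce around those tokens:

// Illustrative only: the spacing style enforced for the listed tokens.
object WhitespaceStyle {

  def classify(x: Int): String = {
    // MATCH and RARROW: a space around `match` and after `=>`
    x match {
      case 0 => "zero"
      case _ => "nonzero"
    }
  }

  def sumTo(n: Int): Int = {
    var total = 0
    // LARROW: a space after `<-`, i.e. `i <- 0 until n`, not `i <-0 until n`
    for (i <- 0 until n) total += i
    total
  }

  def parseOrZero(s: String): Int = {
    // TRY, CATCH, FINALLY: a space after each keyword before the brace
    try {
      s.toInt
    } catch {
      case _: NumberFormatException => 0
    } finally {
      ()
    }
  }

  def countDown(start: Int): Unit = {
    var i = start
    // DO: a space after `do`; similarly `} else {` rather than `} else{`
    do {
      i -= 1
    } while (i > 0)
  }
}

The two hunks below are exactly this kind of fix: "} else{" becomes "} else {", and "i <-0 until nClasses" becomes "i <- 0 until nClasses".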
Diffstat (limited to 'mllib')
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala                          2
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala   4
2 files changed, 3 insertions, 3 deletions
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
index ee710fc1ed..a6d1398fc2 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
@@ -83,7 +83,7 @@ class Node (
   def predict(features: Vector) : Double = {
     if (isLeaf) {
       predict.predict
-    } else{
+    } else {
       if (split.get.featureType == Continuous) {
         if (features(split.get.feature) <= split.get.threshold) {
           leftNode.get.predict(features)
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
index 966811a5a3..b1014ab7c6 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
@@ -119,7 +119,7 @@ object LogisticRegressionSuite {
       }
       // Preventing the overflow when we compute the probability
       val maxMargin = margins.max
-      if (maxMargin > 0) for (i <-0 until nClasses) margins(i) -= maxMargin
+      if (maxMargin > 0) for (i <- 0 until nClasses) margins(i) -= maxMargin
 
       // Computing the probabilities for each class from the margins.
       val norm = {
@@ -130,7 +130,7 @@ object LogisticRegressionSuite {
         }
         temp
       }
-      for (i <-0 until nClasses) probs(i) /= norm
+      for (i <- 0 until nClasses) probs(i) /= norm
 
       // Compute the cumulative probability so we can generate a random number and assign a label.
       for (i <- 1 until nClasses) probs(i) += probs(i - 1)