about summary refs log tree commit diff
path: root/mllib
diff options
context:
space:
mode:
authorJosh Rosen <joshrosen@databricks.com>2015-12-05 08:15:30 +0800
committerReynold Xin <rxin@databricks.com>2015-12-05 08:15:30 +0800
commitb7204e1d41271d2e8443484371770936664350b1 (patch)
tree3b09d003dce3b482282e3ae21b893fe57e607128 /mllib
parentd64806b37373c5cc4fd158a9f5005743bd00bf28 (diff)
downloadspark-b7204e1d41271d2e8443484371770936664350b1.tar.gz
spark-b7204e1d41271d2e8443484371770936664350b1.tar.bz2
spark-b7204e1d41271d2e8443484371770936664350b1.zip
[SPARK-12112][BUILD] Upgrade to SBT 0.13.9
We should upgrade to SBT 0.13.9, since this is a requirement in order to use SBT's new Maven-style resolution features (which will be done in a separate patch, because it's blocked by some binary compatibility issues in the POM reader plugin). I also upgraded Scalastyle to version 0.8.0, which was necessary in order to fix a Scala 2.10.5 compatibility issue (see https://github.com/scalastyle/scalastyle/issues/156). The newer Scalastyle is slightly stricter about whitespace surrounding tokens, so I fixed the new style violations. Author: Josh Rosen <joshrosen@databricks.com> Closes #10112 from JoshRosen/upgrade-to-sbt-0.13.9.
Diffstat (limited to 'mllib')
-rw-r--r--mllib/src/main/scala/org/apache/spark/ml/param/params.scala2
-rw-r--r--mllib/src/main/scala/org/apache/spark/mllib/stat/test/StreamingTestMethod.scala4
-rw-r--r--mllib/src/test/scala/org/apache/spark/ml/classification/DecisionTreeClassifierSuite.scala4
-rw-r--r--mllib/src/test/scala/org/apache/spark/ml/regression/DecisionTreeRegressorSuite.scala6
-rw-r--r--mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala14
5 files changed, 16 insertions, 14 deletions
diff --git a/mllib/src/main/scala/org/apache/spark/ml/param/params.scala b/mllib/src/main/scala/org/apache/spark/ml/param/params.scala
index d182b0a988..ee7e89edd8 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/param/params.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/param/params.scala
@@ -82,7 +82,9 @@ class Param[T](val parent: String, val name: String, val doc: String, val isVali
def w(value: T): ParamPair[T] = this -> value
/** Creates a param pair with the given value (for Scala). */
+ // scalastyle:off
def ->(value: T): ParamPair[T] = ParamPair(this, value)
+ // scalastyle:on
/** Encodes a param value into JSON, which can be decoded by [[jsonDecode()]]. */
def jsonEncode(value: T): String = {
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/stat/test/StreamingTestMethod.scala b/mllib/src/main/scala/org/apache/spark/mllib/stat/test/StreamingTestMethod.scala
index a7eaed51b4..911b4b9237 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/stat/test/StreamingTestMethod.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/stat/test/StreamingTestMethod.scala
@@ -152,8 +152,8 @@ private[stat] object StudentTTest extends StreamingTestMethod with Logging {
private[stat] object StreamingTestMethod {
// Note: after new `StreamingTestMethod`s are implemented, please update this map.
private final val TEST_NAME_TO_OBJECT: Map[String, StreamingTestMethod] = Map(
- "welch"->WelchTTest,
- "student"->StudentTTest)
+ "welch" -> WelchTTest,
+ "student" -> StudentTTest)
def getTestMethodFromName(method: String): StreamingTestMethod =
TEST_NAME_TO_OBJECT.get(method) match {
diff --git a/mllib/src/test/scala/org/apache/spark/ml/classification/DecisionTreeClassifierSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/classification/DecisionTreeClassifierSuite.scala
index 92b8f84144..fda2711fed 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/classification/DecisionTreeClassifierSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/classification/DecisionTreeClassifierSuite.scala
@@ -73,7 +73,7 @@ class DecisionTreeClassifierSuite extends SparkFunSuite with MLlibTestSparkConte
.setMaxDepth(2)
.setMaxBins(100)
.setSeed(1)
- val categoricalFeatures = Map(0 -> 3, 1-> 3)
+ val categoricalFeatures = Map(0 -> 3, 1 -> 3)
val numClasses = 2
compareAPIs(categoricalDataPointsRDD, dt, categoricalFeatures, numClasses)
}
@@ -214,7 +214,7 @@ class DecisionTreeClassifierSuite extends SparkFunSuite with MLlibTestSparkConte
.setMaxBins(2)
.setMaxDepth(2)
.setMinInstancesPerNode(2)
- val categoricalFeatures = Map(0 -> 2, 1-> 2)
+ val categoricalFeatures = Map(0 -> 2, 1 -> 2)
val numClasses = 2
compareAPIs(rdd, dt, categoricalFeatures, numClasses)
}
diff --git a/mllib/src/test/scala/org/apache/spark/ml/regression/DecisionTreeRegressorSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/regression/DecisionTreeRegressorSuite.scala
index e0d5afa7a7..6999a910c3 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/regression/DecisionTreeRegressorSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/regression/DecisionTreeRegressorSuite.scala
@@ -50,7 +50,7 @@ class DecisionTreeRegressorSuite extends SparkFunSuite with MLlibTestSparkContex
.setMaxDepth(2)
.setMaxBins(100)
.setSeed(1)
- val categoricalFeatures = Map(0 -> 3, 1-> 3)
+ val categoricalFeatures = Map(0 -> 3, 1 -> 3)
compareAPIs(categoricalDataPointsRDD, dt, categoricalFeatures)
}
@@ -59,12 +59,12 @@ class DecisionTreeRegressorSuite extends SparkFunSuite with MLlibTestSparkContex
.setImpurity("variance")
.setMaxDepth(2)
.setMaxBins(100)
- val categoricalFeatures = Map(0 -> 2, 1-> 2)
+ val categoricalFeatures = Map(0 -> 2, 1 -> 2)
compareAPIs(categoricalDataPointsRDD, dt, categoricalFeatures)
}
test("copied model must have the same parent") {
- val categoricalFeatures = Map(0 -> 2, 1-> 2)
+ val categoricalFeatures = Map(0 -> 2, 1 -> 2)
val df = TreeTests.setMetadata(categoricalDataPointsRDD, categoricalFeatures, numClasses = 0)
val model = new DecisionTreeRegressor()
.setImpurity("variance")
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
index 1a4299db4e..bf8fe1acac 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
@@ -64,7 +64,7 @@ class DecisionTreeSuite extends SparkFunSuite with MLlibTestSparkContext {
maxDepth = 2,
numClasses = 2,
maxBins = 100,
- categoricalFeaturesInfo = Map(0 -> 2, 1-> 2))
+ categoricalFeaturesInfo = Map(0 -> 2, 1 -> 2))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
val (splits, bins) = DecisionTree.findSplitsBins(rdd, metadata)
@@ -178,7 +178,7 @@ class DecisionTreeSuite extends SparkFunSuite with MLlibTestSparkContext {
maxDepth = 2,
numClasses = 100,
maxBins = 100,
- categoricalFeaturesInfo = Map(0 -> 3, 1-> 3))
+ categoricalFeaturesInfo = Map(0 -> 3, 1 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(metadata.isUnordered(featureIndex = 0))
@@ -237,7 +237,7 @@ class DecisionTreeSuite extends SparkFunSuite with MLlibTestSparkContext {
maxDepth = 2,
numClasses = 100,
maxBins = 100,
- categoricalFeaturesInfo = Map(0 -> 10, 1-> 10))
+ categoricalFeaturesInfo = Map(0 -> 10, 1 -> 10))
// 2^(10-1) - 1 > 100, so categorical features will be ordered
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
@@ -421,7 +421,7 @@ class DecisionTreeSuite extends SparkFunSuite with MLlibTestSparkContext {
numClasses = 2,
maxDepth = 2,
maxBins = 100,
- categoricalFeaturesInfo = Map(0 -> 3, 1-> 3))
+ categoricalFeaturesInfo = Map(0 -> 3, 1 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
@@ -455,7 +455,7 @@ class DecisionTreeSuite extends SparkFunSuite with MLlibTestSparkContext {
Variance,
maxDepth = 2,
maxBins = 100,
- categoricalFeaturesInfo = Map(0 -> 3, 1-> 3))
+ categoricalFeaturesInfo = Map(0 -> 3, 1 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
@@ -484,7 +484,7 @@ class DecisionTreeSuite extends SparkFunSuite with MLlibTestSparkContext {
Variance,
maxDepth = 2,
maxBins = 100,
- categoricalFeaturesInfo = Map(0 -> 2, 1-> 2))
+ categoricalFeaturesInfo = Map(0 -> 2, 1 -> 2))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
@@ -788,7 +788,7 @@ class DecisionTreeSuite extends SparkFunSuite with MLlibTestSparkContext {
val rdd = sc.parallelize(arr)
val strategy = new Strategy(algo = Classification, impurity = Gini,
- maxBins = 2, maxDepth = 2, categoricalFeaturesInfo = Map(0 -> 2, 1-> 2),
+ maxBins = 2, maxDepth = 2, categoricalFeaturesInfo = Map(0 -> 2, 1 -> 2),
numClasses = 2, minInstancesPerNode = 2)
val rootNode = DecisionTree.train(rdd, strategy).topNode