Diffstat (limited to 'mllib')
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeans.scala                    |  8
-rw-r--r--  mllib/src/test/java/org/apache/spark/mllib/recommendation/JavaALSSuite.java            |  4
-rw-r--r--  mllib/src/test/java/org/apache/spark/mllib/regression/JavaIsotonicRegressionSuite.java | 18
3 files changed, 15 insertions(+), 15 deletions(-)
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeans.scala b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeans.scala
index e47c4db629..ca11ede4cc 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeans.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeans.scala
@@ -20,7 +20,7 @@ package org.apache.spark.mllib.clustering
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.Logging
-import org.apache.spark.annotation.{Experimental, Since}
+import org.apache.spark.annotation.Since
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.apache.spark.mllib.linalg.BLAS.{axpy, scal}
import org.apache.spark.mllib.util.MLUtils
@@ -107,7 +107,7 @@ class KMeans private (
* Number of runs of the algorithm to execute in parallel.
*/
@Since("1.4.0")
- @deprecated("Support for runs is deprecated. This param will have no effect in 1.7.0.", "1.6.0")
+ @deprecated("Support for runs is deprecated. This param will have no effect in 2.0.0.", "1.6.0")
def getRuns: Int = runs
/**
@@ -117,7 +117,7 @@ class KMeans private (
* return the best clustering found over any run. Default: 1.
*/
@Since("0.8.0")
- @deprecated("Support for runs is deprecated. This param will have no effect in 1.7.0.", "1.6.0")
+ @deprecated("Support for runs is deprecated. This param will have no effect in 2.0.0.", "1.6.0")
def setRuns(runs: Int): this.type = {
if (runs <= 0) {
throw new IllegalArgumentException("Number of runs must be positive")
@@ -431,7 +431,7 @@ class KMeans private (
val rs = (0 until runs).filter { r =>
rand.nextDouble() < 2.0 * c(r) * k / sumCosts(r)
}
- if (rs.length > 0) Some(p, rs) else None
+ if (rs.length > 0) Some((p, rs)) else None
}
}.collect()
mergeNewCenters()
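Note on the KMeans.scala hunks above: the deprecation messages now point at 2.0.0, the release that actually follows 1.6.x, instead of the never-released 1.7.0, and the pair handed to Some gains explicit parentheses. Relying on auto-tupling of an argument list, as Some(p, rs) does, draws a compiler warning in recent Scala versions, while Some((p, rs)) builds the tuple explicitly. A minimal sketch of the two forms, using placeholder values rather than the real KMeans variables:

    // Sketch only: `point` and `runIndices` stand in for the actual `p` and `rs`.
    val point = Array(1.0, 2.0)              // placeholder value
    val runIndices = Seq(0, 2)               // placeholder value
    val adapted  = Some(point, runIndices)   // argument list is auto-tupled; the compiler flags the adaptation
    val explicit = Some((point, runIndices)) // the Tuple2 is constructed explicitly, as in the change above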
diff --git a/mllib/src/test/java/org/apache/spark/mllib/recommendation/JavaALSSuite.java b/mllib/src/test/java/org/apache/spark/mllib/recommendation/JavaALSSuite.java
index 271dda4662..a6631ed7eb 100644
--- a/mllib/src/test/java/org/apache/spark/mllib/recommendation/JavaALSSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/recommendation/JavaALSSuite.java
@@ -56,10 +56,10 @@ public class JavaALSSuite implements Serializable {
double matchThreshold,
boolean implicitPrefs,
DoubleMatrix truePrefs) {
- List<Tuple2<Integer, Integer>> localUsersProducts = new ArrayList(users * products);
+ List<Tuple2<Integer, Integer>> localUsersProducts = new ArrayList<>(users * products);
for (int u=0; u < users; ++u) {
for (int p=0; p < products; ++p) {
- localUsersProducts.add(new Tuple2<Integer, Integer>(u, p));
+ localUsersProducts.add(new Tuple2<>(u, p));
}
}
JavaPairRDD<Integer, Integer> usersProducts = sc.parallelizePairs(localUsersProducts);
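Note on the JavaALSSuite change: swapping the raw new ArrayList(...) and the spelled-out new Tuple2&lt;Integer, Integer&gt;(...) for the Java 7 diamond operator removes an unchecked-assignment warning without changing behavior; the test still builds every (user, product) pair before parallelizing it. For reference, a sketch of the same grid construction written in Scala, assuming the test's sc, users and products are in scope:

    // Sketch only: the Scala analogue of the (user, product) grid built in the Java test.
    val usersProducts = sc.parallelize(
      for (u <- 0 until users; p <- 0 until products) yield (u, p))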
diff --git a/mllib/src/test/java/org/apache/spark/mllib/regression/JavaIsotonicRegressionSuite.java b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaIsotonicRegressionSuite.java
index 32c2f4f339..3db9b39e74 100644
--- a/mllib/src/test/java/org/apache/spark/mllib/regression/JavaIsotonicRegressionSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaIsotonicRegressionSuite.java
@@ -36,11 +36,11 @@ import org.apache.spark.api.java.JavaSparkContext;
public class JavaIsotonicRegressionSuite implements Serializable {
private transient JavaSparkContext sc;
- private List<Tuple3<Double, Double, Double>> generateIsotonicInput(double[] labels) {
- ArrayList<Tuple3<Double, Double, Double>> input = new ArrayList(labels.length);
+ private static List<Tuple3<Double, Double, Double>> generateIsotonicInput(double[] labels) {
+ List<Tuple3<Double, Double, Double>> input = new ArrayList<>(labels.length);
for (int i = 1; i <= labels.length; i++) {
- input.add(new Tuple3<Double, Double, Double>(labels[i-1], (double) i, 1d));
+ input.add(new Tuple3<>(labels[i-1], (double) i, 1.0));
}
return input;
@@ -70,7 +70,7 @@ public class JavaIsotonicRegressionSuite implements Serializable {
runIsotonicRegression(new double[]{1, 2, 3, 3, 1, 6, 7, 8, 11, 9, 10, 12});
Assert.assertArrayEquals(
- new double[] {1, 2, 7d/3, 7d/3, 6, 7, 8, 10, 10, 12}, model.predictions(), 1e-14);
+ new double[] {1, 2, 7.0/3, 7.0/3, 6, 7, 8, 10, 10, 12}, model.predictions(), 1.0e-14);
}
@Test
@@ -81,10 +81,10 @@ public class JavaIsotonicRegressionSuite implements Serializable {
JavaDoubleRDD testRDD = sc.parallelizeDoubles(Arrays.asList(0.0, 1.0, 9.5, 12.0, 13.0));
List<Double> predictions = model.predict(testRDD).collect();
- Assert.assertTrue(predictions.get(0) == 1d);
- Assert.assertTrue(predictions.get(1) == 1d);
- Assert.assertTrue(predictions.get(2) == 10d);
- Assert.assertTrue(predictions.get(3) == 12d);
- Assert.assertTrue(predictions.get(4) == 12d);
+ Assert.assertEquals(1.0, predictions.get(0).doubleValue(), 1.0e-14);
+ Assert.assertEquals(1.0, predictions.get(1).doubleValue(), 1.0e-14);
+ Assert.assertEquals(10.0, predictions.get(2).doubleValue(), 1.0e-14);
+ Assert.assertEquals(12.0, predictions.get(3).doubleValue(), 1.0e-14);
+ Assert.assertEquals(12.0, predictions.get(4).doubleValue(), 1.0e-14);
}
}
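Note on the JavaIsotonicRegressionSuite changes: the helper becomes static, the raw ArrayList and explicit Tuple3 type arguments give way to the diamond operator, and literal style is normalized (1.0 instead of 1d, 1.0e-14 instead of 1e-14). More substantively, the exact checks assertTrue(predictions.get(i) == 1d) are replaced by assertEquals with an explicit tolerance, which is the safer way to compare floating-point results and reports a useful message on failure. A minimal Scala sketch of the same tolerance-based comparison, with placeholder values rather than the test's data:

    // Sketch only: compare floating-point predictions within a tolerance.
    val expected  = 7.0 / 3                 // placeholder expected value
    val predicted = 2.3333333333333335      // placeholder model output
    val tolerance = 1.0e-14
    assert(math.abs(predicted - expected) <= tolerance,
      s"prediction $predicted differs from $expected by more than $tolerance")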