Diffstat (limited to 'mllib/src/test/scala/org/apache')
-rw-r--r--  mllib/src/test/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifierSuite.scala    2
-rw-r--r--  mllib/src/test/scala/org/apache/spark/ml/python/MLSerDeSuite.scala                                   2
-rw-r--r--  mllib/src/test/scala/org/apache/spark/ml/tree/impl/RandomForestSuite.scala                           4
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/api/python/PythonMLLibAPISuite.scala                     2
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/evaluation/RankingMetricsSuite.scala                     4
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala                               4
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/util/TestingUtilsSuite.scala                            24
7 files changed, 21 insertions, 21 deletions
diff --git a/mllib/src/test/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifierSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifierSuite.scala
index c08cb69580..41684d92be 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifierSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifierSuite.scala
@@ -51,7 +51,7 @@ class MultilayerPerceptronClassifierSuite
test("Input Validation") {
val mlpc = new MultilayerPerceptronClassifier()
intercept[IllegalArgumentException] {
- mlpc.setLayers(Array[Int]())
+ mlpc.setLayers(Array.empty[Int])
}
intercept[IllegalArgumentException] {
mlpc.setLayers(Array[Int](1))
diff --git a/mllib/src/test/scala/org/apache/spark/ml/python/MLSerDeSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/python/MLSerDeSuite.scala
index 5eaef9aabd..3bb760f2ec 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/python/MLSerDeSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/python/MLSerDeSuite.scala
@@ -54,7 +54,7 @@ class MLSerDeSuite extends SparkFunSuite {
assert(matrix === nm)
// Test conversion for empty matrix
- val empty = Array[Double]()
+ val empty = Array.empty[Double]
val emptyMatrix = Matrices.dense(0, 0, empty)
val ne = MLSerDe.loads(MLSerDe.dumps(emptyMatrix)).asInstanceOf[DenseMatrix]
assert(emptyMatrix == ne)
diff --git a/mllib/src/test/scala/org/apache/spark/ml/tree/impl/RandomForestSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/tree/impl/RandomForestSuite.scala
index 499d386e66..3bded9c017 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/tree/impl/RandomForestSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/tree/impl/RandomForestSuite.scala
@@ -154,10 +154,10 @@ class RandomForestSuite extends SparkFunSuite with MLlibTestSparkContext {
val featureSamples = Array(0, 0, 0).map(_.toDouble)
val featureSamplesEmpty = Array.empty[Double]
val splits = RandomForest.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
- assert(splits === Array[Double]())
+ assert(splits === Array.empty[Double])
val splitsEmpty =
RandomForest.findSplitsForContinuousFeature(featureSamplesEmpty, fakeMetadata, 0)
- assert(splitsEmpty === Array[Double]())
+ assert(splitsEmpty === Array.empty[Double])
}
}
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/api/python/PythonMLLibAPISuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/api/python/PythonMLLibAPISuite.scala
index 0eb839f20c..5f85c0d65f 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/api/python/PythonMLLibAPISuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/api/python/PythonMLLibAPISuite.scala
@@ -72,7 +72,7 @@ class PythonMLLibAPISuite extends SparkFunSuite {
assert(matrix === nm)
// Test conversion for empty matrix
- val empty = Array[Double]()
+ val empty = Array.empty[Double]
val emptyMatrix = Matrices.dense(0, 0, empty)
val ne = SerDe.loads(SerDe.dumps(emptyMatrix)).asInstanceOf[DenseMatrix]
assert(emptyMatrix == ne)
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/evaluation/RankingMetricsSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/evaluation/RankingMetricsSuite.scala
index 8e9d910e64..f334be2c2b 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/evaluation/RankingMetricsSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/evaluation/RankingMetricsSuite.scala
@@ -28,7 +28,7 @@ class RankingMetricsSuite extends SparkFunSuite with MLlibTestSparkContext {
Seq(
(Array(1, 6, 2, 7, 8, 3, 9, 10, 4, 5), Array(1, 2, 3, 4, 5)),
(Array(4, 1, 5, 6, 2, 7, 3, 8, 9, 10), Array(1, 2, 3)),
- (Array(1, 2, 3, 4, 5), Array[Int]())
+ (Array(1, 2, 3, 4, 5), Array.empty[Int])
), 2)
val eps = 1.0E-5
@@ -55,7 +55,7 @@ class RankingMetricsSuite extends SparkFunSuite with MLlibTestSparkContext {
val predictionAndLabels = sc.parallelize(
Seq(
(Array(1, 6, 2), Array(1, 2, 3, 4, 5)),
- (Array[Int](), Array(1, 2, 3))
+ (Array.empty[Int], Array(1, 2, 3))
), 2)
val eps = 1.0E-5
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala
index d0c4dd28e1..563756907d 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala
@@ -289,7 +289,7 @@ class MatricesSuite extends SparkFunSuite {
val spHorz2 = Matrices.horzcat(Array(spMat1, deMat2))
val spHorz3 = Matrices.horzcat(Array(deMat1, spMat2))
val deHorz1 = Matrices.horzcat(Array(deMat1, deMat2))
- val deHorz2 = Matrices.horzcat(Array[Matrix]())
+ val deHorz2 = Matrices.horzcat(Array.empty[Matrix])
assert(deHorz1.numRows === 3)
assert(spHorz2.numRows === 3)
@@ -343,7 +343,7 @@ class MatricesSuite extends SparkFunSuite {
val deVert1 = Matrices.vertcat(Array(deMat1, deMat3))
val spVert2 = Matrices.vertcat(Array(spMat1, deMat3))
val spVert3 = Matrices.vertcat(Array(deMat1, spMat3))
- val deVert2 = Matrices.vertcat(Array[Matrix]())
+ val deVert2 = Matrices.vertcat(Array.empty[Matrix])
assert(deVert1.numRows === 5)
assert(spVert2.numRows === 5)
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/util/TestingUtilsSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/util/TestingUtilsSuite.scala
index 1aff44480a..3fcf1cf2c2 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/util/TestingUtilsSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/util/TestingUtilsSuite.scala
@@ -110,9 +110,9 @@ class TestingUtilsSuite extends SparkFunSuite {
assert(!(Vectors.dense(Array(3.1, 3.5)) !~= Vectors.dense(Array(3.130, 3.534)) relTol 0.01))
assert(!(Vectors.dense(Array(3.1, 3.5)) ~= Vectors.dense(Array(3.135, 3.534)) relTol 0.01))
assert(Vectors.dense(Array(3.1)) !~= Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
- assert(Vectors.dense(Array[Double]()) !~= Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
+ assert(Vectors.dense(Array.empty[Double]) !~= Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
assert(Vectors.dense(Array(3.1)) !~== Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
- assert(Vectors.dense(Array[Double]()) !~== Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
+ assert(Vectors.dense(Array.empty[Double]) !~== Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
// Should throw exception with message when test fails.
intercept[TestFailedException](
@@ -125,7 +125,7 @@ class TestingUtilsSuite extends SparkFunSuite {
Vectors.dense(Array(3.1)) ~== Vectors.dense(Array(3.535, 3.534)) relTol 0.01)
intercept[TestFailedException](
- Vectors.dense(Array[Double]()) ~== Vectors.dense(Array(3.135)) relTol 0.01)
+ Vectors.dense(Array.empty[Double]) ~== Vectors.dense(Array(3.135)) relTol 0.01)
// Comparing against zero should fail the test and throw exception with message
// saying that the relative error is meaningless in this situation.
@@ -145,7 +145,7 @@ class TestingUtilsSuite extends SparkFunSuite {
assert(Vectors.dense(Array(3.1)) !~==
Vectors.sparse(2, Array(0, 1), Array(3.130, 3.534)) relTol 0.01)
- assert(Vectors.dense(Array[Double]()) !~==
+ assert(Vectors.dense(Array.empty[Double]) !~==
Vectors.sparse(2, Array(0, 1), Array(3.130, 3.534)) relTol 0.01)
}
@@ -176,14 +176,14 @@ class TestingUtilsSuite extends SparkFunSuite {
assert(!(Vectors.dense(Array(3.1)) ~=
Vectors.dense(Array(3.1 + 1E-6, 3.5 + 2E-7)) absTol 1E-5))
- assert(Vectors.dense(Array[Double]()) !~=
+ assert(Vectors.dense(Array.empty[Double]) !~=
Vectors.dense(Array(3.1 + 1E-6, 3.5 + 2E-7)) absTol 1E-5)
- assert(!(Vectors.dense(Array[Double]()) ~=
+ assert(!(Vectors.dense(Array.empty[Double]) ~=
Vectors.dense(Array(3.1 + 1E-6, 3.5 + 2E-7)) absTol 1E-5))
- assert(Vectors.dense(Array[Double]()) ~=
- Vectors.dense(Array[Double]()) absTol 1E-5)
+ assert(Vectors.dense(Array.empty[Double]) ~=
+ Vectors.dense(Array.empty[Double]) absTol 1E-5)
// Should throw exception with message when test fails.
intercept[TestFailedException](Vectors.dense(Array(3.1, 3.5, 0.0)) !~==
@@ -195,7 +195,7 @@ class TestingUtilsSuite extends SparkFunSuite {
intercept[TestFailedException](Vectors.dense(Array(3.1)) ~==
Vectors.dense(Array(3.1 + 1E-5, 3.5 + 2E-7)) absTol 1E-6)
- intercept[TestFailedException](Vectors.dense(Array[Double]()) ~==
+ intercept[TestFailedException](Vectors.dense(Array.empty[Double]) ~==
Vectors.dense(Array(3.1 + 1E-5, 3.5 + 2E-7)) absTol 1E-6)
// Comparisons of two sparse vectors
@@ -214,7 +214,7 @@ class TestingUtilsSuite extends SparkFunSuite {
assert(Vectors.sparse(3, Array(0, 2), Array(3.1 + 1E-6, 2.4)) !~==
Vectors.sparse(1, Array(0), Array(3.1)) absTol 1E-3)
- assert(Vectors.sparse(0, Array[Int](), Array[Double]()) !~==
+ assert(Vectors.sparse(0, Array.empty[Int], Array.empty[Double]) !~==
Vectors.sparse(1, Array(0), Array(3.1)) absTol 1E-3)
// Comparisons of a dense vector and a sparse vector
@@ -230,14 +230,14 @@ class TestingUtilsSuite extends SparkFunSuite {
assert(Vectors.sparse(3, Array(0, 2), Array(3.1, 2.4)) !~==
Vectors.dense(Array(3.1)) absTol 1E-6)
- assert(Vectors.dense(Array[Double]()) !~==
+ assert(Vectors.dense(Array.empty[Double]) !~==
Vectors.sparse(3, Array(0, 2), Array(0, 2.4)) absTol 1E-6)
assert(Vectors.sparse(1, Array(0), Array(3.1)) !~==
Vectors.dense(Array(3.1, 3.2)) absTol 1E-6)
assert(Vectors.dense(Array(3.1)) !~==
- Vectors.sparse(0, Array[Int](), Array[Double]()) absTol 1E-6)
+ Vectors.sparse(0, Array.empty[Int], Array.empty[Double]) absTol 1E-6)
}
test("Comparing Matrices using absolute error.") {