-rw-r--r--  core/src/test/scala/org/apache/spark/CheckpointSuite.scala  2
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala  2
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala  2
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala  2
-rw-r--r--  core/src/test/scala/org/apache/spark/io/ChunkedByteBufferSuite.scala  4
-rw-r--r--  core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala  2
-rw-r--r--  mllib-local/src/test/scala/org/apache/spark/ml/linalg/MatricesSuite.scala  4
-rw-r--r--  mllib-local/src/test/scala/org/apache/spark/ml/util/TestingUtilsSuite.scala  24
-rw-r--r--  mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala  2
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/stat/test/KolmogorovSmirnovTest.scala  3
-rw-r--r--  mllib/src/test/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifierSuite.scala  2
-rw-r--r--  mllib/src/test/scala/org/apache/spark/ml/python/MLSerDeSuite.scala  2
-rw-r--r--  mllib/src/test/scala/org/apache/spark/ml/tree/impl/RandomForestSuite.scala  4
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/api/python/PythonMLLibAPISuite.scala  2
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/evaluation/RankingMetricsSuite.scala  4
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala  4
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/util/TestingUtilsSuite.scala  24
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala  10
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala  2
19 files changed, 51 insertions(+), 50 deletions(-)
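Every hunk below applies the same mechanical rewrite: the varargs call Array[T]() becomes the factory method Array.empty[T]. The two expressions yield equal zero-length arrays; Array.empty is the idiomatic form and avoids routing through the varargs Array.apply(xs: T*) factory. A standalone Scala sketch of the two forms (illustration only, not part of the commit):

    // Both expressions produce a zero-length Array[Int].
    // Array[Int]() resolves to the varargs factory Array.apply[Int](),
    // while Array.empty[Int] allocates the empty array directly.
    object EmptyArrayDemo {
      def main(args: Array[String]): Unit = {
        val viaVarargs = Array[Int]()     // varargs apply path
        val viaEmpty = Array.empty[Int]   // direct factory
        assert(viaVarargs.sameElements(viaEmpty))
        assert(viaEmpty.isEmpty)
        println(s"equal and empty: ${viaVarargs.isEmpty && viaEmpty.isEmpty}")
      }
    }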
diff --git a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
index 9f94e36324..b117c7709b 100644
--- a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
@@ -500,7 +500,7 @@ class CheckpointSuite extends SparkFunSuite with RDDCheckpointTester with LocalS
}
runTest("CheckpointRDD with zero partitions") { reliableCheckpoint: Boolean =>
- val rdd = new BlockRDD[Int](sc, Array[BlockId]())
+ val rdd = new BlockRDD[Int](sc, Array.empty[BlockId])
assert(rdd.partitions.size === 0)
assert(rdd.isCheckpointed === false)
assert(rdd.isCheckpointedAndMaterialized === false)
diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
index 2d48e75cfb..7093dad05c 100644
--- a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
@@ -65,7 +65,7 @@ class JsonProtocolSuite extends SparkFunSuite with JsonTestUtils {
test("writeMasterState") {
val workers = Array(createWorkerInfo(), createWorkerInfo())
val activeApps = Array(createAppInfo())
- val completedApps = Array[ApplicationInfo]()
+ val completedApps = Array.empty[ApplicationInfo]
val activeDrivers = Array(createDriverInfo())
val completedDrivers = Array(createDriverInfo())
val stateResponse = new MasterStateResponse(
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 732cbfaaee..7c649e305a 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -91,7 +91,7 @@ class SparkSubmitSuite
// scalastyle:off println
test("prints usage on empty input") {
- testPrematureExit(Array[String](), "Usage: spark-submit")
+ testPrematureExit(Array.empty[String], "Usage: spark-submit")
}
test("prints usage with only --help") {
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala
index 34f27ecaa0..de321db845 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala
@@ -33,7 +33,7 @@ class HistoryServerArgumentsSuite extends SparkFunSuite {
.set("spark.testing", "true")
test("No Arguments Parsing") {
- val argStrings = Array[String]()
+ val argStrings = Array.empty[String]
val hsa = new HistoryServerArguments(conf, argStrings)
assert(conf.get("spark.history.fs.logDirectory") === logDir.getAbsolutePath)
assert(conf.get("spark.history.fs.updateInterval") === "1")
diff --git a/core/src/test/scala/org/apache/spark/io/ChunkedByteBufferSuite.scala b/core/src/test/scala/org/apache/spark/io/ChunkedByteBufferSuite.scala
index 38b48a4c9e..3b798e36b0 100644
--- a/core/src/test/scala/org/apache/spark/io/ChunkedByteBufferSuite.scala
+++ b/core/src/test/scala/org/apache/spark/io/ChunkedByteBufferSuite.scala
@@ -57,7 +57,7 @@ class ChunkedByteBufferSuite extends SparkFunSuite {
}
test("toArray()") {
- val empty = ByteBuffer.wrap(Array[Byte]())
+ val empty = ByteBuffer.wrap(Array.empty[Byte])
val bytes = ByteBuffer.wrap(Array.tabulate(8)(_.toByte))
val chunkedByteBuffer = new ChunkedByteBuffer(Array(bytes, bytes, empty))
assert(chunkedByteBuffer.toArray === bytes.array() ++ bytes.array())
@@ -74,7 +74,7 @@ class ChunkedByteBufferSuite extends SparkFunSuite {
}
test("toInputStream()") {
- val empty = ByteBuffer.wrap(Array[Byte]())
+ val empty = ByteBuffer.wrap(Array.empty[Byte])
val bytes1 = ByteBuffer.wrap(Array.tabulate(256)(_.toByte))
val bytes2 = ByteBuffer.wrap(Array.tabulate(128)(_.toByte))
val chunkedByteBuffer = new ChunkedByteBuffer(Array(empty, bytes1, bytes2))
diff --git a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
index 57a8231200..bc6e98365d 100644
--- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
@@ -100,7 +100,7 @@ class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
check(Array("aaa", "bbb", null))
check(Array(true, false, true))
check(Array('a', 'b', 'c'))
- check(Array[Int]())
+ check(Array.empty[Int])
check(Array(Array("1", "2"), Array("1", "2", "3", "4")))
}
diff --git a/mllib-local/src/test/scala/org/apache/spark/ml/linalg/MatricesSuite.scala b/mllib-local/src/test/scala/org/apache/spark/ml/linalg/MatricesSuite.scala
index 2796fcf2cb..9c0aa73938 100644
--- a/mllib-local/src/test/scala/org/apache/spark/ml/linalg/MatricesSuite.scala
+++ b/mllib-local/src/test/scala/org/apache/spark/ml/linalg/MatricesSuite.scala
@@ -287,7 +287,7 @@ class MatricesSuite extends SparkMLFunSuite {
val spHorz2 = Matrices.horzcat(Array(spMat1, deMat2))
val spHorz3 = Matrices.horzcat(Array(deMat1, spMat2))
val deHorz1 = Matrices.horzcat(Array(deMat1, deMat2))
- val deHorz2 = Matrices.horzcat(Array[Matrix]())
+ val deHorz2 = Matrices.horzcat(Array.empty[Matrix])
assert(deHorz1.numRows === 3)
assert(spHorz2.numRows === 3)
@@ -341,7 +341,7 @@ class MatricesSuite extends SparkMLFunSuite {
val deVert1 = Matrices.vertcat(Array(deMat1, deMat3))
val spVert2 = Matrices.vertcat(Array(spMat1, deMat3))
val spVert3 = Matrices.vertcat(Array(deMat1, spMat3))
- val deVert2 = Matrices.vertcat(Array[Matrix]())
+ val deVert2 = Matrices.vertcat(Array.empty[Matrix])
assert(deVert1.numRows === 5)
assert(spVert2.numRows === 5)
diff --git a/mllib-local/src/test/scala/org/apache/spark/ml/util/TestingUtilsSuite.scala b/mllib-local/src/test/scala/org/apache/spark/ml/util/TestingUtilsSuite.scala
index 5cbf2f04e6..2dc0ee32d5 100644
--- a/mllib-local/src/test/scala/org/apache/spark/ml/util/TestingUtilsSuite.scala
+++ b/mllib-local/src/test/scala/org/apache/spark/ml/util/TestingUtilsSuite.scala
@@ -110,9 +110,9 @@ class TestingUtilsSuite extends SparkMLFunSuite {
assert(!(Vectors.dense(Array(3.1, 3.5)) !~= Vectors.dense(Array(3.130, 3.534)) relTol 0.01))
assert(!(Vectors.dense(Array(3.1, 3.5)) ~= Vectors.dense(Array(3.135, 3.534)) relTol 0.01))
assert(Vectors.dense(Array(3.1)) !~= Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
- assert(Vectors.dense(Array[Double]()) !~= Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
+ assert(Vectors.dense(Array.empty[Double]) !~= Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
assert(Vectors.dense(Array(3.1)) !~== Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
- assert(Vectors.dense(Array[Double]()) !~== Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
+ assert(Vectors.dense(Array.empty[Double]) !~== Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
// Should throw exception with message when test fails.
intercept[TestFailedException](
@@ -125,7 +125,7 @@ class TestingUtilsSuite extends SparkMLFunSuite {
Vectors.dense(Array(3.1)) ~== Vectors.dense(Array(3.535, 3.534)) relTol 0.01)
intercept[TestFailedException](
- Vectors.dense(Array[Double]()) ~== Vectors.dense(Array(3.135)) relTol 0.01)
+ Vectors.dense(Array.empty[Double]) ~== Vectors.dense(Array(3.135)) relTol 0.01)
// Comparing against zero should fail the test and throw exception with message
// saying that the relative error is meaningless in this situation.
@@ -145,7 +145,7 @@ class TestingUtilsSuite extends SparkMLFunSuite {
assert(Vectors.dense(Array(3.1)) !~==
Vectors.sparse(2, Array(0, 1), Array(3.130, 3.534)) relTol 0.01)
- assert(Vectors.dense(Array[Double]()) !~==
+ assert(Vectors.dense(Array.empty[Double]) !~==
Vectors.sparse(2, Array(0, 1), Array(3.130, 3.534)) relTol 0.01)
}
@@ -176,14 +176,14 @@ class TestingUtilsSuite extends SparkMLFunSuite {
assert(!(Vectors.dense(Array(3.1)) ~=
Vectors.dense(Array(3.1 + 1E-6, 3.5 + 2E-7)) absTol 1E-5))
- assert(Vectors.dense(Array[Double]()) !~=
+ assert(Vectors.dense(Array.empty[Double]) !~=
Vectors.dense(Array(3.1 + 1E-6, 3.5 + 2E-7)) absTol 1E-5)
- assert(!(Vectors.dense(Array[Double]()) ~=
+ assert(!(Vectors.dense(Array.empty[Double]) ~=
Vectors.dense(Array(3.1 + 1E-6, 3.5 + 2E-7)) absTol 1E-5))
- assert(Vectors.dense(Array[Double]()) ~=
- Vectors.dense(Array[Double]()) absTol 1E-5)
+ assert(Vectors.dense(Array.empty[Double]) ~=
+ Vectors.dense(Array.empty[Double]) absTol 1E-5)
// Should throw exception with message when test fails.
intercept[TestFailedException](Vectors.dense(Array(3.1, 3.5, 0.0)) !~==
@@ -195,7 +195,7 @@ class TestingUtilsSuite extends SparkMLFunSuite {
intercept[TestFailedException](Vectors.dense(Array(3.1)) ~==
Vectors.dense(Array(3.1 + 1E-5, 3.5 + 2E-7)) absTol 1E-6)
- intercept[TestFailedException](Vectors.dense(Array[Double]()) ~==
+ intercept[TestFailedException](Vectors.dense(Array.empty[Double]) ~==
Vectors.dense(Array(3.1 + 1E-5, 3.5 + 2E-7)) absTol 1E-6)
// Comparisons of two sparse vectors
@@ -214,7 +214,7 @@ class TestingUtilsSuite extends SparkMLFunSuite {
assert(Vectors.sparse(3, Array(0, 2), Array(3.1 + 1E-6, 2.4)) !~==
Vectors.sparse(1, Array(0), Array(3.1)) absTol 1E-3)
- assert(Vectors.sparse(0, Array[Int](), Array[Double]()) !~==
+ assert(Vectors.sparse(0, Array.empty[Int], Array.empty[Double]) !~==
Vectors.sparse(1, Array(0), Array(3.1)) absTol 1E-3)
// Comparisons of a dense vector and a sparse vector
@@ -230,14 +230,14 @@ class TestingUtilsSuite extends SparkMLFunSuite {
assert(Vectors.sparse(3, Array(0, 2), Array(3.1, 2.4)) !~==
Vectors.dense(Array(3.1)) absTol 1E-6)
- assert(Vectors.dense(Array[Double]()) !~==
+ assert(Vectors.dense(Array.empty[Double]) !~==
Vectors.sparse(3, Array(0, 2), Array(0, 2.4)) absTol 1E-6)
assert(Vectors.sparse(1, Array(0), Array(3.1)) !~==
Vectors.dense(Array(3.1, 3.2)) absTol 1E-6)
assert(Vectors.dense(Array(3.1)) !~==
- Vectors.sparse(0, Array[Int](), Array[Double]()) absTol 1E-6)
+ Vectors.sparse(0, Array.empty[Int], Array.empty[Double]) absTol 1E-6)
}
test("Comparing Matrices using absolute error.") {
diff --git a/mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala b/mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala
index 862a468745..8fdaae04c4 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala
@@ -622,7 +622,7 @@ class LogisticRegression @Since("1.2.0") (
rawCoefficients(coefIndex)
}
} else {
- Array[Double]()
+ Array.empty[Double]
}
val interceptVector = if (interceptsArray.nonEmpty && isMultinomial) {
// The intercepts are never regularized, so we always center the mean.
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/stat/test/KolmogorovSmirnovTest.scala b/mllib/src/main/scala/org/apache/spark/mllib/stat/test/KolmogorovSmirnovTest.scala
index c3de5d75f4..a8b5955a72 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/stat/test/KolmogorovSmirnovTest.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/stat/test/KolmogorovSmirnovTest.scala
@@ -124,7 +124,8 @@ private[stat] object KolmogorovSmirnovTest extends Logging {
val pResults = partDiffs.foldLeft(initAcc) { case ((pMin, pMax, pCt), (dl, dp)) =>
(math.min(pMin, dl), math.max(pMax, dp), pCt + 1)
}
- val results = if (pResults == initAcc) Array[(Double, Double, Double)]() else Array(pResults)
+ val results =
+ if (pResults == initAcc) Array.empty[(Double, Double, Double)] else Array(pResults)
results.iterator
}
diff --git a/mllib/src/test/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifierSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifierSuite.scala
index c08cb69580..41684d92be 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifierSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/classification/MultilayerPerceptronClassifierSuite.scala
@@ -51,7 +51,7 @@ class MultilayerPerceptronClassifierSuite
test("Input Validation") {
val mlpc = new MultilayerPerceptronClassifier()
intercept[IllegalArgumentException] {
- mlpc.setLayers(Array[Int]())
+ mlpc.setLayers(Array.empty[Int])
}
intercept[IllegalArgumentException] {
mlpc.setLayers(Array[Int](1))
diff --git a/mllib/src/test/scala/org/apache/spark/ml/python/MLSerDeSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/python/MLSerDeSuite.scala
index 5eaef9aabd..3bb760f2ec 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/python/MLSerDeSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/python/MLSerDeSuite.scala
@@ -54,7 +54,7 @@ class MLSerDeSuite extends SparkFunSuite {
assert(matrix === nm)
// Test conversion for empty matrix
- val empty = Array[Double]()
+ val empty = Array.empty[Double]
val emptyMatrix = Matrices.dense(0, 0, empty)
val ne = MLSerDe.loads(MLSerDe.dumps(emptyMatrix)).asInstanceOf[DenseMatrix]
assert(emptyMatrix == ne)
diff --git a/mllib/src/test/scala/org/apache/spark/ml/tree/impl/RandomForestSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/tree/impl/RandomForestSuite.scala
index 499d386e66..3bded9c017 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/tree/impl/RandomForestSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/tree/impl/RandomForestSuite.scala
@@ -154,10 +154,10 @@ class RandomForestSuite extends SparkFunSuite with MLlibTestSparkContext {
val featureSamples = Array(0, 0, 0).map(_.toDouble)
val featureSamplesEmpty = Array.empty[Double]
val splits = RandomForest.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
- assert(splits === Array[Double]())
+ assert(splits === Array.empty[Double])
val splitsEmpty =
RandomForest.findSplitsForContinuousFeature(featureSamplesEmpty, fakeMetadata, 0)
- assert(splitsEmpty === Array[Double]())
+ assert(splitsEmpty === Array.empty[Double])
}
}
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/api/python/PythonMLLibAPISuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/api/python/PythonMLLibAPISuite.scala
index 0eb839f20c..5f85c0d65f 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/api/python/PythonMLLibAPISuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/api/python/PythonMLLibAPISuite.scala
@@ -72,7 +72,7 @@ class PythonMLLibAPISuite extends SparkFunSuite {
assert(matrix === nm)
// Test conversion for empty matrix
- val empty = Array[Double]()
+ val empty = Array.empty[Double]
val emptyMatrix = Matrices.dense(0, 0, empty)
val ne = SerDe.loads(SerDe.dumps(emptyMatrix)).asInstanceOf[DenseMatrix]
assert(emptyMatrix == ne)
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/evaluation/RankingMetricsSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/evaluation/RankingMetricsSuite.scala
index 8e9d910e64..f334be2c2b 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/evaluation/RankingMetricsSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/evaluation/RankingMetricsSuite.scala
@@ -28,7 +28,7 @@ class RankingMetricsSuite extends SparkFunSuite with MLlibTestSparkContext {
Seq(
(Array(1, 6, 2, 7, 8, 3, 9, 10, 4, 5), Array(1, 2, 3, 4, 5)),
(Array(4, 1, 5, 6, 2, 7, 3, 8, 9, 10), Array(1, 2, 3)),
- (Array(1, 2, 3, 4, 5), Array[Int]())
+ (Array(1, 2, 3, 4, 5), Array.empty[Int])
), 2)
val eps = 1.0E-5
@@ -55,7 +55,7 @@ class RankingMetricsSuite extends SparkFunSuite with MLlibTestSparkContext {
val predictionAndLabels = sc.parallelize(
Seq(
(Array(1, 6, 2), Array(1, 2, 3, 4, 5)),
- (Array[Int](), Array(1, 2, 3))
+ (Array.empty[Int], Array(1, 2, 3))
), 2)
val eps = 1.0E-5
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala
index d0c4dd28e1..563756907d 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala
@@ -289,7 +289,7 @@ class MatricesSuite extends SparkFunSuite {
val spHorz2 = Matrices.horzcat(Array(spMat1, deMat2))
val spHorz3 = Matrices.horzcat(Array(deMat1, spMat2))
val deHorz1 = Matrices.horzcat(Array(deMat1, deMat2))
- val deHorz2 = Matrices.horzcat(Array[Matrix]())
+ val deHorz2 = Matrices.horzcat(Array.empty[Matrix])
assert(deHorz1.numRows === 3)
assert(spHorz2.numRows === 3)
@@ -343,7 +343,7 @@ class MatricesSuite extends SparkFunSuite {
val deVert1 = Matrices.vertcat(Array(deMat1, deMat3))
val spVert2 = Matrices.vertcat(Array(spMat1, deMat3))
val spVert3 = Matrices.vertcat(Array(deMat1, spMat3))
- val deVert2 = Matrices.vertcat(Array[Matrix]())
+ val deVert2 = Matrices.vertcat(Array.empty[Matrix])
assert(deVert1.numRows === 5)
assert(spVert2.numRows === 5)
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/util/TestingUtilsSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/util/TestingUtilsSuite.scala
index 1aff44480a..3fcf1cf2c2 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/util/TestingUtilsSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/util/TestingUtilsSuite.scala
@@ -110,9 +110,9 @@ class TestingUtilsSuite extends SparkFunSuite {
assert(!(Vectors.dense(Array(3.1, 3.5)) !~= Vectors.dense(Array(3.130, 3.534)) relTol 0.01))
assert(!(Vectors.dense(Array(3.1, 3.5)) ~= Vectors.dense(Array(3.135, 3.534)) relTol 0.01))
assert(Vectors.dense(Array(3.1)) !~= Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
- assert(Vectors.dense(Array[Double]()) !~= Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
+ assert(Vectors.dense(Array.empty[Double]) !~= Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
assert(Vectors.dense(Array(3.1)) !~== Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
- assert(Vectors.dense(Array[Double]()) !~== Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
+ assert(Vectors.dense(Array.empty[Double]) !~== Vectors.dense(Array(3.130, 3.534)) relTol 0.01)
// Should throw exception with message when test fails.
intercept[TestFailedException](
@@ -125,7 +125,7 @@ class TestingUtilsSuite extends SparkFunSuite {
Vectors.dense(Array(3.1)) ~== Vectors.dense(Array(3.535, 3.534)) relTol 0.01)
intercept[TestFailedException](
- Vectors.dense(Array[Double]()) ~== Vectors.dense(Array(3.135)) relTol 0.01)
+ Vectors.dense(Array.empty[Double]) ~== Vectors.dense(Array(3.135)) relTol 0.01)
// Comparing against zero should fail the test and throw exception with message
// saying that the relative error is meaningless in this situation.
@@ -145,7 +145,7 @@ class TestingUtilsSuite extends SparkFunSuite {
assert(Vectors.dense(Array(3.1)) !~==
Vectors.sparse(2, Array(0, 1), Array(3.130, 3.534)) relTol 0.01)
- assert(Vectors.dense(Array[Double]()) !~==
+ assert(Vectors.dense(Array.empty[Double]) !~==
Vectors.sparse(2, Array(0, 1), Array(3.130, 3.534)) relTol 0.01)
}
@@ -176,14 +176,14 @@ class TestingUtilsSuite extends SparkFunSuite {
assert(!(Vectors.dense(Array(3.1)) ~=
Vectors.dense(Array(3.1 + 1E-6, 3.5 + 2E-7)) absTol 1E-5))
- assert(Vectors.dense(Array[Double]()) !~=
+ assert(Vectors.dense(Array.empty[Double]) !~=
Vectors.dense(Array(3.1 + 1E-6, 3.5 + 2E-7)) absTol 1E-5)
- assert(!(Vectors.dense(Array[Double]()) ~=
+ assert(!(Vectors.dense(Array.empty[Double]) ~=
Vectors.dense(Array(3.1 + 1E-6, 3.5 + 2E-7)) absTol 1E-5))
- assert(Vectors.dense(Array[Double]()) ~=
- Vectors.dense(Array[Double]()) absTol 1E-5)
+ assert(Vectors.dense(Array.empty[Double]) ~=
+ Vectors.dense(Array.empty[Double]) absTol 1E-5)
// Should throw exception with message when test fails.
intercept[TestFailedException](Vectors.dense(Array(3.1, 3.5, 0.0)) !~==
@@ -195,7 +195,7 @@ class TestingUtilsSuite extends SparkFunSuite {
intercept[TestFailedException](Vectors.dense(Array(3.1)) ~==
Vectors.dense(Array(3.1 + 1E-5, 3.5 + 2E-7)) absTol 1E-6)
- intercept[TestFailedException](Vectors.dense(Array[Double]()) ~==
+ intercept[TestFailedException](Vectors.dense(Array.empty[Double]) ~==
Vectors.dense(Array(3.1 + 1E-5, 3.5 + 2E-7)) absTol 1E-6)
// Comparisons of two sparse vectors
@@ -214,7 +214,7 @@ class TestingUtilsSuite extends SparkFunSuite {
assert(Vectors.sparse(3, Array(0, 2), Array(3.1 + 1E-6, 2.4)) !~==
Vectors.sparse(1, Array(0), Array(3.1)) absTol 1E-3)
- assert(Vectors.sparse(0, Array[Int](), Array[Double]()) !~==
+ assert(Vectors.sparse(0, Array.empty[Int], Array.empty[Double]) !~==
Vectors.sparse(1, Array(0), Array(3.1)) absTol 1E-3)
// Comparisons of a dense vector and a sparse vector
@@ -230,14 +230,14 @@ class TestingUtilsSuite extends SparkFunSuite {
assert(Vectors.sparse(3, Array(0, 2), Array(3.1, 2.4)) !~==
Vectors.dense(Array(3.1)) absTol 1E-6)
- assert(Vectors.dense(Array[Double]()) !~==
+ assert(Vectors.dense(Array.empty[Double]) !~==
Vectors.sparse(3, Array(0, 2), Array(0, 2.4)) absTol 1E-6)
assert(Vectors.sparse(1, Array(0), Array(3.1)) !~==
Vectors.dense(Array(3.1, 3.2)) absTol 1E-6)
assert(Vectors.dense(Array(3.1)) !~==
- Vectors.sparse(0, Array[Int](), Array[Double]()) absTol 1E-6)
+ Vectors.sparse(0, Array.empty[Int], Array.empty[Double]) absTol 1E-6)
}
test("Comparing Matrices using absolute error.") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
index fdb9fa31f0..26978a0482 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
@@ -215,13 +215,13 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
checkEvaluation(Substring(bytes, 2, 2), Array[Byte](2, 3))
checkEvaluation(Substring(bytes, 3, 2), Array[Byte](3, 4))
checkEvaluation(Substring(bytes, 4, 2), Array[Byte](4))
- checkEvaluation(Substring(bytes, 8, 2), Array[Byte]())
+ checkEvaluation(Substring(bytes, 8, 2), Array.empty[Byte])
checkEvaluation(Substring(bytes, -1, 2), Array[Byte](4))
checkEvaluation(Substring(bytes, -2, 2), Array[Byte](3, 4))
checkEvaluation(Substring(bytes, -3, 2), Array[Byte](2, 3))
checkEvaluation(Substring(bytes, -4, 2), Array[Byte](1, 2))
checkEvaluation(Substring(bytes, -5, 2), Array[Byte](1))
- checkEvaluation(Substring(bytes, -8, 2), Array[Byte]())
+ checkEvaluation(Substring(bytes, -8, 2), Array.empty[Byte])
}
test("string substring_index function") {
@@ -275,7 +275,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
checkEvaluation(Base64(UnBase64(a)), "AQIDBA==", create_row("AQIDBA=="))
checkEvaluation(Base64(b), "AQIDBA==", create_row(bytes))
- checkEvaluation(Base64(b), "", create_row(Array[Byte]()))
+ checkEvaluation(Base64(b), "", create_row(Array.empty[Byte]))
checkEvaluation(Base64(b), null, create_row(null))
checkEvaluation(Base64(Literal.create(null, BinaryType)), null, create_row("abdef"))
@@ -526,13 +526,13 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
// non ascii characters are not allowed in the source code, so we disable the scalastyle.
checkEvaluation(Length(Literal("a花花c")), 4, create_row(string))
// scalastyle:on
- checkEvaluation(Length(Literal(bytes)), 5, create_row(Array[Byte]()))
+ checkEvaluation(Length(Literal(bytes)), 5, create_row(Array.empty[Byte]))
checkEvaluation(Length(a), 5, create_row(string))
checkEvaluation(Length(b), 5, create_row(bytes))
checkEvaluation(Length(a), 0, create_row(""))
- checkEvaluation(Length(b), 0, create_row(Array[Byte]()))
+ checkEvaluation(Length(b), 0, create_row(Array.empty[Byte]))
checkEvaluation(Length(a), null, create_row(null))
checkEvaluation(Length(b), null, create_row(null))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
index 45db61515e..586a0fffeb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
@@ -273,7 +273,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSQLContext {
test("sort_array function") {
val df = Seq(
(Array[Int](2, 1, 3), Array("b", "c", "a")),
- (Array[Int](), Array[String]()),
+ (Array.empty[Int], Array.empty[String]),
(null, null)
).toDF("a", "b")
checkAnswer(