path: root/mllib
author     CodingCat <zhunansjtu@gmail.com>        2014-03-12 17:43:12 -0700
committer  Aaron Davidson <aaron@databricks.com>   2014-03-12 17:43:12 -0700
commit     9032f7c0d5f1ae7985a20d54ca04c297201aae85 (patch)
tree       dff8324523fd8163ea369b524f73b1ef303605c0 /mllib
parent     b8afe3052086547879ebf28d6e36207e0d370710 (diff)
download   spark-9032f7c0d5f1ae7985a20d54ca04c297201aae85.tar.gz
           spark-9032f7c0d5f1ae7985a20d54ca04c297201aae85.tar.bz2
           spark-9032f7c0d5f1ae7985a20d54ca04c297201aae85.zip
SPARK-1160: Deprecate toArray in RDD
https://spark-project.atlassian.net/browse/SPARK-1160, reported by @mateiz: "It's redundant with collect() and the name doesn't make sense in Java, where we return a List (we can't return an array due to the way Java generics work). It's also missing in Python."

In this patch, I deprecated the method and changed the source files that used it, replacing toArray with collect() directly.

Author: CodingCat <zhunansjtu@gmail.com>

Closes #105 from CodingCat/SPARK-1060 and squashes the following commits:

286f163 [CodingCat] deprecate in JavaRDDLike
ee17b4e [CodingCat] add message and since
2ff7319 [CodingCat] deprecate toArray in RDD
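For context, the change described above follows the standard Scala deprecation pattern: annotate toArray with @deprecated (carrying a message and a "since" version, per the "add message and since" commit above) and delegate to collect(). The following is a minimal sketch, not the verbatim Spark source; the message text and the version string "1.0.0" are assumptions:

    // Sketch only: the deprecation message and "since" version are assumed,
    // not copied from the Spark source.
    abstract class RDD[T] {
      /** Return an array that contains all of the elements in this RDD. */
      def collect(): Array[T]

      /** Deprecated alias for collect(). */
      @deprecated("use collect()", "1.0.0")
      def toArray(): Array[T] = collect()
    }

On the Java side (JavaRDDLike), the commit deprecates the analogous method as well; there the return type is a java.util.List rather than an array, which is why the commit message calls the toArray name misleading in Java.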
Diffstat (limited to 'mllib')
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/linalg/SVD.scala      | 4
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/linalg/SVDSuite.scala | 6
2 files changed, 5 insertions(+), 5 deletions(-)
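The replacements in the hunks below are mechanical: on the driver, rdd.collect() returns the same Array[T] that the deprecated rdd.toArray did. A self-contained sketch against the Spark API of this era (the application name and sample data are illustrative, not from the patch):

    import org.apache.spark.SparkContext

    object ToArrayMigrationExample {
      def main(args: Array[String]): Unit = {
        // Local context for illustration only.
        val sc = new SparkContext("local", "ToArrayMigrationExample")
        val nums = sc.parallelize(1 to 5)

        // Before this patch: val materialized = nums.toArray
        val materialized = nums.collect() // Array(1, 2, 3, 4, 5)
        println("Collected " + materialized.length + " elements")

        sc.stop()
      }
    }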
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/linalg/SVD.scala b/mllib/src/main/scala/org/apache/spark/mllib/linalg/SVD.scala
index 8803c4c1a0..e4a26eeb07 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/linalg/SVD.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/linalg/SVD.scala
@@ -109,7 +109,7 @@ object SVD {
     // Construct jblas A^T A locally
     val ata = DoubleMatrix.zeros(n, n)
-    for (entry <- emits.toArray) {
+    for (entry <- emits.collect()) {
       ata.put(entry._1._1, entry._1._2, entry._2)
     }
@@ -178,7 +178,7 @@ object SVD {
     val s = decomposed.S.data
     val v = decomposed.V.data
-    println("Computed " + s.toArray.length + " singular values and vectors")
+    println("Computed " + s.collect().length + " singular values and vectors")
     u.saveAsTextFile(output_u)
     s.saveAsTextFile(output_s)
     v.saveAsTextFile(output_v)
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/linalg/SVDSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/linalg/SVDSuite.scala
index 32f3f141cd..a92386865a 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/linalg/SVDSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/linalg/SVDSuite.scala
@@ -50,7 +50,7 @@ class SVDSuite extends FunSuite with BeforeAndAfterAll {
     val m = matrix.m
     val n = matrix.n
     val ret = DoubleMatrix.zeros(m, n)
-    matrix.data.toArray.map(x => ret.put(x.i, x.j, x.mval))
+    matrix.data.collect().map(x => ret.put(x.i, x.j, x.mval))
     ret
   }
@@ -106,7 +106,7 @@ class SVDSuite extends FunSuite with BeforeAndAfterAll {
     val u = decomposed.U
     val s = decomposed.S
     val v = decomposed.V
-    val retrank = s.data.toArray.length
+    val retrank = s.data.collect().length
     assert(retrank == 1, "rank returned not one")
@@ -139,7 +139,7 @@ class SVDSuite extends FunSuite with BeforeAndAfterAll {
     val u = decomposed.U
     val s = decomposed.S
     val v = decomposed.V
-    val retrank = s.data.toArray.length
+    val retrank = s.data.collect().length
     val densea = getDenseMatrix(a)
     val svd = Singular.sparseSVD(densea)