diff options
author | Xiangrui Meng <meng@databricks.com> | 2014-08-13 16:20:49 -0700 |
---|---|---|
committer | Xiangrui Meng <meng@databricks.com> | 2014-08-13 16:20:49 -0700 |
commit | 7ecb867c4cd6916b6cb12f2ece1a4c88591ad5b5 (patch) | |
tree | 61fbbdd729c12acd83ff2e25fc111595737fa3ca /mllib | |
parent | 434bea1c002b597cff9db899da101490e1f1e9ed (diff) | |
download | spark-7ecb867c4cd6916b6cb12f2ece1a4c88591ad5b5.tar.gz spark-7ecb867c4cd6916b6cb12f2ece1a4c88591ad5b5.tar.bz2 spark-7ecb867c4cd6916b6cb12f2ece1a4c88591ad5b5.zip |
[MLLIB] use Iterator.fill instead of Array.fill
Iterator.fill uses less memory
Author: Xiangrui Meng <meng@databricks.com>
Closes #1930 from mengxr/rand-gen-iter and squashes the following commits:
24178ca [Xiangrui Meng] use Iterator.fill instead of Array.fill
Diffstat (limited to 'mllib')
-rw-r--r-- | mllib/src/main/scala/org/apache/spark/mllib/rdd/RandomRDD.scala | 10 |
1 file changed, 5 insertions, 5 deletions
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/rdd/RandomRDD.scala b/mllib/src/main/scala/org/apache/spark/mllib/rdd/RandomRDD.scala
index c8db3910c6..910eff9540 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/rdd/RandomRDD.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/rdd/RandomRDD.scala
@@ -105,16 +105,16 @@ private[mllib] object RandomRDD {
   def getPointIterator[T: ClassTag](partition: RandomRDDPartition[T]): Iterator[T] = {
     val generator = partition.generator.copy()
     generator.setSeed(partition.seed)
-    Array.fill(partition.size)(generator.nextValue()).toIterator
+    Iterator.fill(partition.size)(generator.nextValue())
   }

   // The RNG has to be reset every time the iterator is requested to guarantee same data
   // every time the content of the RDD is examined.
-  def getVectorIterator(partition: RandomRDDPartition[Double],
-      vectorSize: Int): Iterator[Vector] = {
+  def getVectorIterator(
+      partition: RandomRDDPartition[Double],
+      vectorSize: Int): Iterator[Vector] = {
     val generator = partition.generator.copy()
     generator.setSeed(partition.seed)
-    Array.fill(partition.size)(new DenseVector(
-      (0 until vectorSize).map { _ => generator.nextValue() }.toArray)).toIterator
+    Iterator.fill(partition.size)(new DenseVector(Array.fill(vectorSize)(generator.nextValue())))
   }
 }