about summary refs log tree commit diff
path: root/mllib
diff options
context:
space:
mode:
authorXiangrui Meng <meng@databricks.com>2014-08-16 15:14:43 -0700
committerXiangrui Meng <meng@databricks.com>2014-08-16 15:14:43 -0700
commitac6411c6e75906997c78de23dfdbc8d225b87cfd (patch)
tree716ab1116bee4c0b4d447b8a042162ddbe5e5a09 /mllib
parent7e70708a99949549adde00cb6246a9582bbc4929 (diff)
downloadspark-ac6411c6e75906997c78de23dfdbc8d225b87cfd.tar.gz
spark-ac6411c6e75906997c78de23dfdbc8d225b87cfd.tar.bz2
spark-ac6411c6e75906997c78de23dfdbc8d225b87cfd.zip
[SPARK-3081][MLLIB] rename RandomRDDGenerators to RandomRDDs
`RandomRDDGenerators` implies a factory for `RandomRDDGenerator`. However, its methods return RDDs, not RDDGenerators. So a more proper (and shorter) name would be `RandomRDDs`. dorx brkyvz Author: Xiangrui Meng <meng@databricks.com> Closes #1979 from mengxr/randomrdds and squashes the following commits: b161a2d [Xiangrui Meng] rename RandomRDDGenerators to RandomRDDs
Diffstat (limited to 'mllib')
-rw-r--r--mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala2
-rw-r--r--mllib/src/main/scala/org/apache/spark/mllib/random/RandomRDDs.scala (renamed from mllib/src/main/scala/org/apache/spark/mllib/random/RandomRDDGenerators.scala)6
-rw-r--r--mllib/src/test/scala/org/apache/spark/mllib/random/RandomRDDsSuite.scala (renamed from mllib/src/test/scala/org/apache/spark/mllib/random/RandomRDDGeneratorsSuite.scala)16
3 files changed, 12 insertions, 12 deletions
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala b/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala
index 18dc087856..4343124f10 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala
@@ -27,7 +27,7 @@ import org.apache.spark.mllib.classification._
import org.apache.spark.mllib.clustering._
import org.apache.spark.mllib.optimization._
import org.apache.spark.mllib.linalg.{Matrix, SparseVector, Vector, Vectors}
-import org.apache.spark.mllib.random.{RandomRDDGenerators => RG}
+import org.apache.spark.mllib.random.{RandomRDDs => RG}
import org.apache.spark.mllib.recommendation._
import org.apache.spark.mllib.regression._
import org.apache.spark.mllib.tree.configuration.{Algo, Strategy}
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/random/RandomRDDGenerators.scala b/mllib/src/main/scala/org/apache/spark/mllib/random/RandomRDDs.scala
index b0a0593223..3627036952 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/random/RandomRDDGenerators.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/random/RandomRDDs.scala
@@ -17,6 +17,8 @@
package org.apache.spark.mllib.random
+import scala.reflect.ClassTag
+
import org.apache.spark.SparkContext
import org.apache.spark.annotation.Experimental
import org.apache.spark.mllib.linalg.Vector
@@ -24,14 +26,12 @@ import org.apache.spark.mllib.rdd.{RandomVectorRDD, RandomRDD}
import org.apache.spark.rdd.RDD
import org.apache.spark.util.Utils
-import scala.reflect.ClassTag
-
/**
* :: Experimental ::
* Generator methods for creating RDDs comprised of i.i.d. samples from some distribution.
*/
@Experimental
-object RandomRDDGenerators {
+object RandomRDDs {
/**
* :: Experimental ::
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/random/RandomRDDGeneratorsSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/random/RandomRDDsSuite.scala
index 96e0bc63b0..c50b78bcbc 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/random/RandomRDDGeneratorsSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/random/RandomRDDsSuite.scala
@@ -34,7 +34,7 @@ import org.apache.spark.util.StatCounter
*
* TODO update tests to use TestingUtils for floating point comparison after PR 1367 is merged
*/
-class RandomRDDGeneratorsSuite extends FunSuite with LocalSparkContext with Serializable {
+class RandomRDDsSuite extends FunSuite with LocalSparkContext with Serializable {
def testGeneratedRDD(rdd: RDD[Double],
expectedSize: Long,
@@ -113,18 +113,18 @@ class RandomRDDGeneratorsSuite extends FunSuite with LocalSparkContext with Seri
val poissonMean = 100.0
for (seed <- 0 until 5) {
- val uniform = RandomRDDGenerators.uniformRDD(sc, size, numPartitions, seed)
+ val uniform = RandomRDDs.uniformRDD(sc, size, numPartitions, seed)
testGeneratedRDD(uniform, size, numPartitions, 0.5, 1 / math.sqrt(12))
- val normal = RandomRDDGenerators.normalRDD(sc, size, numPartitions, seed)
+ val normal = RandomRDDs.normalRDD(sc, size, numPartitions, seed)
testGeneratedRDD(normal, size, numPartitions, 0.0, 1.0)
- val poisson = RandomRDDGenerators.poissonRDD(sc, poissonMean, size, numPartitions, seed)
+ val poisson = RandomRDDs.poissonRDD(sc, poissonMean, size, numPartitions, seed)
testGeneratedRDD(poisson, size, numPartitions, poissonMean, math.sqrt(poissonMean), 0.1)
}
// mock distribution to check that partitions have unique seeds
- val random = RandomRDDGenerators.randomRDD(sc, new MockDistro(), 1000L, 1000, 0L)
+ val random = RandomRDDs.randomRDD(sc, new MockDistro(), 1000L, 1000, 0L)
assert(random.collect.size === random.collect.distinct.size)
}
@@ -135,13 +135,13 @@ class RandomRDDGeneratorsSuite extends FunSuite with LocalSparkContext with Seri
val poissonMean = 100.0
for (seed <- 0 until 5) {
- val uniform = RandomRDDGenerators.uniformVectorRDD(sc, rows, cols, parts, seed)
+ val uniform = RandomRDDs.uniformVectorRDD(sc, rows, cols, parts, seed)
testGeneratedVectorRDD(uniform, rows, cols, parts, 0.5, 1 / math.sqrt(12))
- val normal = RandomRDDGenerators.normalVectorRDD(sc, rows, cols, parts, seed)
+ val normal = RandomRDDs.normalVectorRDD(sc, rows, cols, parts, seed)
testGeneratedVectorRDD(normal, rows, cols, parts, 0.0, 1.0)
- val poisson = RandomRDDGenerators.poissonVectorRDD(sc, poissonMean, rows, cols, parts, seed)
+ val poisson = RandomRDDs.poissonVectorRDD(sc, poissonMean, rows, cols, parts, seed)
testGeneratedVectorRDD(poisson, rows, cols, parts, poissonMean, math.sqrt(poissonMean), 0.1)
}
}