path: root/mllib
author:    Evan Sparks <sparks@cs.berkeley.edu>                2013-08-16 17:48:26 -0700
committer: Shivaram Venkataraman <shivaram@eecs.berkeley.edu>  2013-08-18 15:03:13 -0700
commit:    b291db712e73fdff0c02946bac96e330b089409d
tree:      42673ff425dbe405c95453199a82ca454b570693
parent:    b659af83d3f91f0f339d874b2742ddca20a9f610
Centralize the linear data generator and update the mllib regression tests to use it.
Diffstat (limited to 'mllib')
-rw-r--r--  mllib/src/main/scala/spark/mllib/util/LassoDataGenerator.scala | 48
-rw-r--r--  mllib/src/main/scala/spark/mllib/util/LinearDataGenerator.scala (renamed from mllib/src/main/scala/spark/mllib/util/RidgeRegressionDataGenerator.scala) | 76
-rw-r--r--  mllib/src/main/scala/spark/mllib/util/LinearRegressionDataGenerator.scala | 98
-rw-r--r--  mllib/src/test/java/spark/mllib/regression/JavaLassoSuite.java | 11
-rw-r--r--  mllib/src/test/java/spark/mllib/regression/JavaLinearRegressionSuite.java | 9
-rw-r--r--  mllib/src/test/java/spark/mllib/regression/JavaRidgeRegressionSuite.java | 9
-rw-r--r--  mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala | 39
-rw-r--r--  mllib/src/test/scala/spark/mllib/regression/LinearRegressionSuite.scala | 38
-rw-r--r--  mllib/src/test/scala/spark/mllib/regression/RidgeRegressionSuite.scala | 38
9 files changed, 84 insertions(+), 282 deletions(-)
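
Before the per-file diffs, a minimal sketch of how a regression suite can generate training and validation data through the consolidated generator introduced below. The intercept and weight values mirror the existing Java suites; the variable names are illustrative only.

// Sketch only: exercises the LinearDataGenerator API added in this commit.
import spark.mllib.util.LinearDataGenerator
import spark.mllib.regression.LabeledPoint

val intercept = 2.0
val weights = Array(-1.5, 1.0e-2)

// Y = x.dot(weights) + intercept + eps * N(0, 1) noise; eps defaults to 0.1.
val trainingData: Seq[LabeledPoint] =
  LinearDataGenerator.generateLinearInput(intercept, weights, nPoints = 100, seed = 42)
val validationData: Seq[LabeledPoint] =
  LinearDataGenerator.generateLinearInput(intercept, weights, nPoints = 100, seed = 17)
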
diff --git a/mllib/src/main/scala/spark/mllib/util/LassoDataGenerator.scala b/mllib/src/main/scala/spark/mllib/util/LassoDataGenerator.scala
deleted file mode 100644
index eeb14fc4e3..0000000000
--- a/mllib/src/main/scala/spark/mllib/util/LassoDataGenerator.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-package spark.mllib.util
-
-import scala.util.Random
-
-import org.jblas.DoubleMatrix
-
-import spark.{RDD, SparkContext}
-import spark.mllib.regression.LabeledPoint
-
-/**
- * Generate sample data used for Lasso Regression. This class generates uniform random values
- * for the features and adds Gaussian noise with weight 0.1 to generate response variables.
- */
-object LassoDataGenerator {
-
- def main(args: Array[String]) {
- if (args.length < 2) {
- println("Usage: LassoGenerator " +
- "<master> <output_dir> [num_examples] [num_features] [num_partitions]")
- System.exit(1)
- }
-
- val sparkMaster: String = args(0)
- val outputPath: String = args(1)
- val nexamples: Int = if (args.length > 2) args(2).toInt else 1000
- val nfeatures: Int = if (args.length > 3) args(3).toInt else 2
- val parts: Int = if (args.length > 4) args(4).toInt else 2
-
- val sc = new SparkContext(sparkMaster, "LassoGenerator")
-
- val globalRnd = new Random(94720)
- val trueWeights = new DoubleMatrix(1, nfeatures+1,
- Array.fill[Double](nfeatures + 1) { globalRnd.nextGaussian() }:_*)
-
- val data: RDD[LabeledPoint] = sc.parallelize(0 until nexamples, parts).map { idx =>
- val rnd = new Random(42 + idx)
-
- val x = Array.fill[Double](nfeatures) {
- rnd.nextDouble() * 2.0 - 1.0
- }
- val y = (new DoubleMatrix(1, x.length, x:_*)).dot(trueWeights) + rnd.nextGaussian() * 0.1
- LabeledPoint(y, x)
- }
-
- MLUtils.saveLabeledData(data, outputPath)
- sc.stop()
- }
-}
diff --git a/mllib/src/main/scala/spark/mllib/util/RidgeRegressionDataGenerator.scala b/mllib/src/main/scala/spark/mllib/util/LinearDataGenerator.scala
index 08dce723b8..8fe3ab4754 100644
--- a/mllib/src/main/scala/spark/mllib/util/RidgeRegressionDataGenerator.scala
+++ b/mllib/src/main/scala/spark/mllib/util/LinearDataGenerator.scala
@@ -23,54 +23,92 @@ import org.jblas.DoubleMatrix
import spark.{RDD, SparkContext}
import spark.mllib.regression.LabeledPoint
+import scala.collection.JavaConversions._
+import spark.mllib.regression.LabeledPoint
/**
- * Generate sample data used for RidgeRegression. This class generates
+ * Generate sample data for linear regression. This class generates
* uniformly random values for every feature and adds Gaussian noise with mean `eps` to the
* response variable `Y`.
*
*/
-object RidgeRegressionDataGenerator {
+object LinearDataGenerator {
+
+ /**
+   * Return a Java List of synthetic data randomly generated according to a
+   * multi-collinear model.
+ * @param intercept Data intercept
+ * @param weights Weights to be applied.
+ * @param nPoints Number of points in sample.
+ * @param seed Random seed
+ * @return Java List of input.
+ */
+ def generateLinearInputAsList(
+ intercept: Double,
+ weights: Array[Double],
+ nPoints: Int,
+ seed: Int): java.util.List[LabeledPoint] = {
+ seqAsJavaList(generateLinearInput(intercept, weights, nPoints, seed))
+ }
/**
- * Generate an RDD containing sample data for RidgeRegression.
+   * Generate noisy input of the form Y = x.dot(weights) + intercept + eps * noise.
+ * @param intercept Data intercept
+ * @param weights Weights to be applied.
+ * @param nPoints Number of points in sample.
+ * @param seed Random seed
+ * @param eps Epsilon scaling factor.
+   * @return Seq of LabeledPoint containing the generated data.
+ */
+ def generateLinearInput(
+ intercept: Double,
+ weights: Array[Double],
+ nPoints: Int,
+ seed: Int,
+ eps: Double = 0.1): Seq[LabeledPoint] = {
+
+ val rnd = new Random(seed)
+ val weightsMat = new DoubleMatrix(1, weights.length, weights:_*)
+ val x = Array.fill[Array[Double]](nPoints)(
+ Array.fill[Double](weights.length)(rnd.nextGaussian()))
+ val y = x.map(xi =>
+ (new DoubleMatrix(1, xi.length, xi:_*)).dot(weightsMat) + intercept + eps * rnd.nextGaussian()
+ )
+ y.zip(x).map(p => LabeledPoint(p._1, p._2))
+ }
+
+ /**
+ * Generate an RDD containing sample data for Linear Regression models - including Ridge, Lasso,
+   * and unregularized variants.
*
* @param sc SparkContext to be used for generating the RDD.
* @param nexamples Number of examples that will be contained in the RDD.
* @param nfeatures Number of features to generate for each example.
* @param eps Epsilon factor by which examples are scaled.
+ * @param weights Weights associated with the first weights.length features.
* @param nparts Number of partitions in the RDD. Default value is 2.
*
* @return RDD of LabeledPoint containing sample data.
*/
- def generateRidgeRDD(
+ def generateLinearRDD(
sc: SparkContext,
nexamples: Int,
nfeatures: Int,
eps: Double,
+ weights: Array[Double] = Array[Double](),
nparts: Int = 2,
intercept: Double = 0.0) : RDD[LabeledPoint] = {
org.jblas.util.Random.seed(42)
// Random values distributed uniformly in [-0.5, 0.5]
val w = DoubleMatrix.rand(nfeatures, 1).subi(0.5)
- w.put(0, 0, 10)
- w.put(1, 0, 10)
+
+    (0 until weights.length.min(nfeatures)).map(i => w.put(i, 0, weights(i)))
val data: RDD[LabeledPoint] = sc.parallelize(0 until nparts, nparts).flatMap { p =>
- org.jblas.util.Random.seed(42 + p)
+ val seed = 42+p
val examplesInPartition = nexamples / nparts
- val X = DoubleMatrix.rand(examplesInPartition, nfeatures)
- val y = X.mmul(w).add(intercept)
-
- val rnd = new Random(42 + p)
-
- val normalValues = Array.fill[Double](examplesInPartition)(rnd.nextGaussian() * eps)
- val yObs = new DoubleMatrix(normalValues).addi(y)
-
- Iterator.tabulate(examplesInPartition) { i =>
- LabeledPoint(yObs.get(i, 0), X.getRow(i).toArray)
- }
+ generateLinearInput(intercept, w.toArray, examplesInPartition, seed, eps)
}
data
}
@@ -90,7 +128,7 @@ object RidgeRegressionDataGenerator {
val eps = 10
val sc = new SparkContext(sparkMaster, "RidgeRegressionDataGenerator")
- val data = generateRidgeRDD(sc, nexamples, nfeatures, eps, parts)
+ val data = generateLinearRDD(sc, nexamples, nfeatures, eps, nparts = parts)
MLUtils.saveLabeledData(data, outputPath)
sc.stop()
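
The RDD entry point keeps the shape of the old generateRidgeRDD but now takes explicit weights. A hedged usage sketch matching the updated suites further down (it assumes an existing SparkContext named sc):

// Sketch only: 100 examples, 2 collinear features with true weights (10, 10),
// zero noise, and intercept 3.0, matching the setup used by the updated regression suites.
val testRDD = LinearDataGenerator
  .generateLinearRDD(sc, 100, 2, 0.0, Array(10.0, 10.0), intercept = 3.0)
  .cache()
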
diff --git a/mllib/src/main/scala/spark/mllib/util/LinearRegressionDataGenerator.scala b/mllib/src/main/scala/spark/mllib/util/LinearRegressionDataGenerator.scala
deleted file mode 100644
index 39e2a30b55..0000000000
--- a/mllib/src/main/scala/spark/mllib/util/LinearRegressionDataGenerator.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package spark.mllib.util
-
-import scala.util.Random
-
-import org.jblas.DoubleMatrix
-
-import spark.{RDD, SparkContext}
-import spark.mllib.regression.LabeledPoint
-
-/**
- * Generate sample data used for LinearRegression. This class generates
- * uniformly random values for every feature and adds Gaussian noise with mean `eps` to the
- * response variable `Y`.
- *
- */
-object LinearRegressionDataGenerator {
-
- /**
- * Generate an RDD containing sample data for LinearRegression.
- *
- * @param sc SparkContext to be used for generating the RDD.
- * @param nexamples Number of examples that will be contained in the RDD.
- * @param nfeatures Number of features to generate for each example.
- * @param eps Epsilon factor by which examples are scaled.
- * @param nparts Number of partitions in the RDD. Default value is 2.
- *
- * @return RDD of LabeledPoint containing sample data.
- */
- def generateLinearRDD(
- sc: SparkContext,
- nexamples: Int,
- nfeatures: Int,
- eps: Double,
- nparts: Int = 2,
- intercept: Double = 0.0) : RDD[LabeledPoint] = {
- org.jblas.util.Random.seed(42)
- // Random values distributed uniformly in [-0.5, 0.5]
- val w = DoubleMatrix.rand(nfeatures, 1).subi(0.5)
- w.put(0, 0, 10)
- w.put(1, 0, 10)
-
- val data: RDD[LabeledPoint] = sc.parallelize(0 until nparts, nparts).flatMap { p =>
- org.jblas.util.Random.seed(42 + p)
- val examplesInPartition = nexamples / nparts
-
- val X = DoubleMatrix.rand(examplesInPartition, nfeatures)
- val y = X.mmul(w).add(intercept)
-
- val rnd = new Random(42 + p)
-
- val normalValues = Array.fill[Double](examplesInPartition)(rnd.nextGaussian() * eps)
- val yObs = new DoubleMatrix(normalValues).addi(y)
-
- Iterator.tabulate(examplesInPartition) { i =>
- LabeledPoint(yObs.get(i, 0), X.getRow(i).toArray)
- }
- }
- data
- }
-
- def main(args: Array[String]) {
- if (args.length < 2) {
- println("Usage: LinearRegressionGenerator " +
- "<master> <output_dir> [num_examples] [num_features] [num_partitions]")
- System.exit(1)
- }
-
- val sparkMaster: String = args(0)
- val outputPath: String = args(1)
- val nexamples: Int = if (args.length > 2) args(2).toInt else 1000
- val nfeatures: Int = if (args.length > 3) args(3).toInt else 100
- val parts: Int = if (args.length > 4) args(4).toInt else 2
- val eps = 10
-
- val sc = new SparkContext(sparkMaster, "LinearRegressionDataGenerator")
- val data = generateLinearRDD(sc, nexamples, nfeatures, eps, parts)
-
- MLUtils.saveLabeledData(data, outputPath)
- sc.stop()
- }
-}
diff --git a/mllib/src/test/java/spark/mllib/regression/JavaLassoSuite.java b/mllib/src/test/java/spark/mllib/regression/JavaLassoSuite.java
index e26d7b385c..8d692c2d0d 100644
--- a/mllib/src/test/java/spark/mllib/regression/JavaLassoSuite.java
+++ b/mllib/src/test/java/spark/mllib/regression/JavaLassoSuite.java
@@ -27,6 +27,7 @@ import org.junit.Test;
import spark.api.java.JavaRDD;
import spark.api.java.JavaSparkContext;
+import spark.mllib.util.LinearDataGenerator;
public class JavaLassoSuite implements Serializable {
private transient JavaSparkContext sc;
@@ -61,10 +62,10 @@ public class JavaLassoSuite implements Serializable {
double A = 2.0;
double[] weights = {-1.5, 1.0e-2};
- JavaRDD<LabeledPoint> testRDD = sc.parallelize(LassoSuite.generateLassoInputAsList(A,
- weights, nPoints, 42), 2).cache();
+ JavaRDD<LabeledPoint> testRDD = sc.parallelize(LinearDataGenerator.generateLinearInputAsList(A,
+ weights, nPoints, 42), 2).cache();
List<LabeledPoint> validationData =
- LassoSuite.generateLassoInputAsList(A, weights, nPoints, 17);
+ LinearDataGenerator.generateLinearInputAsList(A, weights, nPoints, 17);
LassoWithSGD svmSGDImpl = new LassoWithSGD();
svmSGDImpl.optimizer().setStepSize(1.0)
@@ -82,10 +83,10 @@ public class JavaLassoSuite implements Serializable {
double A = 2.0;
double[] weights = {-1.5, 1.0e-2};
- JavaRDD<LabeledPoint> testRDD = sc.parallelize(LassoSuite.generateLassoInputAsList(A,
+ JavaRDD<LabeledPoint> testRDD = sc.parallelize(LinearDataGenerator.generateLinearInputAsList(A,
weights, nPoints, 42), 2).cache();
List<LabeledPoint> validationData =
- LassoSuite.generateLassoInputAsList(A, weights, nPoints, 17);
+ LinearDataGenerator.generateLinearInputAsList(A, weights, nPoints, 17);
LassoModel model = LassoWithSGD.train(testRDD.rdd(), 100, 1.0, 0.01, 1.0);
diff --git a/mllib/src/test/java/spark/mllib/regression/JavaLinearRegressionSuite.java b/mllib/src/test/java/spark/mllib/regression/JavaLinearRegressionSuite.java
index 14d3d4ef39..d2d8a62980 100644
--- a/mllib/src/test/java/spark/mllib/regression/JavaLinearRegressionSuite.java
+++ b/mllib/src/test/java/spark/mllib/regression/JavaLinearRegressionSuite.java
@@ -27,6 +27,7 @@ import org.junit.Test;
import spark.api.java.JavaRDD;
import spark.api.java.JavaSparkContext;
+import spark.mllib.util.LinearDataGenerator;
public class JavaLinearRegressionSuite implements Serializable {
private transient JavaSparkContext sc;
@@ -61,10 +62,10 @@ public class JavaLinearRegressionSuite implements Serializable {
double A = 2.0;
double[] weights = {-1.5, 1.0e-2};
- JavaRDD<LabeledPoint> testRDD = sc.parallelize(LinearRegressionSuite.generateLinearRegressionInputAsList(A,
+ JavaRDD<LabeledPoint> testRDD = sc.parallelize(LinearDataGenerator.generateLinearInputAsList(A,
weights, nPoints, 42), 2).cache();
List<LabeledPoint> validationData =
- LinearRegressionSuite.generateLinearRegressionInputAsList(A, weights, nPoints, 17);
+ LinearDataGenerator.generateLinearInputAsList(A, weights, nPoints, 17);
LinearRegressionWithSGD svmSGDImpl = new LinearRegressionWithSGD();
svmSGDImpl.optimizer().setStepSize(1.0)
@@ -82,10 +83,10 @@ public class JavaLinearRegressionSuite implements Serializable {
double A = 2.0;
double[] weights = {-1.5, 1.0e-2};
- JavaRDD<LabeledPoint> testRDD = sc.parallelize(LinearRegressionSuite.generateLinearRegressionInputAsList(A,
+ JavaRDD<LabeledPoint> testRDD = sc.parallelize(LinearDataGenerator.generateLinearInputAsList(A,
weights, nPoints, 42), 2).cache();
List<LabeledPoint> validationData =
- LinearRegressionSuite.generateLinearRegressionInputAsList(A, weights, nPoints, 17);
+ LinearDataGenerator.generateLinearInputAsList(A, weights, nPoints, 17);
LinearRegressionModel model = LinearRegressionWithSGD.train(testRDD.rdd(), 100, 1.0, 1.0);
diff --git a/mllib/src/test/java/spark/mllib/regression/JavaRidgeRegressionSuite.java b/mllib/src/test/java/spark/mllib/regression/JavaRidgeRegressionSuite.java
index 4f379b51d5..72ab875985 100644
--- a/mllib/src/test/java/spark/mllib/regression/JavaRidgeRegressionSuite.java
+++ b/mllib/src/test/java/spark/mllib/regression/JavaRidgeRegressionSuite.java
@@ -27,6 +27,7 @@ import org.junit.Test;
import spark.api.java.JavaRDD;
import spark.api.java.JavaSparkContext;
+import spark.mllib.util.LinearDataGenerator;
public class JavaRidgeRegressionSuite implements Serializable {
private transient JavaSparkContext sc;
@@ -61,10 +62,10 @@ public class JavaRidgeRegressionSuite implements Serializable {
double A = 2.0;
double[] weights = {-1.5, 1.0e-2};
- JavaRDD<LabeledPoint> testRDD = sc.parallelize(RidgeRegressionSuite.generateRidgeRegressionInputAsList(A,
+ JavaRDD<LabeledPoint> testRDD = sc.parallelize(LinearDataGenerator.generateLinearInputAsList(A,
weights, nPoints, 42), 2).cache();
List<LabeledPoint> validationData =
- RidgeRegressionSuite.generateRidgeRegressionInputAsList(A, weights, nPoints, 17);
+ LinearDataGenerator.generateLinearInputAsList(A, weights, nPoints, 17);
RidgeRegressionWithSGD svmSGDImpl = new RidgeRegressionWithSGD();
svmSGDImpl.optimizer().setStepSize(1.0)
@@ -82,10 +83,10 @@ public class JavaRidgeRegressionSuite implements Serializable {
double A = 2.0;
double[] weights = {-1.5, 1.0e-2};
- JavaRDD<LabeledPoint> testRDD = sc.parallelize(RidgeRegressionSuite.generateRidgeRegressionInputAsList(A,
+ JavaRDD<LabeledPoint> testRDD = sc.parallelize(LinearDataGenerator.generateLinearInputAsList(A,
weights, nPoints, 42), 2).cache();
List<LabeledPoint> validationData =
- RidgeRegressionSuite.generateRidgeRegressionInputAsList(A, weights, nPoints, 17);
+ LinearDataGenerator.generateLinearInputAsList(A, weights, nPoints, 17);
RidgeRegressionModel model = RidgeRegressionWithSGD.train(testRDD.rdd(), 100, 1.0, 0.01, 1.0);
diff --git a/mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala b/mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala
index 55a738f1e4..622dbbab7f 100644
--- a/mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala
+++ b/mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala
@@ -24,37 +24,8 @@ import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
import spark.SparkContext
+import spark.mllib.util.LinearDataGenerator
-import org.jblas.DoubleMatrix
-
-object LassoSuite {
-
- def generateLassoInputAsList(
- intercept: Double,
- weights: Array[Double],
- nPoints: Int,
- seed: Int): java.util.List[LabeledPoint] = {
- seqAsJavaList(generateLassoInput(intercept, weights, nPoints, seed))
- }
-
-
- // Generate noisy input of the form Y = x.dot(weights) + intercept + noise
- def generateLassoInput(
- intercept: Double,
- weights: Array[Double],
- nPoints: Int,
- seed: Int): Seq[LabeledPoint] = {
- val rnd = new Random(seed)
- val weightsMat = new DoubleMatrix(1, weights.length, weights:_*)
- val x = Array.fill[Array[Double]](nPoints)(
- Array.fill[Double](weights.length)(rnd.nextGaussian()))
- val y = x.map(xi =>
- (new DoubleMatrix(1, xi.length, xi:_*)).dot(weightsMat) + intercept + 0.1 * rnd.nextGaussian()
- )
- y.zip(x).map(p => LabeledPoint(p._1, p._2))
- }
-
-}
class LassoSuite extends FunSuite with BeforeAndAfterAll {
@transient private var sc: SparkContext = _
@@ -85,7 +56,7 @@ class LassoSuite extends FunSuite with BeforeAndAfterAll {
val B = -1.5
val C = 1.0e-2
- val testData = LassoSuite.generateLassoInput(A, Array[Double](B,C), nPoints, 42)
+ val testData = LinearDataGenerator.generateLinearInput(A, Array[Double](B,C), nPoints, 42)
val testRDD = sc.parallelize(testData, 2)
testRDD.cache()
@@ -101,7 +72,7 @@ class LassoSuite extends FunSuite with BeforeAndAfterAll {
assert(weight0 >= -1.60 && weight0 <= -1.40, weight0 + " not in [-1.6, -1.4]")
assert(weight1 >= -1.0e-3 && weight1 <= 1.0e-3, weight1 + " not in [-0.001, 0.001]")
- val validationData = LassoSuite.generateLassoInput(A, Array[Double](B,C), nPoints, 17)
+ val validationData = LinearDataGenerator.generateLinearInput(A, Array[Double](B,C), nPoints, 17)
val validationRDD = sc.parallelize(validationData, 2)
// Test prediction on RDD.
@@ -118,7 +89,7 @@ class LassoSuite extends FunSuite with BeforeAndAfterAll {
val B = -1.5
val C = 1.0e-2
- val testData = LassoSuite.generateLassoInput(A, Array[Double](B,C), nPoints, 42)
+ val testData = LinearDataGenerator.generateLinearInput(A, Array[Double](B,C), nPoints, 42)
val initialB = -1.0
val initialC = -1.0
@@ -138,7 +109,7 @@ class LassoSuite extends FunSuite with BeforeAndAfterAll {
assert(weight0 >= -1.60 && weight0 <= -1.40, weight0 + " not in [-1.6, -1.4]")
assert(weight1 >= -1.0e-3 && weight1 <= 1.0e-3, weight1 + " not in [-0.001, 0.001]")
- val validationData = LassoSuite.generateLassoInput(A, Array[Double](B,C), nPoints, 17)
+ val validationData = LinearDataGenerator.generateLinearInput(A, Array[Double](B,C), nPoints, 17)
val validationRDD = sc.parallelize(validationData,2)
// Test prediction on RDD.
diff --git a/mllib/src/test/scala/spark/mllib/regression/LinearRegressionSuite.scala b/mllib/src/test/scala/spark/mllib/regression/LinearRegressionSuite.scala
index c794c1cac5..3d22b7d385 100644
--- a/mllib/src/test/scala/spark/mllib/regression/LinearRegressionSuite.scala
+++ b/mllib/src/test/scala/spark/mllib/regression/LinearRegressionSuite.scala
@@ -17,46 +17,12 @@
package spark.mllib.regression
-import scala.collection.JavaConversions._
-import scala.util.Random
-
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
import spark.SparkContext
import spark.SparkContext._
-import spark.mllib.util.LinearRegressionDataGenerator
-import spark.mllib.regression.LabeledPoint
-import org.jblas.DoubleMatrix
-
-object LinearRegressionSuite {
-
- def generateLinearRegressionInputAsList(
- intercept: Double,
- weights: Array[Double],
- nPoints: Int,
- seed: Int): java.util.List[LabeledPoint] = {
- seqAsJavaList(generateLinearRegressionInput(intercept, weights, nPoints, seed))
- }
-
-
- // Generate noisy input of the form Y = x.dot(weights) + intercept + noise
- def generateLinearRegressionInput(
- intercept: Double,
- weights: Array[Double],
- nPoints: Int,
- seed: Int): Seq[LabeledPoint] = {
- val rnd = new Random(seed)
- val weightsMat = new DoubleMatrix(1, weights.length, weights:_*)
- val x = Array.fill[Array[Double]](nPoints)(
- Array.fill[Double](weights.length)(rnd.nextGaussian()))
- val y = x.map(xi =>
- (new DoubleMatrix(1, xi.length, xi:_*)).dot(weightsMat) + intercept + 0.1 * rnd.nextGaussian()
- )
- y.zip(x).map(p => LabeledPoint(p._1, p._2))
- }
-
-}
+import spark.mllib.util.LinearDataGenerator
class LinearRegressionSuite extends FunSuite with BeforeAndAfterAll {
@transient private var sc: SparkContext = _
@@ -73,7 +39,7 @@ class LinearRegressionSuite extends FunSuite with BeforeAndAfterAll {
// Test if we can correctly learn Y = 3 + 10*X1 + 10*X2 when
// X1 and X2 are collinear.
test("multi-collinear variables") {
- val testRDD = LinearRegressionDataGenerator.generateLinearRDD(sc, 100, 2, 0.0, intercept=3.0).cache()
+ val testRDD = LinearDataGenerator.generateLinearRDD(sc, 100, 2, 0.0, Array(10.0, 10.0), intercept=3.0).cache()
val linReg = new LinearRegressionWithSGD()
linReg.optimizer.setNumIterations(1000).setStepSize(1.0)
diff --git a/mllib/src/test/scala/spark/mllib/regression/RidgeRegressionSuite.scala b/mllib/src/test/scala/spark/mllib/regression/RidgeRegressionSuite.scala
index aaac083ad9..0237ccdf87 100644
--- a/mllib/src/test/scala/spark/mllib/regression/RidgeRegressionSuite.scala
+++ b/mllib/src/test/scala/spark/mllib/regression/RidgeRegressionSuite.scala
@@ -25,37 +25,7 @@ import org.scalatest.FunSuite
import spark.SparkContext
import spark.SparkContext._
-import spark.mllib.util.RidgeRegressionDataGenerator
-import org.jblas.DoubleMatrix
-
-object RidgeRegressionSuite {
-
- def generateRidgeRegressionInputAsList(
- intercept: Double,
- weights: Array[Double],
- nPoints: Int,
- seed: Int): java.util.List[LabeledPoint] = {
- seqAsJavaList(generateRidgeRegressionInput(intercept, weights, nPoints, seed))
- }
-
-
- // Generate noisy input of the form Y = x.dot(weights) + intercept + noise
- def generateRidgeRegressionInput(
- intercept: Double,
- weights: Array[Double],
- nPoints: Int,
- seed: Int): Seq[LabeledPoint] = {
- val rnd = new Random(seed)
- val weightsMat = new DoubleMatrix(1, weights.length, weights:_*)
- val x = Array.fill[Array[Double]](nPoints)(
- Array.fill[Double](weights.length)(rnd.nextGaussian()))
- val y = x.map(xi =>
- (new DoubleMatrix(1, xi.length, xi:_*)).dot(weightsMat) + intercept + 0.1 * rnd.nextGaussian()
- )
- y.zip(x).map(p => LabeledPoint(p._1, p._2))
- }
-
-}
+import spark.mllib.util.LinearDataGenerator
class RidgeRegressionSuite extends FunSuite with BeforeAndAfterAll {
@@ -73,7 +43,7 @@ class RidgeRegressionSuite extends FunSuite with BeforeAndAfterAll {
// Test if we can correctly learn Y = 3 + 10*X1 + 10*X2 when
// X1 and X2 are collinear.
test("multi-collinear variables") {
- val testRDD = RidgeRegressionDataGenerator.generateRidgeRDD(sc, 100, 2, 0.0, intercept=3.0).cache()
+ val testRDD = LinearDataGenerator.generateLinearRDD(sc, 100, 2, 0.0, Array(10.0, 10.0), intercept=3.0).cache()
val ridgeReg = new RidgeRegressionWithSGD()
ridgeReg.optimizer.setNumIterations(1000).setRegParam(0.0).setStepSize(1.0)
@@ -86,7 +56,7 @@ class RidgeRegressionSuite extends FunSuite with BeforeAndAfterAll {
}
test("multi-collinear variables with regularization") {
- val testRDD = RidgeRegressionDataGenerator.generateRidgeRDD(sc, 100, 2, 0.0, intercept=3.0).cache()
+ val testRDD = LinearDataGenerator.generateLinearRDD(sc, 100, 2, 0.0, Array(10.0, 10.0), intercept=3.0).cache()
val ridgeReg = new RidgeRegressionWithSGD()
ridgeReg.optimizer.setNumIterations(1000).setRegParam(1.0).setStepSize(1.0)
@@ -94,7 +64,7 @@ class RidgeRegressionSuite extends FunSuite with BeforeAndAfterAll {
assert(model.intercept <= 5.0)
assert(model.weights.length === 2)
- assert(model.weights(0) <= 3.0)
+ assert(model.weights(0) <= 4.0)
assert(model.weights(1) <= 3.0)
}
}