author     baishuo(白硕) <vc_java@hotmail.com>  2014-05-07 16:02:55 -0700
committer  Patrick Wendell <pwendell@gmail.com>  2014-05-07 16:02:55 -0700
commit     0c19bb161b9b2b96c0c55d3ea09e81fd798cbec0 (patch)
tree       43c84b980febf099adaaea8387cb7a5c37c5ddf5
parent     3188553f73970270717a7fee4a116e29ad4becc9 (diff)
Update GradientDescentSuite.scala
Use a faster way to construct an array.

Author: baishuo(白硕) <vc_java@hotmail.com>

Closes #588 from baishuo/master and squashes the following commits:

45b95fb [baishuo(白硕)] Update GradientDescentSuite.scala
c03b61c [baishuo(白硕)] Update GradientDescentSuite.scala
b666d27 [baishuo(白硕)] Update GradientDescentSuite.scala
-rw-r--r--  mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala | 6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala
index c4b433499a..8a16284118 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala
@@ -81,11 +81,11 @@ class GradientDescentSuite extends FunSuite with LocalSparkContext with ShouldMa
     // Add a extra variable consisting of all 1.0's for the intercept.
     val testData = GradientDescentSuite.generateGDInput(A, B, nPoints, 42)
     val data = testData.map { case LabeledPoint(label, features) =>
-      label -> Vectors.dense(1.0, features.toArray: _*)
+      label -> Vectors.dense(1.0 +: features.toArray)
     }
 
     val dataRDD = sc.parallelize(data, 2).cache()
-    val initialWeightsWithIntercept = Vectors.dense(1.0, initialWeights: _*)
+    val initialWeightsWithIntercept = Vectors.dense(1.0 +: initialWeights.toArray)
 
     val (_, loss) = GradientDescent.runMiniBatchSGD(
       dataRDD,
@@ -111,7 +111,7 @@ class GradientDescentSuite extends FunSuite with LocalSparkContext with ShouldMa
     // Add a extra variable consisting of all 1.0's for the intercept.
     val testData = GradientDescentSuite.generateGDInput(2.0, -1.5, 10000, 42)
     val data = testData.map { case LabeledPoint(label, features) =>
-      label -> Vectors.dense(1.0, features.toArray: _*)
+      label -> Vectors.dense(1.0 +: features.toArray)
     }
 
     val dataRDD = sc.parallelize(data, 2).cache()
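
For readers who want the two constructions side by side outside the test, here is a minimal, self-contained sketch. It is not part of the patch: the object name and the sample features array are made up for illustration, and it assumes spark-mllib is on the classpath. The old form splats the array into the Double* varargs overload of Vectors.dense, which rebuilds the values before wrapping them; the new form prepends 1.0 into a fresh Array[Double] and hands it straight to the array overload, which is presumably the speedup the commit message refers to.

import org.apache.spark.mllib.linalg.Vectors

// Hypothetical sketch, not from the patch: contrasts the old varargs call
// with the new Array-prepend call that this commit switches to.
object InterceptVectorSketch {
  def main(args: Array[String]): Unit = {
    val features = Array(0.5, -1.2, 3.4)  // stand-in for LabeledPoint.features.toArray

    // Old form: the array is splatted into the varargs overload
    // Vectors.dense(firstValue: Double, otherValues: Double*).
    val viaVarargs = Vectors.dense(1.0, features: _*)

    // New form: Array.+: prepends 1.0 into a fresh Array[Double], which the
    // Vectors.dense(values: Array[Double]) overload wraps directly.
    val viaPrepend = Vectors.dense(1.0 +: features)

    // Both yield the dense vector [1.0, 0.5, -1.2, 3.4].
    assert(viaVarargs.toArray.sameElements(viaPrepend.toArray))
    println(viaPrepend)
  }
}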