author    Ankur Dave <ankurdave@gmail.com>  2014-01-09 14:05:09 -0800
committer Ankur Dave <ankurdave@gmail.com>  2014-01-09 14:05:09 -0800
commit    100718bcd3f6ade1a93256458ec1528bb9142b5e (patch)
tree      bea33f5d033baebb9fe1010648d2da6d0b36b6fc
parent    43e1bdc80c2b19533596df74fd7b97a2d7b84bb6 (diff)
Svdpp -> SVDPlusPlus
-rw-r--r--  graph/src/main/scala/org/apache/spark/graph/algorithms/SVDPlusPlus.scala  (renamed from graph/src/main/scala/org/apache/spark/graph/algorithms/Svdpp.scala)  | 12
-rw-r--r--  graph/src/test/scala/org/apache/spark/graph/algorithms/SVDPlusPlusSuite.scala  (renamed from graph/src/test/scala/org/apache/spark/graph/algorithms/SvdppSuite.scala)  | 10
2 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/graph/src/main/scala/org/apache/spark/graph/algorithms/Svdpp.scala b/graph/src/main/scala/org/apache/spark/graph/algorithms/SVDPlusPlus.scala
index 85fa23d309..083aa30538 100644
--- a/graph/src/main/scala/org/apache/spark/graph/algorithms/Svdpp.scala
+++ b/graph/src/main/scala/org/apache/spark/graph/algorithms/SVDPlusPlus.scala
@@ -5,7 +5,7 @@ import org.apache.spark.graph._
import scala.util.Random
import org.apache.commons.math.linear._
-class SvdppConf( // Svdpp parameters
+class SVDPlusPlusConf( // SVDPlusPlus parameters
var rank: Int,
var maxIters: Int,
var minVal: Double,
@@ -15,7 +15,7 @@ class SvdppConf( // Svdpp parameters
var gamma6: Double,
var gamma7: Double) extends Serializable
-object Svdpp {
+object SVDPlusPlus {
/**
* Implement SVD++ based on "Factorization Meets the Neighborhood: a Multifaceted Collaborative Filtering Model",
* paper is available at [[http://public.research.att.com/~volinsky/netflix/kdd08koren.pdf]].
@@ -23,12 +23,12 @@ object Svdpp {
*
* @param edges edges for constructing the graph
*
- * @param conf Svdpp parameters
+ * @param conf SVDPlusPlus parameters
*
* @return a graph with vertex attributes containing the trained model
*/
- def run(edges: RDD[Edge[Double]], conf: SvdppConf): (Graph[(RealVector, RealVector, Double, Double), Double], Double) = {
+ def run(edges: RDD[Edge[Double]], conf: SVDPlusPlusConf): (Graph[(RealVector, RealVector, Double, Double), Double], Double) = {
// generate default vertex attribute
def defaultF(rank: Int): (RealVector, RealVector, Double, Double) = {
@@ -55,7 +55,7 @@ object Svdpp {
(vd._1, vd._2, msg.get._2 / msg.get._1, 1.0 / scala.math.sqrt(msg.get._1))
}
- def mapTrainF(conf: SvdppConf, u: Double)(et: EdgeTriplet[(RealVector, RealVector, Double, Double), Double])
+ def mapTrainF(conf: SVDPlusPlusConf, u: Double)(et: EdgeTriplet[(RealVector, RealVector, Double, Double), Double])
: Iterator[(VertexID, (RealVector, RealVector, Double))] = {
val (usr, itm) = (et.srcAttr, et.dstAttr)
val (p, q) = (usr._1, itm._1)
@@ -85,7 +85,7 @@ object Svdpp {
}
// calculate error on training set
- def mapTestF(conf: SvdppConf, u: Double)(et: EdgeTriplet[(RealVector, RealVector, Double, Double), Double]): Iterator[(VertexID, Double)] = {
+ def mapTestF(conf: SVDPlusPlusConf, u: Double)(et: EdgeTriplet[(RealVector, RealVector, Double, Double), Double]): Iterator[(VertexID, Double)] = {
val (usr, itm) = (et.srcAttr, et.dstAttr)
val (p, q) = (usr._1, itm._1)
var pred = u + usr._3 + itm._3 + q.dotProduct(usr._2)
diff --git a/graph/src/test/scala/org/apache/spark/graph/algorithms/SvdppSuite.scala b/graph/src/test/scala/org/apache/spark/graph/algorithms/SVDPlusPlusSuite.scala
index 411dd3d336..a0a6eb33e3 100644
--- a/graph/src/test/scala/org/apache/spark/graph/algorithms/SvdppSuite.scala
+++ b/graph/src/test/scala/org/apache/spark/graph/algorithms/SVDPlusPlusSuite.scala
@@ -9,21 +9,21 @@ import org.apache.spark.graph.util.GraphGenerators
import org.apache.spark.rdd._
-class SvdppSuite extends FunSuite with LocalSparkContext {
+class SVDPlusPlusSuite extends FunSuite with LocalSparkContext {
test("Test SVD++ with mean square error on training set") {
withSpark { sc =>
- val SvdppErr = 8.0
+ val svdppErr = 8.0
val edges = sc.textFile("mllib/data/als/test.data").map { line =>
val fields = line.split(",")
Edge(fields(0).toLong * 2, fields(1).toLong * 2 + 1, fields(2).toDouble)
}
- val conf = new SvdppConf(10, 2, 0.0, 5.0, 0.007, 0.007, 0.005, 0.015) // 2 iterations
- var (graph, u) = Svdpp.run(edges, conf)
+ val conf = new SVDPlusPlusConf(10, 2, 0.0, 5.0, 0.007, 0.007, 0.005, 0.015) // 2 iterations
+ var (graph, u) = SVDPlusPlus.run(edges, conf)
val err = graph.vertices.collect.map{ case (vid, vd) =>
if (vid % 2 == 1) vd._4 else 0.0
}.reduce(_ + _) / graph.triplets.collect.size
- assert(err <= SvdppErr)
+ assert(err <= svdppErr)
}
}
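
For context, a minimal sketch of how the renamed entry point would be invoked after this commit. It mirrors the setup in SVDPlusPlusSuite above; the driver object name and the local SparkContext are illustrative, not part of the change.

    import org.apache.spark.SparkContext
    import org.apache.spark.graph._
    import org.apache.spark.graph.algorithms.{SVDPlusPlus, SVDPlusPlusConf}

    // Hypothetical driver; reproduces the test suite's setup end to end.
    object SVDPlusPlusExample {
      def main(args: Array[String]) {
        val sc = new SparkContext("local", "SVDPlusPlusExample")
        // Each line is "user,item,rating"; users get even vertex IDs and
        // items odd ones so the bipartite graph has no ID collisions.
        val edges = sc.textFile("mllib/data/als/test.data").map { line =>
          val fields = line.split(",")
          Edge(fields(0).toLong * 2, fields(1).toLong * 2 + 1, fields(2).toDouble)
        }
        // rank 10, 2 iterations, ratings bounded to [0.0, 5.0], then the
        // gamma1/gamma2/gamma6/gamma7 step sizes used by the test suite.
        val conf = new SVDPlusPlusConf(10, 2, 0.0, 5.0, 0.007, 0.007, 0.005, 0.015)
        val (graph, u) = SVDPlusPlus.run(edges, conf)
        // u is the global ratings mean; each vertex attribute holds the
        // trained model as a (RealVector, RealVector, Double, Double).
        println("Global rating mean: " + u)
        sc.stop()
      }
    }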