path: root/graph/src
author    Wang Jianping J <jianping.j.wang@gmail.com>  2013-12-21 13:02:59 +0800
committer Wang Jianping J <jianping.j.wang@gmail.com>  2013-12-21 13:02:59 +0800
commit    47eefd30edfb9c13d53729b743bd1a29935977cd (patch)
tree      3d1201a8e6b8020666d4edeaa2b4f29069397fce /graph/src
parent    343d8977aa7d53f381b014778fb60106f9cbcabb (diff)
add javadoc
Diffstat (limited to 'graph/src')
-rw-r--r--  graph/src/main/scala/org/apache/spark/graph/algorithms/Svdpp.scala  38
1 file changed, 23 insertions, 15 deletions
diff --git a/graph/src/main/scala/org/apache/spark/graph/algorithms/Svdpp.scala b/graph/src/main/scala/org/apache/spark/graph/algorithms/Svdpp.scala
index ffd0ddba7e..28c717bfcb 100644
--- a/graph/src/main/scala/org/apache/spark/graph/algorithms/Svdpp.scala
+++ b/graph/src/main/scala/org/apache/spark/graph/algorithms/Svdpp.scala
@@ -20,8 +20,15 @@ class Msg ( // message
) extends Serializable
object Svdpp {
- // implement SVD++ based on http://public.research.att.com/~volinsky/netflix/kdd08koren.pdf
- // model (15) on page 6
+ /**
+ * Implement SVD++ based on "Factorization Meets the Neighborhood: a Multifaceted Collaborative Filtering Model",
+ * available at [[http://public.research.att.com/~volinsky/netflix/kdd08koren.pdf]].
+ * The prediction rule is rui = u + bu + bi + qi*(pu + |N(u)|^(-0.5)*sum(y)); see model (15) on page 6 of the paper.
+ *
+ * @param edges edges for constructing the graph
+ *
+ * @return a graph with vertex attributes containing the trained model
+ */
def run(edges: RDD[Edge[Double]]): Graph[VT, Double] = {
// default parameters
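
For reference, the prediction rule quoted in the new javadoc is model (15) of the Koren KDD'08 paper. Written out (a transcription of the comment above, not part of the patch; the leading u in the comment denotes the global rating mean \mu):

    \hat{r}_{ui} = \mu + b_u + b_i + q_i^T \Big( p_u + |N(u)|^{-1/2} \sum_{j \in N(u)} y_j \Big)

Here b_u and b_i are the user and item biases, N(u) is the set of items rated by user u, and p_u, q_i, y_j are the rank-sized latent vectors held in the vertex attributes.
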
@@ -33,7 +40,8 @@ object Svdpp {
val gamma2 = 0.007
val gamma6 = 0.005
val gamma7 = 0.015
-
+
+ // generate default vertex attribute
def defaultF(rank: Int) = {
val v1 = new ArrayRealVector(rank)
val v2 = new ArrayRealVector(rank)
@@ -70,8 +78,8 @@ object Svdpp {
// calculate initial norm and bias
val t0 = g.mapReduceTriplets(mapF0, reduceF0)
- g.outerJoinVertices(t0) {updateF0}
-
+ g.outerJoinVertices(t0) {updateF0}
+
// phase 1
def mapF1(et: EdgeTriplet[VT, Double]): Iterator[(Vid, RealVector)] = {
assert(et.srcAttr != null && et.dstAttr != null)
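
As far as can be inferred from this hunk together with the comments added below (the full bodies of mapF1, reduceF1 and updateF1 are not shown here), phase 1 sums the y vectors of the items each user rated and stores

    v2_u = p_u + |N(u)|^{-1/2} \sum_{j \in N(u)} y_j

in the user's vertex attribute. Under that reading, user vertices keep p_u in v1 and the combined factor in v2, while item vertices keep q_i in v1 and y_i in v2.
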
@@ -98,10 +106,10 @@ object Svdpp {
pred = math.max(pred, minVal)
pred = math.min(pred, maxVal)
val err = et.attr - pred
- val y = (q.mapMultiply(err*usr.norm)).subtract((itm.v2).mapMultiply(gamma7))
- val newP = (q.mapMultiply(err)).subtract(p.mapMultiply(gamma7)) // for each connected item q
- val newQ = (usr.v2.mapMultiply(err)).subtract(q.mapMultiply(gamma7))
- Iterator((et.srcId, new Msg(newP, y, err - gamma6*usr.bias)), (et.dstId, new Msg(newQ, y, err - gamma6*itm.bias)))
+ val updateY = (q.mapMultiply(err*usr.norm)).subtract((itm.v2).mapMultiply(gamma7))
+ val updateP = (q.mapMultiply(err)).subtract(p.mapMultiply(gamma7))
+ val updateQ = (usr.v2.mapMultiply(err)).subtract(q.mapMultiply(gamma7))
+ Iterator((et.srcId, new Msg(updateP, updateY, err - gamma6*usr.bias)), (et.dstId, new Msg(updateQ, updateY, err - gamma6*itm.bias)))
}
def reduceF2(g1: Msg, g2: Msg):Msg = {
g1.v1 = g1.v1.add(g2.v1)
@@ -112,7 +120,7 @@ object Svdpp {
def updateF2(vid: Vid, vd: VT, msg: Option[Msg]) = {
if (msg.isDefined) {
vd.v1 = vd.v1.add(msg.get.v1.mapMultiply(gamma2))
- if (vid % 2 == 1) { // item node update y
+ if (vid % 2 == 1) { // item nodes update y
vd.v2 = vd.v2.add(msg.get.v2.mapMultiply(gamma2))
}
vd.bias += msg.get.bias*gamma1
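
Taken together, mapF2, reduceF2 and updateF2 apply per-rating gradient steps. In the paper's notation this reads as follows (a reading of the code above, not part of the patch; gamma1 and gamma2 act as step sizes, gamma6 and gamma7 as regularization weights, and e_{ui} = r_{ui} - \hat{r}_{ui} is the prediction error err):

    b   \leftarrow b   + \gamma_1 (e_{ui} - \gamma_6 b)    \quad \text{(user and item biases)}
    p_u \leftarrow p_u + \gamma_2 (e_{ui} q_i - \gamma_7 p_u)
    q_i \leftarrow q_i + \gamma_2 (e_{ui} (p_u + |N(u)|^{-1/2} \sum_{j \in N(u)} y_j) - \gamma_7 q_i)
    y_j \leftarrow y_j + \gamma_2 (e_{ui} |N(u)|^{-1/2} q_i - \gamma_7 y_j)
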
@@ -121,14 +129,14 @@ object Svdpp {
}
for (i <- 0 until maxIters) {
- // phase 1
+ // phase 1, calculate v2 for user nodes
val t1: VertexRDD[RealVector] = g.mapReduceTriplets(mapF1, reduceF1)
- g.outerJoinVertices(t1) {updateF1}
- // phase 2
+ g.outerJoinVertices(t1) {updateF1}
+ // phase 2, update p for user nodes and q, y for item nodes
val t2: VertexRDD[Msg] = g.mapReduceTriplets(mapF2, reduceF2)
g.outerJoinVertices(t2) {updateF2}
}
-
+
// calculate error on training set
def mapF3(et: EdgeTriplet[VT, Double]): Iterator[(Vid, Double)] = {
assert(et.srcAttr != null && et.dstAttr != null)
@@ -143,7 +151,7 @@ object Svdpp {
Iterator((et.dstId, err))
}
def updateF3(vid: Vid, vd: VT, msg: Option[Double]) = {
- if (msg.isDefined && vid % 2 == 1) { // item sum up the errors
+ if (msg.isDefined && vid % 2 == 1) { // item nodes sum up the errors
vd.norm = msg.get
}
vd
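
A minimal driver sketch for the run method documented in this patch. Only Svdpp.run(edges: RDD[Edge[Double]]) and its Graph[VT, Double] return type come from the diff; the input file, its "userId itemId rating" line format, and the id mapping below are hypothetical, chosen to match the even/odd user/item vertex-id convention the vid % 2 checks above rely on.

    import org.apache.spark.SparkContext
    import org.apache.spark.rdd.RDD
    import org.apache.spark.graph._
    import org.apache.spark.graph.algorithms.Svdpp

    val sc = new SparkContext("local", "svdpp-example")
    // Hypothetical input: one "userId itemId rating" triple per line.
    val edges: RDD[Edge[Double]] = sc.textFile("ratings.txt").map { line =>
      val fields = line.split(" ")
      // Map users to even vertex ids and items to odd ones, as updateF2/updateF3 assume.
      Edge(fields(0).toLong * 2, fields(1).toLong * 2 + 1, fields(2).toDouble)
    }
    val model = Svdpp.run(edges) // Graph[VT, Double]; the trained model lives in the vertex attributes
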