author     Ankur Dave <ankurdave@gmail.com>    2014-01-12 21:47:16 -0800
committer  Ankur Dave <ankurdave@gmail.com>    2014-01-12 21:47:16 -0800
commit     d691e9f47ed9b43b422712047183142d01c5e8c2 (patch)
tree       41dcc5954b14c08dc63714e7e3c62ff57a86f61d /graphx
parent     20c509b805dbfd0ebb11d2d7bd53a4379249a86f (diff)
download   spark-d691e9f47ed9b43b422712047183142d01c5e8c2.tar.gz
           spark-d691e9f47ed9b43b422712047183142d01c5e8c2.tar.bz2
           spark-d691e9f47ed9b43b422712047183142d01c5e8c2.zip
Move algorithms to GraphOps
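
With this change the algorithm entry points live on GraphOps rather than behind the lib package's implicit conversion, so callers no longer need to import org.apache.spark.graphx.lib._. A minimal sketch of the resulting call sites, assuming an existing graph (loadGraph() is the same placeholder used in the deleted Algorithms Scaladoc below) and assuming GraphOps methods are reachable on a Graph through the usual implicit conversion (or explicitly via graph.ops):

    import org.apache.spark.graphx._

    // `loadGraph()` is a placeholder for however the graph is actually built.
    val graph: Graph[Int, Double] = loadGraph()

    // Algorithm methods now defined on GraphOps; no `lib._` import required.
    val ranks       = graph.pageRank(tol = 0.001)                    // Graph[Double, Double]
    val staticRanks = graph.staticPageRank(numIter = 10)             // Graph[Double, Double]
    val components  = graph.connectedComponents()                    // Graph[VertexID, Double]
    val triangles   = graph.triangleCount()                          // Graph[Int, Double]
    val sccs        = graph.stronglyConnectedComponents(numIter = 5) // Graph[VertexID, Double]
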
Diffstat (limited to 'graphx')
-rw-r--r--  graphx/src/main/scala/org/apache/spark/graphx/Graph.scala          |  4
-rw-r--r--  graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala       | 51
-rw-r--r--  graphx/src/main/scala/org/apache/spark/graphx/lib/Algorithms.scala | 66
-rw-r--r--  graphx/src/main/scala/org/apache/spark/graphx/lib/package.scala    |  8
4 files changed, 51 insertions, 78 deletions
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/Graph.scala b/graphx/src/main/scala/org/apache/spark/graphx/Graph.scala
index 56513cac20..7d4f0de3d6 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/Graph.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/Graph.scala
@@ -15,9 +15,7 @@ import org.apache.spark.storage.StorageLevel
* RDDs, the graph is a functional data-structure in which mutating
* operations return new graphs.
*
- * @note [[GraphOps]] contains additional convenience operations.
- * [[lib.Algorithms]] contains graph algorithms; to access these,
- * import `org.apache.spark.graphx.lib._`.
+ * @note [[GraphOps]] contains additional convenience operations and graph algorithms.
*
* @tparam VD the vertex attribute type
* @tparam ED the edge attribute type
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala b/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala
index 4fdff29f5a..2b3b95e2ca 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala
@@ -2,9 +2,10 @@ package org.apache.spark.graphx
import scala.reflect.ClassTag
-import org.apache.spark.rdd.RDD
import org.apache.spark.SparkContext._
import org.apache.spark.SparkException
+import org.apache.spark.graphx.lib._
+import org.apache.spark.rdd.RDD
/**
* Contains additional functionality for [[Graph]]. All operations are expressed in terms of the
@@ -298,4 +299,52 @@ class GraphOps[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED]) {
Pregel(graph, initialMsg, maxIterations, activeDirection)(vprog, sendMsg, mergeMsg)
}
+ /**
+ * Run a dynamic version of PageRank returning a graph with vertex attributes containing the
+ * PageRank and edge attributes containing the normalized edge weight.
+ *
+ * @see [[org.apache.spark.graphx.lib.PageRank]], method `runUntilConvergence`.
+ */
+ def pageRank(tol: Double, resetProb: Double = 0.15): Graph[Double, Double] = {
+ PageRank.runUntilConvergence(graph, tol, resetProb)
+ }
+
+ /**
+ * Run PageRank for a fixed number of iterations returning a graph with vertex attributes
+ * containing the PageRank and edge attributes the normalized edge weight.
+ *
+ * @see [[org.apache.spark.graphx.lib.PageRank]], method `run`.
+ */
+ def staticPageRank(numIter: Int, resetProb: Double = 0.15): Graph[Double, Double] = {
+ PageRank.run(graph, numIter, resetProb)
+ }
+
+ /**
+ * Compute the connected component membership of each vertex and return a graph with the vertex
+ * value containing the lowest vertex id in the connected component containing that vertex.
+ *
+ * @see [[org.apache.spark.graphx.lib.ConnectedComponents]]
+ */
+ def connectedComponents(): Graph[VertexID, ED] = {
+ ConnectedComponents.run(graph)
+ }
+
+ /**
+ * Compute the number of triangles passing through each vertex.
+ *
+ * @see [[org.apache.spark.graphx.lib.TriangleCount]]
+ */
+ def triangleCount(): Graph[Int, ED] = {
+ TriangleCount.run(graph)
+ }
+
+ /**
+ * Compute the strongly connected component (SCC) of each vertex and return a graph with the
+ * vertex value containing the lowest vertex id in the SCC containing that vertex.
+ *
+ * @see [[org.apache.spark.graphx.lib.StronglyConnectedComponents]]
+ */
+ def stronglyConnectedComponents(numIter: Int): Graph[VertexID, ED] = {
+ StronglyConnectedComponents.run(graph, numIter)
+ }
} // end of GraphOps
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/Algorithms.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/Algorithms.scala
deleted file mode 100644
index cbcd9c24a0..0000000000
--- a/graphx/src/main/scala/org/apache/spark/graphx/lib/Algorithms.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-package org.apache.spark.graphx.lib
-
-import scala.reflect.ClassTag
-
-import org.apache.spark.graphx._
-
-/**
- * Provides graph algorithms directly on [[org.apache.spark.graphx.Graph]] via an implicit
- * conversion.
- * @example
- * {{{
- * import org.apache.spark.graph.lib._
- * val graph: Graph[_, _] = loadGraph()
- * graph.connectedComponents()
- * }}}
- */
-class Algorithms[VD: ClassTag, ED: ClassTag](self: Graph[VD, ED]) {
- /**
- * Run a dynamic version of PageRank returning a graph with vertex attributes containing the
- * PageRank and edge attributes containing the normalized edge weight.
- *
- * @see [[org.apache.spark.graphx.lib.PageRank]], method `runUntilConvergence`.
- */
- def pageRank(tol: Double, resetProb: Double = 0.15): Graph[Double, Double] = {
- PageRank.runUntilConvergence(self, tol, resetProb)
- }
-
- /**
- * Run PageRank for a fixed number of iterations returning a graph with vertex attributes
- * containing the PageRank and edge attributes the normalized edge weight.
- *
- * @see [[org.apache.spark.graphx.lib.PageRank]], method `run`.
- */
- def staticPageRank(numIter: Int, resetProb: Double = 0.15): Graph[Double, Double] = {
- PageRank.run(self, numIter, resetProb)
- }
-
- /**
- * Compute the connected component membership of each vertex and return a graph with the vertex
- * value containing the lowest vertex id in the connected component containing that vertex.
- *
- * @see [[org.apache.spark.graphx.lib.ConnectedComponents]]
- */
- def connectedComponents(): Graph[VertexID, ED] = {
- ConnectedComponents.run(self)
- }
-
- /**
- * Compute the number of triangles passing through each vertex.
- *
- * @see [[org.apache.spark.graphx.lib.TriangleCount]]
- */
- def triangleCount(): Graph[Int, ED] = {
- TriangleCount.run(self)
- }
-
- /**
- * Compute the strongly connected component (SCC) of each vertex and return a graph with the
- * vertex value containing the lowest vertex id in the SCC containing that vertex.
- *
- * @see [[org.apache.spark.graphx.lib.StronglyConnectedComponents]]
- */
- def stronglyConnectedComponents(numIter: Int): Graph[VertexID, ED] = {
- StronglyConnectedComponents.run(self, numIter)
- }
-}
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/package.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/package.scala
deleted file mode 100644
index f6f2626c9d..0000000000
--- a/graphx/src/main/scala/org/apache/spark/graphx/lib/package.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-package org.apache.spark.graphx
-
-import scala.reflect.ClassTag
-
-package object lib {
- implicit def graphToAlgorithms[VD: ClassTag, ED: ClassTag](
- graph: Graph[VD, ED]): Algorithms[VD, ED] = new Algorithms(graph)
-}