author      Sean Owen <sowen@cloudera.com>          2014-02-27 11:12:21 -0800
committer   Patrick Wendell <pwendell@gmail.com>    2014-02-27 11:12:21 -0800
commit      12bbca20657c17d5ebfceaacb37dddc851772675 (patch)
tree        53d717c10b2b7ede275608b1829e6f44389daa2d /bagel/src/main
parent      aace2c097ed2ca8bca33a3a3f07fb8bf772b3c50 (diff)
SPARK 1084.1 (resubmitted)
(Ported from https://github.com/apache/incubator-spark/pull/637 )

Author: Sean Owen <sowen@cloudera.com>

Closes #31 from srowen/SPARK-1084.1 and squashes the following commits:

6c4a32c [Sean Owen] Suppress warnings about legitimate unchecked array creations, or change code to avoid it
f35b833 [Sean Owen] Fix two misc javadoc problems
254e8ef [Sean Owen] Fix one new style error introduced in scaladoc warning commit
5b2fce2 [Sean Owen] Fix scaladoc invocation warning, and enable javac warnings properly, with plugin config updates
007762b [Sean Owen] Remove dead scaladoc links
b8ff8cb [Sean Owen] Replace deprecated Ant <tasks> with <target>
Diffstat (limited to 'bagel/src/main')
-rw-r--r--  bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala  14
1 file changed, 7 insertions, 7 deletions
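The hunks below show the "Remove dead scaladoc links" item in practice: wiki-style [[...]] links to classes that live outside the bagel module (SparkContext, Partitioner, StorageLevel, HashPartitioner) become plain fully qualified names, presumably because scaladoc only resolves [[...]] targets among the sources documented in the same run and otherwise warns about the unresolvable link. A minimal sketch of the pattern, mirroring the change in Bagel.scala:

// Before: scaladoc warns because org.apache.spark.SparkContext is not among the documented sources
/** @param sc [[org.apache.spark.SparkContext]] to use for the program. */

// After: plain fully qualified name, nothing for scaladoc to resolve
/** @param sc org.apache.spark.SparkContext to use for the program. */

Links to classes defined inside this module, such as [[org.apache.spark.bagel.Aggregator]] and [[org.apache.spark.bagel.DefaultCombiner]], are left untouched in the diff, since scaladoc can resolve them.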
diff --git a/bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala b/bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala
index dd3eed8aff..70c7474a93 100644
--- a/bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala
+++ b/bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala
@@ -27,7 +27,7 @@ object Bagel extends Logging {
/**
* Runs a Bagel program.
- * @param sc [[org.apache.spark.SparkContext]] to use for the program.
+ * @param sc org.apache.spark.SparkContext to use for the program.
* @param vertices vertices of the graph represented as an RDD of (Key, Vertex) pairs. Often the
* Key will be the vertex id.
* @param messages initial set of messages represented as an RDD of (Key, Message) pairs. Often
@@ -38,10 +38,10 @@ object Bagel extends Logging {
* @param aggregator [[org.apache.spark.bagel.Aggregator]] performs a reduce across all vertices
* after each superstep and provides the result to each vertex in the next
* superstep.
- * @param partitioner [[org.apache.spark.Partitioner]] partitions values by key
+ * @param partitioner org.apache.spark.Partitioner partitions values by key
* @param numPartitions number of partitions across which to split the graph.
* Default is the default parallelism of the SparkContext
- * @param storageLevel [[org.apache.spark.storage.StorageLevel]] to use for caching of
+ * @param storageLevel org.apache.spark.storage.StorageLevel to use for caching of
* intermediate RDDs in each superstep. Defaults to caching in memory.
* @param compute function that takes a Vertex, optional set of (possibly combined) messages to
* the Vertex, optional Aggregator and the current superstep,
@@ -131,7 +131,7 @@ object Bagel extends Logging {
/**
* Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], default
- * [[org.apache.spark.HashPartitioner]] and default storage level
+ * org.apache.spark.HashPartitioner and default storage level
*/
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
sc: SparkContext,
@@ -146,7 +146,7 @@ object Bagel extends Logging {
/**
* Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] and the
- * default [[org.apache.spark.HashPartitioner]]
+ * default org.apache.spark.HashPartitioner
*/
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
sc: SparkContext,
@@ -166,7 +166,7 @@ object Bagel extends Logging {
/**
* Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]],
- * default [[org.apache.spark.HashPartitioner]],
+ * default org.apache.spark.HashPartitioner,
* [[org.apache.spark.bagel.DefaultCombiner]] and the default storage level
*/
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest](
@@ -180,7 +180,7 @@ object Bagel extends Logging {
/**
* Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]],
- * the default [[org.apache.spark.HashPartitioner]]
+ * the default org.apache.spark.HashPartitioner
* and [[org.apache.spark.bagel.DefaultCombiner]]
*/
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest](