author    Matei Zaharia <matei@eecs.berkeley.edu>  2013-02-17 22:13:26 -0800
committer Matei Zaharia <matei@eecs.berkeley.edu>  2013-02-17 22:13:26 -0800
commit    06e5e6627f3856b5c6e3e60cbb167044de9ef6d4 (patch)
tree      071703ecd96e11405ffda520e66349a642c6c614 /bagel
parent    455d015076ab1fcafa99484c8dcf7cc9d740686a (diff)
Renamed "splits" to "partitions"
Diffstat (limited to 'bagel')
-rw-r--r--   bagel/src/main/scala/spark/bagel/Bagel.scala                                  20
-rw-r--r--   bagel/src/main/scala/spark/bagel/examples/WikipediaPageRank.scala              6
-rw-r--r--   bagel/src/main/scala/spark/bagel/examples/WikipediaPageRankStandalone.scala    2
3 files changed, 14 insertions, 14 deletions
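For callers, this change is purely a rename: anywhere numSplits was passed to one of the Bagel.run overloads, numPartitions is passed instead; behavior is unchanged. A minimal sketch of the new calling convention, assuming sc, vertices, messages, combiner, and compute are already in scope as in the PageRank example further down (the count of 64 is an arbitrary illustration):

    // Before this commit (old parameter name):
    //   Bagel.run(sc, vertices, messages, combiner, numSplits = 64)(compute)
    // After this commit (renamed parameter, same semantics):
    Bagel.run(sc, vertices, messages, combiner, numPartitions = 64)(compute)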
diff --git a/bagel/src/main/scala/spark/bagel/Bagel.scala b/bagel/src/main/scala/spark/bagel/Bagel.scala
index fa0ba4a573..094e57dacb 100644
--- a/bagel/src/main/scala/spark/bagel/Bagel.scala
+++ b/bagel/src/main/scala/spark/bagel/Bagel.scala
@@ -14,11 +14,11 @@ object Bagel extends Logging {
combiner: Combiner[M, C],
aggregator: Option[Aggregator[V, A]],
partitioner: Partitioner,
- numSplits: Int
+ numPartitions: Int
)(
compute: (V, Option[C], Option[A], Int) => (V, Array[M])
): RDD[(K, V)] = {
- val splits = if (numSplits != 0) numSplits else sc.defaultParallelism
+ val splits = if (numPartitions != 0) numPartitions else sc.defaultParallelism
var superstep = 0
var verts = vertices
@@ -56,12 +56,12 @@ object Bagel extends Logging {
messages: RDD[(K, M)],
combiner: Combiner[M, C],
partitioner: Partitioner,
- numSplits: Int
+ numPartitions: Int
)(
compute: (V, Option[C], Int) => (V, Array[M])
): RDD[(K, V)] = {
run[K, V, M, C, Nothing](
- sc, vertices, messages, combiner, None, partitioner, numSplits)(
+ sc, vertices, messages, combiner, None, partitioner, numPartitions)(
addAggregatorArg[K, V, M, C](compute))
}
@@ -70,13 +70,13 @@ object Bagel extends Logging {
vertices: RDD[(K, V)],
messages: RDD[(K, M)],
combiner: Combiner[M, C],
- numSplits: Int
+ numPartitions: Int
)(
compute: (V, Option[C], Int) => (V, Array[M])
): RDD[(K, V)] = {
- val part = new HashPartitioner(numSplits)
+ val part = new HashPartitioner(numPartitions)
run[K, V, M, C, Nothing](
- sc, vertices, messages, combiner, None, part, numSplits)(
+ sc, vertices, messages, combiner, None, part, numPartitions)(
addAggregatorArg[K, V, M, C](compute))
}
@@ -84,13 +84,13 @@ object Bagel extends Logging {
sc: SparkContext,
vertices: RDD[(K, V)],
messages: RDD[(K, M)],
- numSplits: Int
+ numPartitions: Int
)(
compute: (V, Option[Array[M]], Int) => (V, Array[M])
): RDD[(K, V)] = {
- val part = new HashPartitioner(numSplits)
+ val part = new HashPartitioner(numPartitions)
run[K, V, M, Array[M], Nothing](
- sc, vertices, messages, new DefaultCombiner(), None, part, numSplits)(
+ sc, vertices, messages, new DefaultCombiner(), None, part, numPartitions)(
addAggregatorArg[K, V, M, Array[M]](compute))
}
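Note that inside the main run overload the fallback to the context's default parallelism is unchanged; only the parameter name moves, and the local val is still called "splits" in this commit. A minimal sketch of that fallback, written as a hypothetical standalone helper purely for illustration:

    import spark.SparkContext

    // Hypothetical helper mirroring the check at the top of Bagel.run:
    // a value of 0 means "use the SparkContext's default parallelism".
    def resolveNumPartitions(numPartitions: Int, sc: SparkContext): Int =
      if (numPartitions != 0) numPartitions else sc.defaultParallelism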
diff --git a/bagel/src/main/scala/spark/bagel/examples/WikipediaPageRank.scala b/bagel/src/main/scala/spark/bagel/examples/WikipediaPageRank.scala
index 03843019c0..bc32663e0f 100644
--- a/bagel/src/main/scala/spark/bagel/examples/WikipediaPageRank.scala
+++ b/bagel/src/main/scala/spark/bagel/examples/WikipediaPageRank.scala
@@ -16,7 +16,7 @@ import scala.xml.{XML,NodeSeq}
object WikipediaPageRank {
def main(args: Array[String]) {
if (args.length < 5) {
- System.err.println("Usage: WikipediaPageRank <inputFile> <threshold> <numSplits> <host> <usePartitioner>")
+ System.err.println("Usage: WikipediaPageRank <inputFile> <threshold> <numPartitions> <host> <usePartitioner>")
System.exit(-1)
}
@@ -25,7 +25,7 @@ object WikipediaPageRank {
val inputFile = args(0)
val threshold = args(1).toDouble
- val numSplits = args(2).toInt
+ val numPartitions = args(2).toInt
val host = args(3)
val usePartitioner = args(4).toBoolean
val sc = new SparkContext(host, "WikipediaPageRank")
@@ -69,7 +69,7 @@ object WikipediaPageRank {
val result =
Bagel.run(
sc, vertices, messages, combiner = new PRCombiner(),
- numSplits = numSplits)(
+ numPartitions = numPartitions)(
utils.computeWithCombiner(numVertices, epsilon))
// Print the result
diff --git a/bagel/src/main/scala/spark/bagel/examples/WikipediaPageRankStandalone.scala b/bagel/src/main/scala/spark/bagel/examples/WikipediaPageRankStandalone.scala
index 06cc8c748b..9d9d80d809 100644
--- a/bagel/src/main/scala/spark/bagel/examples/WikipediaPageRankStandalone.scala
+++ b/bagel/src/main/scala/spark/bagel/examples/WikipediaPageRankStandalone.scala
@@ -88,7 +88,7 @@ object WikipediaPageRankStandalone {
n: Long,
partitioner: Partitioner,
usePartitioner: Boolean,
- numSplits: Int
+ numPartitions: Int
): RDD[(String, Double)] = {
var ranks = links.mapValues { edges => defaultRank }
for (i <- 1 to numIterations) {