author    Matei Zaharia <matei@eecs.berkeley.edu>    2012-10-02 22:28:55 -0700
committer Matei Zaharia <matei@eecs.berkeley.edu>    2012-10-02 22:28:55 -0700
commit    6cf5dffc724eb7158beebec7b393343b654c2fad (patch)
tree      90362fe52d5cf47f8aeb9d0bf6b8b2919e830be6 /core/src/main
parent    87f4451f20fb9deee550a439ce0db094370eb2d2 (diff)
Make more stuff private[spark]
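
For context, private[spark] is Scala's package-qualified access modifier: the marked definition stays visible everywhere under the spark package but disappears from the API seen by user code. A minimal sketch of the mechanism, with illustrative names that are not part of this commit:

    package spark {
      private[spark] class InternalHelper {             // visible anywhere under spark
        def work(): String = "internal"
      }
      class PublicApi {
        def use: String = new InternalHelper().work()   // ok: same enclosing package
      }
    }
    package userapp {
      object Main {
        val api = new spark.PublicApi()                 // public API still reachable
        // new spark.InternalHelper()                   // would not compile outside spark
      }
    }

This is why the commit can hide implementation classes such as the RDD subclasses below without breaking programs that only use the public methods on RDD.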
Diffstat (limited to 'core/src/main')
-rw-r--r--  core/src/main/scala/spark/Accumulators.scala           1
-rw-r--r--  core/src/main/scala/spark/BlockRDD.scala               2
-rw-r--r--  core/src/main/scala/spark/CartesianRDD.scala           3
-rw-r--r--  core/src/main/scala/spark/RDD.scala                    6
-rw-r--r--  core/src/main/scala/spark/partial/BoundedDouble.scala  1
-rw-r--r--  core/src/main/scala/spark/partial/PartialResult.scala  2
6 files changed, 11 insertions(+), 4 deletions(-)
diff --git a/core/src/main/scala/spark/Accumulators.scala b/core/src/main/scala/spark/Accumulators.scala
index c157cc8feb..2c29437f64 100644
--- a/core/src/main/scala/spark/Accumulators.scala
+++ b/core/src/main/scala/spark/Accumulators.scala
@@ -93,6 +93,7 @@ trait AccumulableParam[R, T] extends Serializable {
   def zero(initialValue: R): R
 }
 
+private[spark]
 class GrowableAccumulableParam[R <% Growable[T] with TraversableOnce[T] with Serializable, T]
   extends AccumulableParam[R,T] {
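
The view bound R <% Growable[T] with TraversableOnce[T] with Serializable accepts any collection type that can grow in place and be traversed. A hedged sketch of a type satisfying it; only zero appears in the hunk above, so the accumulate step is an assumption:

    import scala.collection.generic.Growable
    import scala.collection.mutable.ArrayBuffer

    // ArrayBuffer[Int] is already Growable[Int] with TraversableOnce[Int]
    // and Serializable, so the implicit view is the identity.
    val buf = ArrayBuffer(1, 2, 3)
    val g: Growable[Int] = buf
    g += 4    // grows in place, as an accumulator update would
    // zero(initialValue) plausibly returns an empty collection of the same type.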
diff --git a/core/src/main/scala/spark/BlockRDD.scala b/core/src/main/scala/spark/BlockRDD.scala
index faa99fe3e9..afc732234f 100644
--- a/core/src/main/scala/spark/BlockRDD.scala
+++ b/core/src/main/scala/spark/BlockRDD.scala
@@ -6,7 +6,7 @@ private[spark] class BlockRDDSplit(val blockId: String, idx: Int) extends Split
   val index = idx
 }
 
-
+private[spark]
 class BlockRDD[T: ClassManifest](sc: SparkContext, @transient blockIds: Array[String])
   extends RDD[T](sc) {
diff --git a/core/src/main/scala/spark/CartesianRDD.scala b/core/src/main/scala/spark/CartesianRDD.scala
index 83db2d2934..52aab5f32f 100644
--- a/core/src/main/scala/spark/CartesianRDD.scala
+++ b/core/src/main/scala/spark/CartesianRDD.scala
@@ -5,6 +5,7 @@ class CartesianSplit(idx: Int, val s1: Split, val s2: Split) extends Split with
   override val index: Int = idx
 }
 
+private[spark]
 class CartesianRDD[T: ClassManifest, U:ClassManifest](
     sc: SparkContext,
     rdd1: RDD[T],
@@ -45,4 +46,4 @@ class CartesianRDD[T: ClassManifest, U:ClassManifest](
       def getParents(id: Int): Seq[Int] = List(id % numSplitsInRdd2)
     }
   )
-}
\ No newline at end of file
+}
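
The getParents arithmetic above implies each flattened Cartesian split index encodes both parent splits. Assuming the usual row-major layout index = s1.index * numSplitsInRdd2 + s2.index (the encoding itself is not shown in this hunk), the two parents recover by division and remainder:

    // Hypothetical standalone check of the index arithmetic
    val numSplitsInRdd2 = 4
    def encode(i1: Int, i2: Int): Int = i1 * numSplitsInRdd2 + i2
    val id = encode(2, 3)                  // flattened index 11
    assert(id % numSplitsInRdd2 == 3)      // rdd2 parent, as in getParents above
    assert(id / numSplitsInRdd2 == 2)      // rdd1 parent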
diff --git a/core/src/main/scala/spark/RDD.scala b/core/src/main/scala/spark/RDD.scala
index 351c3d9d0b..35d70b1393 100644
--- a/core/src/main/scala/spark/RDD.scala
+++ b/core/src/main/scala/spark/RDD.scala
@@ -383,6 +383,7 @@ abstract class RDD[T: ClassManifest](@transient sc: SparkContext) extends Serial
   }
 }
 
+private[spark]
 class MappedRDD[U: ClassManifest, T: ClassManifest](
     prev: RDD[T],
     f: T => U)
@@ -393,6 +394,7 @@ class MappedRDD[U: ClassManifest, T: ClassManifest](
   override def compute(split: Split) = prev.iterator(split).map(f)
 }
 
+private[spark]
 class FlatMappedRDD[U: ClassManifest, T: ClassManifest](
     prev: RDD[T],
     f: T => TraversableOnce[U])
@@ -403,18 +405,21 @@ class FlatMappedRDD[U: ClassManifest, T: ClassManifest](
   override def compute(split: Split) = prev.iterator(split).flatMap(f)
 }
 
+private[spark]
 class FilteredRDD[T: ClassManifest](prev: RDD[T], f: T => Boolean) extends RDD[T](prev.context) {
   override def splits = prev.splits
   override val dependencies = List(new OneToOneDependency(prev))
   override def compute(split: Split) = prev.iterator(split).filter(f)
 }
 
+private[spark]
 class GlommedRDD[T: ClassManifest](prev: RDD[T]) extends RDD[Array[T]](prev.context) {
   override def splits = prev.splits
   override val dependencies = List(new OneToOneDependency(prev))
   override def compute(split: Split) = Array(prev.iterator(split).toArray).iterator
 }
 
+private[spark]
 class MapPartitionsRDD[U: ClassManifest, T: ClassManifest](
     prev: RDD[T],
     f: Iterator[T] => Iterator[U])
@@ -430,6 +435,7 @@ class MapPartitionsRDD[U: ClassManifest, T: ClassManifest](
  * closure. This can be used to generate or collect partition specific
  * information such as the number of tuples in a partition.
  */
+private[spark]
 class MapPartitionsWithSplitRDD[U: ClassManifest, T: ClassManifest](
     prev: RDD[T],
     f: (Int, Iterator[T]) => Iterator[U])
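
The doc comment above names counting tuples per partition as a use case. A small sketch of a function with the signature f: (Int, Iterator[T]) => Iterator[U] that MapPartitionsWithSplitRDD expects; no Spark runtime is involved, this only shows the shape:

    // Pairs each split index with the number of elements in that split
    val countPerSplit: (Int, Iterator[String]) => Iterator[(Int, Int)] =
      (splitIndex, iter) => Iterator((splitIndex, iter.size))

    // countPerSplit(0, Iterator("a", "b", "c")) yields Iterator((0, 3))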
diff --git a/core/src/main/scala/spark/partial/BoundedDouble.scala b/core/src/main/scala/spark/partial/BoundedDouble.scala
index 8bedd75182..463c33d6e2 100644
--- a/core/src/main/scala/spark/partial/BoundedDouble.scala
+++ b/core/src/main/scala/spark/partial/BoundedDouble.scala
@@ -3,7 +3,6 @@ package spark.partial
 /**
  * A Double with error bars on it.
  */
-private[spark]
 class BoundedDouble(val mean: Double, val confidence: Double, val low: Double, val high: Double) {
   override def toString(): String = "[%.3f, %.3f]".format(low, high)
 }
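
Note that this hunk goes the other way: removing private[spark] makes BoundedDouble user-visible, which fits its role as the value type of approximate results handed back to callers. From the body above, toString renders only the error bars:

    val d = new BoundedDouble(100.0, 0.95, 90.123, 110.456)
    // d.toString == "[90.123, 110.456]"  (mean and confidence are fields, not printed)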
diff --git a/core/src/main/scala/spark/partial/PartialResult.scala b/core/src/main/scala/spark/partial/PartialResult.scala
index beafbf67c3..200ed4ea1e 100644
--- a/core/src/main/scala/spark/partial/PartialResult.scala
+++ b/core/src/main/scala/spark/partial/PartialResult.scala
@@ -1,6 +1,6 @@
 package spark.partial
 
-private[spark] class PartialResult[R](initialVal: R, isFinal: Boolean) {
+class PartialResult[R](initialVal: R, isFinal: Boolean) {
   private var finalValue: Option[R] = if (isFinal) Some(initialVal) else None
   private var failure: Option[Exception] = None
   private var completionHandler: Option[R => Unit] = None
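
PartialResult likewise becomes public, so user code can hold one while a job is still running. The completionHandler field suggests a callback-registration method; assuming a hypothetical onComplete, which is not shown in this hunk:

    val result = new PartialResult[BoundedDouble](
      new BoundedDouble(100.0, 0.95, 90.0, 110.0), isFinal = false)
    // Hypothetical: result.onComplete(r => println("final value: " + r))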