Author:    Matei Zaharia <matei@databricks.com>  2014-05-10 12:10:24 -0700
Committer: Patrick Wendell <pwendell@gmail.com>  2014-05-10 12:10:24 -0700
Commit:    7eefc9d2b3f6ebc0ecb5562da7323f1e06afbb35
Tree:      32a35b0898c5710cdbd73b56ef9dcb9914e1cf02
Parent:    8e94d2721a9d3d36697e13f8cc6567ae8aeee78b
SPARK-1708. Add a ClassTag on Serializer and things that depend on it
This pull request contains a rebased patch from @heathermiller (https://github.com/heathermiller/spark/pull/1) to add ClassTags on Serializer and types that depend on it (Broadcast and AccumulableCollection). Putting these in the public API signatures now will allow us to use Scala Pickling for serialization down the line without breaking binary compatibility.

One question remaining is whether we also want them on Accumulator -- Accumulator is passed as part of a bigger Task or TaskResult object via the closure serializer, so it doesn't seem very useful to add the ClassTag there. Broadcast and AccumulableCollection, in contrast, were being serialized directly.

CC @rxin, @pwendell, @heathermiller

Author: Matei Zaharia <matei@databricks.com>

Closes #700 from mateiz/spark-1708 and squashes the following commits:

1a3d8b0 [Matei Zaharia] Use fake ClassTag in Java
3b449ed [Matei Zaharia] test fix
2209a27 [Matei Zaharia] Code style fixes
9d48830 [Matei Zaharia] Add a ClassTag on Serializer and things that depend on it
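As a rough illustration of what the ClassTag context bound buys, here is a minimal, self-contained Scala sketch. It is not Spark's actual Serializer API; the trait, class, and object names below are hypothetical stand-ins. The point is that the implicit ClassTag[T] carries T's runtime class (which a pickling-based serializer could use to select a pickler), while ordinary Scala call sites keep compiling unchanged because the compiler supplies the tag implicitly.

import java.io._
import scala.reflect.ClassTag

// Hypothetical stand-in for a serializer whose methods carry a ClassTag
// context bound, analogous to what this commit adds to SerializerInstance.
trait SketchSerializerInstance {
  def serialize[T: ClassTag](t: T): Array[Byte]
  def deserialize[T: ClassTag](bytes: Array[Byte]): T
}

// A Java-serialization-backed implementation. It ignores the tag, but a
// pickling-based serializer could read implicitly[ClassTag[T]].runtimeClass.
class SketchJavaSerializer extends SketchSerializerInstance {
  def serialize[T: ClassTag](t: T): Array[Byte] = {
    val bos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(bos)
    oos.writeObject(t)
    oos.close()
    bos.toByteArray
  }

  def deserialize[T: ClassTag](bytes: Array[Byte]): T = {
    val ois = new ObjectInputStream(new ByteArrayInputStream(bytes))
    try ois.readObject().asInstanceOf[T] finally ois.close()
  }
}

object SketchDemo {
  def main(args: Array[String]): Unit = {
    val ser = new SketchJavaSerializer
    // Scala callers are unaffected: the compiler supplies ClassTag[String].
    val bytes = ser.serialize("hello")
    println(ser.deserialize[String](bytes))

    // Callers with no implicit tag in scope can pass a placeholder
    // explicitly, in the spirit of the "Use fake ClassTag in Java"
    // commit listed above:
    val fake = ClassTag.AnyRef.asInstanceOf[ClassTag[String]]
    println(ser.deserialize(bytes)(fake))
  }
}

The explicit-tag call at the end mirrors the Java-facing concern: Java code has no implicits in scope, so a placeholder tag is handed over by hand while Scala callers see no change.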
Diffstat (limited to 'examples/src')
 examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRankStandalone.scala | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRankStandalone.scala b/examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRankStandalone.scala
index a197dac87d..576a3e371b 100644
--- a/examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRankStandalone.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRankStandalone.scala
@@ -28,6 +28,8 @@ import org.apache.spark.serializer.{DeserializationStream, SerializationStream,
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
+import scala.reflect.ClassTag
+
object WikipediaPageRankStandalone {
def main(args: Array[String]) {
if (args.length < 4) {
@@ -143,15 +145,15 @@ class WPRSerializer extends org.apache.spark.serializer.Serializer {
}
class WPRSerializerInstance extends SerializerInstance {
- def serialize[T](t: T): ByteBuffer = {
+ def serialize[T: ClassTag](t: T): ByteBuffer = {
throw new UnsupportedOperationException()
}
- def deserialize[T](bytes: ByteBuffer): T = {
+ def deserialize[T: ClassTag](bytes: ByteBuffer): T = {
throw new UnsupportedOperationException()
}
- def deserialize[T](bytes: ByteBuffer, loader: ClassLoader): T = {
+ def deserialize[T: ClassTag](bytes: ByteBuffer, loader: ClassLoader): T = {
throw new UnsupportedOperationException()
}
@@ -167,7 +169,7 @@ class WPRSerializerInstance extends SerializerInstance {
class WPRSerializationStream(os: OutputStream) extends SerializationStream {
val dos = new DataOutputStream(os)
- def writeObject[T](t: T): SerializationStream = t match {
+ def writeObject[T: ClassTag](t: T): SerializationStream = t match {
case (id: String, wrapper: ArrayBuffer[_]) => wrapper(0) match {
case links: Array[String] => {
dos.writeInt(0) // links
@@ -200,7 +202,7 @@ class WPRSerializationStream(os: OutputStream) extends SerializationStream {
class WPRDeserializationStream(is: InputStream) extends DeserializationStream {
val dis = new DataInputStream(is)
- def readObject[T](): T = {
+ def readObject[T: ClassTag](): T = {
val typeId = dis.readInt()
typeId match {
case 0 => {