aboutsummaryrefslogtreecommitdiff
path: root/core
diff options
context:
space:
mode:
authorTathagata Das <tathagata.das1565@gmail.com>2014-05-19 22:36:24 -0700
committerTathagata Das <tathagata.das1565@gmail.com>2014-05-19 22:36:24 -0700
commit52eb54d02403a3c37d84b9da7cc1cdb261048cf8 (patch)
tree9938c023c1d5206327024a09a2aa05d5e155b97a /core
parentbcb9dce6f444a977c714117811bce0c54b417650 (diff)
downloadspark-52eb54d02403a3c37d84b9da7cc1cdb261048cf8.tar.gz
spark-52eb54d02403a3c37d84b9da7cc1cdb261048cf8.tar.bz2
spark-52eb54d02403a3c37d84b9da7cc1cdb261048cf8.zip
[SPARK-1877] ClassNotFoundException when loading RDD with serialized objects
Updated version of #821 Author: Tathagata Das <tathagata.das1565@gmail.com> Author: Ghidireac <bogdang@u448a5b0a73d45358d94a.ant.amazon.com> Closes #835 from tdas/SPARK-1877 and squashes the following commits: f346f71 [Tathagata Das] Addressed Patrick's comments. fee0c5d [Ghidireac] SPARK-1877: ClassNotFoundException when loading RDD with serialized objects
Diffstat (limited to 'core')
-rw-r--r--core/src/main/scala/org/apache/spark/SparkContext.scala2
1 file changed, 1 insertion, 1 deletion
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 634c10c33f..49737fa4be 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -718,7 +718,7 @@ class SparkContext(config: SparkConf) extends Logging {
minPartitions: Int = defaultMinPartitions
): RDD[T] = {
sequenceFile(path, classOf[NullWritable], classOf[BytesWritable], minPartitions)
- .flatMap(x => Utils.deserialize[Array[T]](x._2.getBytes))
+ .flatMap(x => Utils.deserialize[Array[T]](x._2.getBytes, Utils.getContextOrSparkClassLoader))
}
protected[spark] def checkpointFile[T: ClassTag](