 core/src/main/scala/org/apache/spark/TestUtils.scala |  4 ++--
 core/src/test/scala/org/apache/spark/FileSuite.scala | 25 +++++++++++++++++++++
 2 files changed, 27 insertions(+), 2 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/TestUtils.scala b/core/src/main/scala/org/apache/spark/TestUtils.scala
index 885c6829a2..8ca731038e 100644
--- a/core/src/main/scala/org/apache/spark/TestUtils.scala
+++ b/core/src/main/scala/org/apache/spark/TestUtils.scala
@@ -92,8 +92,8 @@ private[spark] object TestUtils {
   def createCompiledClass(className: String, destDir: File, value: String = ""): File = {
     val compiler = ToolProvider.getSystemJavaCompiler
     val sourceFile = new JavaSourceFromString(className,
-      "public class " + className + " { @Override public String toString() { " +
-        "return \"" + value + "\";}}")
+      "public class " + className + " implements java.io.Serializable {" +
+      " @Override public String toString() { return \"" + value + "\"; }}")
 
     // Calling this outputs a class file in pwd. It's easier to just rename the file than
     // build a custom FileManager that controls the output location.
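The functional change above is the added "implements java.io.Serializable" in the generated Java source: saveAsObjectFile writes RDD elements through Java serialization, so instances of the compiled test class must be serializable or the new test below would fail on write. A minimal sketch of that constraint (not part of the patch; canSerialize is a made-up helper):

    import java.io.{ByteArrayOutputStream, NotSerializableException, ObjectOutputStream}

    // Java serialization rejects any object whose class does not
    // implement java.io.Serializable.
    def canSerialize(obj: AnyRef): Boolean = {
      val out = new ObjectOutputStream(new ByteArrayOutputStream())
      try { out.writeObject(obj); true }
      catch { case _: NotSerializableException => false }
      finally { out.close() }
    }

    canSerialize("a string") // true: String implements Serializable
    canSerialize(new Object) // false: a plain Object does not
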
diff --git a/core/src/test/scala/org/apache/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
index 070e974657..c70e22cf09 100644
--- a/core/src/test/scala/org/apache/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -177,6 +177,31 @@ class FileSuite extends FunSuite with LocalSparkContext {
     assert(output.collect().toList === List((1, "a"), (2, "aa"), (3, "aaa")))
   }
 
+  test("object files of classes from a JAR") {
+    val original = Thread.currentThread().getContextClassLoader
+    val className = "FileSuiteObjectFileTest"
+    val jar = TestUtils.createJarWithClasses(Seq(className))
+    val loader = new java.net.URLClassLoader(Array(jar), Utils.getContextOrSparkClassLoader)
+    Thread.currentThread().setContextClassLoader(loader)
+    try {
+      sc = new SparkContext("local", "test")
+      val objs = sc.makeRDD(1 to 3).map { x =>
+        val loader = Thread.currentThread().getContextClassLoader
+        Class.forName(className, true, loader).newInstance()
+      }
+      val outputDir = new File(tempDir, "output").getAbsolutePath
+      objs.saveAsObjectFile(outputDir)
+      // Try reading the output back as an object file
+      val ct = reflect.ClassTag[Any](Class.forName(className, true, loader))
+      val output = sc.objectFile[Any](outputDir)
+      assert(output.collect().size === 3)
+      assert(output.collect().head.getClass.getName === className)
+    }
+    finally {
+      Thread.currentThread().setContextClassLoader(original)
+    }
+  }
+
   test("write SequenceFile using new Hadoop API") {
     import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat
     sc = new SparkContext("local", "test")
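
The new test pins down the case this commit cares about: a class that exists only in a JAR added at runtime must survive a saveAsObjectFile / objectFile round trip, which requires deserialization to resolve classes through the thread's context classloader rather than only the classloader that loaded Spark itself. For reference, the plain round trip with an ordinary serializable type looks like this (a standalone sketch, not from the patch; the app name and temp path are made up):

    import org.apache.spark.SparkContext

    val sc = new SparkContext("local", "objectfile-demo")
    val dir = java.nio.file.Files.createTempDirectory("spark-objfile").toString + "/output"

    // saveAsObjectFile writes each record with Java serialization;
    // objectFile reads the records back, resolving classes via the
    // context (or Spark) classloader.
    sc.makeRDD(Seq("a", "aa", "aaa")).saveAsObjectFile(dir)
    val back = sc.objectFile[String](dir).collect()
    assert(back.sorted.sameElements(Array("a", "aa", "aaa")))
    sc.stop()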