Diffstat (limited to 'repl/src/test/scala/org')
-rw-r--r--  repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala | 27 +++++++++++++++++++++++++++
1 file changed, 27 insertions(+), 0 deletions(-)
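
Note (not part of the commit): the two tests added below guard REPL closure handling. SPARK-2576 verifies that importing the sqlContext.createSchemaRDD implicit still works when tasks are serialized out to executors, and SPARK-2632 verifies that importing a method from a non-serializable class does not break later, unrelated closures; roughly speaking, each interpreter line is compiled into a wrapper object that can capture earlier lines, so an unused import of a member of a non-serializable instance could drag that instance into a task closure. As an illustration only, the SPARK-2576 scenario looks roughly like the following as a standalone Spark 1.0-era application; the object name is made up, and local[2] stands in for the local-cluster[1,1,512] master the test uses to force cross-JVM task serialization.

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.SQLContext

    // Illustrative sketch of the SPARK-2576 scenario outside the REPL; names are invented.
    object CreateSchemaRDDSketch {
      case class TestCaseClass(value: Int)

      def main(args: Array[String]): Unit = {
        // The REPL test uses local-cluster[1,1,512] so tasks cross a JVM boundary;
        // local[2] keeps this sketch self-contained.
        val conf = new SparkConf().setAppName("SPARK-2576-sketch").setMaster("local[2]")
        val sc = new SparkContext(conf)
        val sqlContext = new SQLContext(sc)
        import sqlContext.createSchemaRDD  // implicit RDD[A <: Product] => SchemaRDD (Spark 1.0.x API)

        // Mirrors the snippet fed to the interpreter in the test below.
        val rows = sc.parallelize(1 to 10).map(x => TestCaseClass(x)).toSchemaRDD.collect()
        println(rows.mkString(", "))
        sc.stop()
      }
    }

The SPARK-2632 case is REPL-specific, which is why it is exercised through runInterpreter rather than a standalone program.
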
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index e2d8d5ff38..c8763eb277 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -256,6 +256,33 @@ class ReplSuite extends FunSuite {
assertDoesNotContain("error:", output)
assertDoesNotContain("Exception", output)
}
+
+ test("SPARK-2576 importing SQLContext.createSchemaRDD.") {
+ // We need to use local-cluster to test this case.
+ val output = runInterpreter("local-cluster[1,1,512]",
+ """
+ |val sqlContext = new org.apache.spark.sql.SQLContext(sc)
+ |import sqlContext.createSchemaRDD
+ |case class TestCaseClass(value: Int)
+ |sc.parallelize(1 to 10).map(x => TestCaseClass(x)).toSchemaRDD.collect
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ }
+
+ test("SPARK-2632 importing a method from non serializable class and not using it.") {
+ val output = runInterpreter("local",
+ """
+ |class TestClass() { def testMethod = 3 }
+ |val t = new TestClass
+ |import t.testMethod
+ |case class TestCaseClass(value: Int)
+ |sc.parallelize(1 to 10).map(x => TestCaseClass(x)).collect
+ """.stripMargin)
+ assertDoesNotContain("error:", output)
+ assertDoesNotContain("Exception", output)
+ }
+
if (System.getenv("MESOS_NATIVE_LIBRARY") != null) {
test("running on Mesos") {
val output = runInterpreter("localquiet",