diff options
author | Michael Armbrust <michael@databricks.com> | 2015-11-20 15:17:17 -0800 |
---|---|---|
committer | Michael Armbrust <michael@databricks.com> | 2015-11-20 15:17:17 -0800 |
commit | 4b84c72dfbb9ddb415fee35f69305b5d7b280891 (patch) | |
tree | fc539c382da5480312a8be25afe8b9a922811ded /repl/scala-2.10/src/test | |
parent | a6239d587c638691f52eca3eee905c53fbf35a12 (diff) | |
download | spark-4b84c72dfbb9ddb415fee35f69305b5d7b280891.tar.gz spark-4b84c72dfbb9ddb415fee35f69305b5d7b280891.tar.bz2 spark-4b84c72dfbb9ddb415fee35f69305b5d7b280891.zip |
[SPARK-11636][SQL] Support classes defined in the REPL with Encoders
#theScaryParts (i.e. changes to the repl, executor classloaders and codegen)...
Author: Michael Armbrust <michael@databricks.com>
Author: Yin Huai <yhuai@databricks.com>
Closes #9825 from marmbrus/dataset-replClasses2.
Diffstat (limited to 'repl/scala-2.10/src/test')
-rw-r--r-- | repl/scala-2.10/src/test/scala/org/apache/spark/repl/ReplSuite.scala | 24 |
1 files changed, 24 insertions, 0 deletions
diff --git a/repl/scala-2.10/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/scala-2.10/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 5674dcd669..081aa03002 100644
--- a/repl/scala-2.10/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/scala-2.10/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -262,6 +262,9 @@ class ReplSuite extends SparkFunSuite {
         |import sqlContext.implicits._
         |case class TestCaseClass(value: Int)
         |sc.parallelize(1 to 10).map(x => TestCaseClass(x)).toDF().collect()
+        |
+        |// Test Dataset Serialization in the REPL
+        |Seq(TestCaseClass(1)).toDS().collect()
       """.stripMargin)
     assertDoesNotContain("error:", output)
     assertDoesNotContain("Exception", output)
@@ -278,6 +281,27 @@ class ReplSuite extends SparkFunSuite {
     assertDoesNotContain("java.lang.ClassNotFoundException", output)
   }
 
+  test("Datasets and encoders") {
+    val output = runInterpreter("local",
+      """
+        |import org.apache.spark.sql.functions._
+        |import org.apache.spark.sql.Encoder
+        |import org.apache.spark.sql.expressions.Aggregator
+        |import org.apache.spark.sql.TypedColumn
+        |val simpleSum = new Aggregator[Int, Int, Int] with Serializable {
+        |  def zero: Int = 0                     // The initial value.
+        |  def reduce(b: Int, a: Int) = b + a    // Add an element to the running total
+        |  def merge(b1: Int, b2: Int) = b1 + b2 // Merge intermediate values.
+        |  def finish(b: Int) = b                // Return the final result.
+        |}.toColumn
+        |
+        |val ds = Seq(1, 2, 3, 4).toDS()
+        |ds.select(simpleSum).collect
+      """.stripMargin)
+    assertDoesNotContain("error:", output)
+    assertDoesNotContain("Exception", output)
+  }
+
   test("SPARK-2632 importing a method from non serializable class and not using it.") {
     val output = runInterpreter("local",
       """