aboutsummaryrefslogtreecommitdiff
path: root/sql/core
diff options
context:
space:
mode:
authorTakeshi Yamamuro <yamamuro@apache.org>2017-03-21 11:17:34 +0800
committerWenchen Fan <wenchen@databricks.com>2017-03-21 11:17:34 +0800
commit0ec1db5475f1a7839bdbf0d9cffe93ce6970a7fe (patch)
tree3a1c5d945583f7189018904ec033e469ae281b2e /sql/core
parente9c91badce64731ffd3e53cbcd9f044a7593e6b8 (diff)
downloadspark-0ec1db5475f1a7839bdbf0d9cffe93ce6970a7fe.tar.gz
spark-0ec1db5475f1a7839bdbf0d9cffe93ce6970a7fe.tar.bz2
spark-0ec1db5475f1a7839bdbf0d9cffe93ce6970a7fe.zip
[SPARK-19980][SQL] Add NULL checks in Bean serializer
## What changes were proposed in this pull request? A Bean serializer in `ExpressionEncoder` could change values when Beans have NULL fields. A concrete example is as follows: ``` scala> :paste class Outer extends Serializable { private var cls: Inner = _ def setCls(c: Inner): Unit = cls = c def getCls(): Inner = cls } class Inner extends Serializable { private var str: String = _ def setStr(s: String): Unit = str = s def getStr(): String = str } scala> Seq("""{"cls":null}""", """{"cls": {"str":null}}""").toDF().write.text("data") scala> val encoder = Encoders.bean(classOf[Outer]) scala> val schema = encoder.schema scala> val df = spark.read.schema(schema).json("data").as[Outer](encoder) scala> df.show +------+ | cls| +------+ |[null]| | null| +------+ scala> df.map(x => x)(encoder).show() +------+ | cls| +------+ |[null]| |[null]| // <-- Value changed +------+ ``` This is because the Bean serializer does not have the NULL-check expressions that the serializer of Scala's product types has. Indeed, this value change does not happen with Scala's product types: ``` scala> :paste case class Outer(cls: Inner) case class Inner(str: String) scala> val encoder = Encoders.product[Outer] scala> val schema = encoder.schema scala> val df = spark.read.schema(schema).json("data").as[Outer](encoder) scala> df.show +------+ | cls| +------+ |[null]| | null| +------+ scala> df.map(x => x)(encoder).show() +------+ | cls| +------+ |[null]| | null| +------+ ``` This PR adds the NULL-check expressions to the Bean serializer, matching the serializer of Scala's product types. ## How was this patch tested? Added tests in `JavaDatasetSuite`. Author: Takeshi Yamamuro <yamamuro@apache.org> Closes #17347 from maropu/SPARK-19980.
Diffstat (limited to 'sql/core')
-rw-r--r--sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java19
1 file changed, 19 insertions, 0 deletions
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
index ca9e5ad2ea..ffb4c6273f 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
@@ -1380,4 +1380,23 @@ public class JavaDatasetSuite implements Serializable {
CircularReference4Bean bean = new CircularReference4Bean();
spark.createDataset(Arrays.asList(bean), Encoders.bean(CircularReference4Bean.class));
}
+
+ @Test(expected = RuntimeException.class)
+ public void testNullInTopLevelBean() {
+ NestedSmallBean bean = new NestedSmallBean();
+ // We cannot set null in top-level bean
+ spark.createDataset(Arrays.asList(bean, null), Encoders.bean(NestedSmallBean.class));
+ }
+
+ @Test
+ public void testSerializeNull() {
+ NestedSmallBean bean = new NestedSmallBean();
+ Encoder<NestedSmallBean> encoder = Encoders.bean(NestedSmallBean.class);
+ List<NestedSmallBean> beans = Arrays.asList(bean);
+ Dataset<NestedSmallBean> ds1 = spark.createDataset(beans, encoder);
+ Assert.assertEquals(beans, ds1.collectAsList());
+ Dataset<NestedSmallBean> ds2 =
+ ds1.map((MapFunction<NestedSmallBean, NestedSmallBean>) b -> b, encoder);
+ Assert.assertEquals(beans, ds2.collectAsList());
+ }
}