aboutsummaryrefslogtreecommitdiff
path: root/sql/core/src/test/java
diff options
context:
space:
mode:
authorKousuke Saruta <sarutak@oss.nttdata.co.jp>2015-12-18 14:05:06 -0800
committerMichael Armbrust <michael@databricks.com>2015-12-18 14:05:06 -0800
commit6eba655259d2bcea27d0147b37d5d1e476e85422 (patch)
treef25a36ec77cb23da0ec848fa5b0b2f72cc8cb07d /sql/core/src/test/java
parent41ee7c57abd9f52065fd7ffb71a8af229603371d (diff)
downloadspark-6eba655259d2bcea27d0147b37d5d1e476e85422.tar.gz
spark-6eba655259d2bcea27d0147b37d5d1e476e85422.tar.bz2
spark-6eba655259d2bcea27d0147b37d5d1e476e85422.zip
[SPARK-12404][SQL] Ensure objects passed to StaticInvoke is Serializable
Now `StaticInvoke` receives `Any` as an object and `StaticInvoke` can be serialized but sometimes the object passed is not serializable. For example, the following code raises an Exception because `RowEncoder#extractorsFor` invoked indirectly makes `StaticInvoke`. ``` case class TimestampContainer(timestamp: java.sql.Timestamp) val rdd = sc.parallelize(1 to 2).map(_ => TimestampContainer(System.currentTimeMillis)) val df = rdd.toDF val ds = df.as[TimestampContainer] val rdd2 = ds.rdd <----------------- invokes extractorsFor indirectly ``` I'll add test cases. Author: Kousuke Saruta <sarutak@oss.nttdata.co.jp> Author: Michael Armbrust <michael@databricks.com> Closes #10357 from sarutak/SPARK-12404.
Diffstat (limited to 'sql/core/src/test/java')
-rw-r--r--sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java52
1 files changed, 52 insertions, 0 deletions
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
index 383a2d0bad..0dbaeb81c7 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
@@ -39,6 +39,7 @@ import org.apache.spark.sql.expressions.Aggregator;
import org.apache.spark.sql.test.TestSQLContext;
import org.apache.spark.sql.catalyst.encoders.OuterScopes;
import org.apache.spark.sql.catalyst.expressions.GenericRow;
+import org.apache.spark.sql.types.DecimalType;
import org.apache.spark.sql.types.StructType;
import static org.apache.spark.sql.functions.*;
@@ -608,6 +609,44 @@ public class JavaDatasetSuite implements Serializable {
}
}
+ public class SimpleJavaBean2 implements Serializable {
+ private Timestamp a;
+ private Date b;
+ private java.math.BigDecimal c;
+
+ public Timestamp getA() { return a; }
+
+ public void setA(Timestamp a) { this.a = a; }
+
+ public Date getB() { return b; }
+
+ public void setB(Date b) { this.b = b; }
+
+ public java.math.BigDecimal getC() { return c; }
+
+ public void setC(java.math.BigDecimal c) { this.c = c; }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ SimpleJavaBean that = (SimpleJavaBean) o;
+
+ if (!a.equals(that.a)) return false;
+ if (!b.equals(that.b)) return false;
+ return c.equals(that.c);
+ }
+
+ @Override
+ public int hashCode() {
+ int result = a.hashCode();
+ result = 31 * result + b.hashCode();
+ result = 31 * result + c.hashCode();
+ return result;
+ }
+ }
+
public class NestedJavaBean implements Serializable {
private SimpleJavaBean a;
@@ -689,4 +728,17 @@ public class JavaDatasetSuite implements Serializable {
.as(Encoders.bean(SimpleJavaBean.class));
Assert.assertEquals(data, ds3.collectAsList());
}
+
+ @Test
+ public void testJavaBeanEncoder2() {
+ // This is a regression test of SPARK-12404
+ OuterScopes.addOuterScope(this);
+ SimpleJavaBean2 obj = new SimpleJavaBean2();
+ obj.setA(new Timestamp(0));
+ obj.setB(new Date(0));
+ obj.setC(java.math.BigDecimal.valueOf(1));
+ Dataset<SimpleJavaBean2> ds =
+ context.createDataset(Arrays.asList(obj), Encoders.bean(SimpleJavaBean2.class));
+ ds.collect();
+ }
}