aboutsummaryrefslogtreecommitdiff
path: root/sql/core/src/test/java
diff options
context:
space:
mode:
authorPunya Biswal <pbiswal@palantir.com>2015-04-21 14:50:02 -0700
committerMichael Armbrust <michael@databricks.com>2015-04-21 14:50:02 -0700
commit2a24bf92e6d36e876bad6a8b4e0ff12c407ebb8a (patch)
treec49d92745450245227daa05c2b86f4cf51352bb9 /sql/core/src/test/java
parent6265cba00f6141575b4be825735d77d4cea500ab (diff)
downloadspark-2a24bf92e6d36e876bad6a8b4e0ff12c407ebb8a.tar.gz
spark-2a24bf92e6d36e876bad6a8b4e0ff12c407ebb8a.tar.bz2
spark-2a24bf92e6d36e876bad6a8b4e0ff12c407ebb8a.zip
[SPARK-6996][SQL] Support map types in java beans
liancheng mengxr this is similar to #5146. Author: Punya Biswal <pbiswal@palantir.com> Closes #5578 from punya/feature/SPARK-6996 and squashes the following commits: d56c3e0 [Punya Biswal] Fix imports c7e308b [Punya Biswal] Support java iterable types in POJOs 5e00685 [Punya Biswal] Support map types in java beans
Diffstat (limited to 'sql/core/src/test/java')
-rw-r--r--sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java57
1 file changed, 45 insertions, 12 deletions
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java
index 6d0fbe83c2..fc3ed4a708 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java
@@ -17,23 +17,28 @@
package test.org.apache.spark.sql;
-import java.io.Serializable;
-import java.util.Arrays;
-
-import scala.collection.Seq;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.primitives.Ints;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.sql.*;
+import org.apache.spark.sql.DataFrame;
+import org.apache.spark.sql.Row;
+import org.apache.spark.sql.SQLContext;
+import org.apache.spark.sql.TestData$;
import org.apache.spark.sql.test.TestSQLContext;
import org.apache.spark.sql.test.TestSQLContext$;
import org.apache.spark.sql.types.*;
+import org.junit.*;
+
+import scala.collection.JavaConversions;
+import scala.collection.Seq;
+import scala.collection.mutable.Buffer;
+
+import java.io.Serializable;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
import static org.apache.spark.sql.functions.*;
@@ -106,6 +111,8 @@ public class JavaDataFrameSuite {
public static class Bean implements Serializable {
private double a = 0.0;
private Integer[] b = new Integer[]{0, 1};
+ private Map<String, int[]> c = ImmutableMap.of("hello", new int[] { 1, 2 });
+ private List<String> d = Arrays.asList("floppy", "disk");
public double getA() {
return a;
@@ -114,6 +121,14 @@ public class JavaDataFrameSuite {
public Integer[] getB() {
return b;
}
+
+ public Map<String, int[]> getC() {
+ return c;
+ }
+
+ public List<String> getD() {
+ return d;
+ }
}
@Test
@@ -127,7 +142,15 @@ public class JavaDataFrameSuite {
Assert.assertEquals(
new StructField("b", new ArrayType(IntegerType$.MODULE$, true), true, Metadata.empty()),
schema.apply("b"));
- Row first = df.select("a", "b").first();
+ ArrayType valueType = new ArrayType(DataTypes.IntegerType, false);
+ MapType mapType = new MapType(DataTypes.StringType, valueType, true);
+ Assert.assertEquals(
+ new StructField("c", mapType, true, Metadata.empty()),
+ schema.apply("c"));
+ Assert.assertEquals(
+ new StructField("d", new ArrayType(DataTypes.StringType, true), true, Metadata.empty()),
+ schema.apply("d"));
+ Row first = df.select("a", "b", "c", "d").first();
Assert.assertEquals(bean.getA(), first.getDouble(0), 0.0);
// Now Java lists and maps are converted to Scala Seq's and Map's. Once we get a Seq below,
// verify that it has the expected length, and contains expected elements.
@@ -136,5 +159,15 @@ public class JavaDataFrameSuite {
for (int i = 0; i < result.length(); i++) {
Assert.assertEquals(bean.getB()[i], result.apply(i));
}
+ Buffer<Integer> outputBuffer = (Buffer<Integer>) first.getJavaMap(2).get("hello");
+ Assert.assertArrayEquals(
+ bean.getC().get("hello"),
+ Ints.toArray(JavaConversions.asJavaList(outputBuffer)));
+ Seq<String> d = first.getAs(3);
+ Assert.assertEquals(bean.getD().size(), d.length());
+ for (int i = 0; i < d.length(); i++) {
+ Assert.assertEquals(bean.getD().get(i), d.apply(i));
+ }
}
+
}