about | summary | refs | log | tree | commit | diff
path: root/sql/catalyst
diff options
context:
space:
mode:
authorJoseph K. Bradley <joseph@databricks.com>2015-01-30 15:40:14 -0800
committerMichael Armbrust <michael@databricks.com>2015-01-30 15:40:14 -0800
commite643de42a70834dc967664bd297b58fc91a998e7 (patch)
tree7f0d6e44954efce416d0ca08a4993b3451f3bb19 /sql/catalyst
parent986977340d0d02dbd0346bd233dbd93b8c8e74c9 (diff)
downloadspark-e643de42a70834dc967664bd297b58fc91a998e7.tar.gz
spark-e643de42a70834dc967664bd297b58fc91a998e7.tar.bz2
spark-e643de42a70834dc967664bd297b58fc91a998e7.zip
[SPARK-5504] [sql] convertToCatalyst should support nested arrays
After the recent refactoring, convertToCatalyst in ScalaReflection does not recurse on Arrays. It should.

The test suite modification made the test fail before the fix in ScalaReflection. The fix makes the test suite succeed.

CC: marmbrus

Author: Joseph K. Bradley <joseph@databricks.com>

Closes #4295 from jkbradley/SPARK-5504 and squashes the following commits:

6b7276d [Joseph K. Bradley] Fixed issue in ScalaReflection.convertToCatalyst with Arrays with non-primitive types. Modified test suite so it failed before the fix and works after the fix.
Diffstat (limited to 'sql/catalyst')
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala      | 6
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala | 8
2 files changed, 11 insertions(+), 3 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index 4def65b01f..90646fd25b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -57,7 +57,11 @@ trait ScalaReflection {
case (obj, udt: UserDefinedType[_]) => udt.serialize(obj)
case (o: Option[_], _) => o.map(convertToCatalyst(_, dataType)).orNull
case (s: Seq[_], arrayType: ArrayType) => s.map(convertToCatalyst(_, arrayType.elementType))
- case (s: Array[_], arrayType: ArrayType) => s.toSeq
+ case (s: Array[_], arrayType: ArrayType) => if (arrayType.elementType.isPrimitive) {
+ s.toSeq
+ } else {
+ s.toSeq.map(convertToCatalyst(_, arrayType.elementType))
+ }
case (m: Map[_, _], mapType: MapType) => m.map { case (k, v) =>
convertToCatalyst(k, mapType.keyType) -> convertToCatalyst(v, mapType.valueType)
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
index 4a66716e0a..d0f547d187 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
@@ -64,7 +64,8 @@ case class ComplexData(
arrayFieldContainsNull: Seq[java.lang.Integer],
mapField: Map[Int, Long],
mapFieldValueContainsNull: Map[Int, java.lang.Long],
- structField: PrimitiveData)
+ structField: PrimitiveData,
+ nestedArrayField: Array[Array[Int]])
case class GenericData[A](
genericField: A)
@@ -158,7 +159,10 @@ class ScalaReflectionSuite extends FunSuite {
StructField("shortField", ShortType, nullable = false),
StructField("byteField", ByteType, nullable = false),
StructField("booleanField", BooleanType, nullable = false))),
- nullable = true))),
+ nullable = true),
+ StructField(
+ "nestedArrayField",
+ ArrayType(ArrayType(IntegerType, containsNull = false), containsNull = true)))),
nullable = true))
}