author    Renat Yusupov <re.yusupov@2gis.ru>    2014-10-05 17:56:24 -0700
committer Michael Armbrust <michael@databricks.com>    2014-10-05 17:56:34 -0700
commit    90897ea5f24b03c9f3455a62c7f68b3d3f0435ad (patch)
tree      ec2120eb11bb340cf434aae0d50fe635f2b0a403
parent    34b97a067d1b370fbed8ecafab2f48501a35d783 (diff)
[SPARK-3776][SQL] Wrong conversion to Catalyst for Option[Product]
Author: Renat Yusupov <re.yusupov@2gis.ru>

Closes #2641 from r3natko/feature/catalyst_option and squashes the following commits:

55d0c06 [Renat Yusupov] [SQL] SPARK-3776: Wrong conversion to Catalyst for Option[Product]
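Background on the fix: previously convertToCatalyst handled Option by calling o.orNull, which unwraps the option but never recursively converts the value inside it. For an Option[Product] such as Some(PrimitiveData(...)), the Product therefore leaked into the row unconverted instead of becoming a GenericRow. Mapping convertToCatalyst over the option fixes this while leaving the None => null behavior unchanged. A minimal sketch of the difference, using a hypothetical case class Inner that is not part of this patch:

    case class Inner(i: Int, b: Boolean)

    // Before the patch: the Product inside the Option is returned as-is.
    //   convertToCatalyst(Some(Inner(1, true)))   // => Inner(1, true)
    // After the patch: the inner value is converted recursively, matching
    // how the Seq, Map, and Product branches already behave.
    //   convertToCatalyst(Some(Inner(1, true)))   // => GenericRow containing (1, true)
    //   convertToCatalyst(None)                   // => null (unchanged)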
 sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala      |  2 +-
 sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala | 21 ++++++++++++++++++---
 2 files changed, 19 insertions(+), 4 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index 88a8fa7c28..b3ae8e6779 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -33,7 +33,7 @@ object ScalaReflection {
   /** Converts Scala objects to catalyst rows / types */
   def convertToCatalyst(a: Any): Any = a match {
-    case o: Option[_] => o.orNull
+    case o: Option[_] => o.map(convertToCatalyst).orNull
     case s: Seq[_] => s.map(convertToCatalyst)
     case m: Map[_, _] => m.map { case (k, v) => convertToCatalyst(k) -> convertToCatalyst(v) }
     case p: Product => new GenericRow(p.productIterator.map(convertToCatalyst).toArray)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
index 428607d8c8..488e373854 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
@@ -53,7 +53,8 @@ case class OptionalData(
     floatField: Option[Float],
     shortField: Option[Short],
     byteField: Option[Byte],
-    booleanField: Option[Boolean])
+    booleanField: Option[Boolean],
+    structField: Option[PrimitiveData])
 
 case class ComplexData(
     arrayField: Seq[Int],
@@ -100,7 +101,7 @@ class ScalaReflectionSuite extends FunSuite {
       nullable = true))
   }
 
-  test("optinal data") {
+  test("optional data") {
     val schema = schemaFor[OptionalData]
     assert(schema === Schema(
       StructType(Seq(
@@ -110,7 +111,8 @@ class ScalaReflectionSuite extends FunSuite {
         StructField("floatField", FloatType, nullable = true),
         StructField("shortField", ShortType, nullable = true),
         StructField("byteField", ByteType, nullable = true),
-        StructField("booleanField", BooleanType, nullable = true))),
+        StructField("booleanField", BooleanType, nullable = true),
+        StructField("structField", schemaFor[PrimitiveData].dataType, nullable = true))),
       nullable = true))
   }
@@ -228,4 +230,17 @@ class ScalaReflectionSuite extends FunSuite {
     assert(ArrayType(IntegerType) === typeOfObject3(Seq(1, 2, 3)))
     assert(ArrayType(ArrayType(IntegerType)) === typeOfObject3(Seq(Seq(1,2,3))))
   }
+
+  test("convert PrimitiveData to catalyst") {
+    val data = PrimitiveData(1, 1, 1, 1, 1, 1, true)
+    val convertedData = Seq(1, 1.toLong, 1.toDouble, 1.toFloat, 1.toShort, 1.toByte, true)
+    assert(convertToCatalyst(data) === convertedData)
+  }
+
+  test("convert Option[Product] to catalyst") {
+    val primitiveData = PrimitiveData(1, 1, 1, 1, 1, 1, true)
+    val data = OptionalData(Some(1), Some(1), Some(1), Some(1), Some(1), Some(1), Some(true), Some(primitiveData))
+    val convertedData = Seq(1, 1.toLong, 1.toDouble, 1.toFloat, 1.toShort, 1.toByte, true, convertToCatalyst(primitiveData))
+    assert(convertToCatalyst(data) === convertedData)
+  }
 }