aboutsummaryrefslogtreecommitdiff
path: root/sql/catalyst
diff options
context:
space:
mode:
authorTakuya UESHIN <ueshin@happy-camper.st>2014-07-02 10:10:36 -0700
committerMichael Armbrust <michael@databricks.com>2014-07-02 10:11:02 -0700
commit87b74a9bfdee19f54c2ecdb3797959723cad507e (patch)
tree651fe732a45113240cc7bdb2b91b35e44052c974 /sql/catalyst
parent552e28b63855668fe9b68e322de17291b4cc0f37 (diff)
downloadspark-87b74a9bfdee19f54c2ecdb3797959723cad507e.tar.gz
spark-87b74a9bfdee19f54c2ecdb3797959723cad507e.tar.bz2
spark-87b74a9bfdee19f54c2ecdb3797959723cad507e.zip
[SPARK-2287] [SQL] Make ScalaReflection be able to handle Generic case classes.
Author: Takuya UESHIN <ueshin@happy-camper.st>

Closes #1226 from ueshin/issues/SPARK-2287 and squashes the following commits:

32ef7c3 [Takuya UESHIN] Add execution of `SHOW TABLES` before `TestHive.reset()`.
541dc8d [Takuya UESHIN] Merge branch 'master' into issues/SPARK-2287
fac5fae [Takuya UESHIN] Remove unnecessary method receiver.
d306e60 [Takuya UESHIN] Merge branch 'master' into issues/SPARK-2287
7de5706 [Takuya UESHIN] Make ScalaReflection be able to handle Generic case classes.

(cherry picked from commit bc7041a42dfa84312492ea8cae6fdeaeac4f6d1c)
Signed-off-by: Michael Armbrust <michael@databricks.com>
Diffstat (limited to 'sql/catalyst')
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala7
-rw-r--r--sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala20
2 files changed, 25 insertions, 2 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index ada48eaf5d..5a55be1e51 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -47,10 +47,13 @@ object ScalaReflection {
val TypeRef(_, _, Seq(optType)) = t
Schema(schemaFor(optType).dataType, nullable = true)
case t if t <:< typeOf[Product] =>
- val params = t.member("<init>": TermName).asMethod.paramss
+ val formalTypeArgs = t.typeSymbol.asClass.typeParams
+ val TypeRef(_, _, actualTypeArgs) = t
+ val params = t.member(nme.CONSTRUCTOR).asMethod.paramss
Schema(StructType(
params.head.map { p =>
- val Schema(dataType, nullable) = schemaFor(p.typeSignature)
+ val Schema(dataType, nullable) =
+ schemaFor(p.typeSignature.substituteTypes(formalTypeArgs, actualTypeArgs))
StructField(p.name.toString, dataType, nullable)
}), nullable = true)
// Need to decide if we actually need a special type here.
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
index 489d7e9c24..c0438dbe52 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
@@ -60,6 +60,9 @@ case class ComplexData(
mapField: Map[Int, String],
structField: PrimitiveData)
+case class GenericData[A](
+ genericField: A)
+
class ScalaReflectionSuite extends FunSuite {
import ScalaReflection._
@@ -128,4 +131,21 @@ class ScalaReflectionSuite extends FunSuite {
nullable = true))),
nullable = true))
}
+
+ test("generic data") {
+ val schema = schemaFor[GenericData[Int]]
+ assert(schema === Schema(
+ StructType(Seq(
+ StructField("genericField", IntegerType, nullable = false))),
+ nullable = true))
+ }
+
+ test("tuple data") {
+ val schema = schemaFor[(Int, String)]
+ assert(schema === Schema(
+ StructType(Seq(
+ StructField("_1", IntegerType, nullable = false),
+ StructField("_2", StringType, nullable = true))),
+ nullable = true))
+ }
}