From 87b74a9bfdee19f54c2ecdb3797959723cad507e Mon Sep 17 00:00:00 2001
From: Takuya UESHIN
Date: Wed, 2 Jul 2014 10:10:36 -0700
Subject: [SPARK-2287] [SQL] Make ScalaReflection be able to handle Generic case classes.

Author: Takuya UESHIN

Closes #1226 from ueshin/issues/SPARK-2287 and squashes the following commits:

32ef7c3 [Takuya UESHIN] Add execution of `SHOW TABLES` before `TestHive.reset()`.
541dc8d [Takuya UESHIN] Merge branch 'master' into issues/SPARK-2287
fac5fae [Takuya UESHIN] Remove unnecessary method receiver.
d306e60 [Takuya UESHIN] Merge branch 'master' into issues/SPARK-2287
7de5706 [Takuya UESHIN] Make ScalaReflection be able to handle Generic case classes.

(cherry picked from commit bc7041a42dfa84312492ea8cae6fdeaeac4f6d1c)
Signed-off-by: Michael Armbrust
---
 .../apache/spark/sql/catalyst/ScalaReflection.scala  |  7 +++++--
 .../spark/sql/catalyst/ScalaReflectionSuite.scala    | 20 ++++++++++++++++++++
 2 files changed, 25 insertions(+), 2 deletions(-)

(limited to 'sql/catalyst')

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index ada48eaf5d..5a55be1e51 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -47,10 +47,13 @@ object ScalaReflection {
       val TypeRef(_, _, Seq(optType)) = t
       Schema(schemaFor(optType).dataType, nullable = true)
     case t if t <:< typeOf[Product] =>
-      val params = t.member("<init>": TermName).asMethod.paramss
+      val formalTypeArgs = t.typeSymbol.asClass.typeParams
+      val TypeRef(_, _, actualTypeArgs) = t
+      val params = t.member(nme.CONSTRUCTOR).asMethod.paramss
       Schema(StructType(
         params.head.map { p =>
-          val Schema(dataType, nullable) = schemaFor(p.typeSignature)
+          val Schema(dataType, nullable) =
+            schemaFor(p.typeSignature.substituteTypes(formalTypeArgs, actualTypeArgs))
           StructField(p.name.toString, dataType, nullable)
         }), nullable = true)
     // Need to decide if we actually need a special type here.
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
index 489d7e9c24..c0438dbe52 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
@@ -60,6 +60,9 @@ case class ComplexData(
     mapField: Map[Int, String],
     structField: PrimitiveData)
 
+case class GenericData[A](
+    genericField: A)
+
 class ScalaReflectionSuite extends FunSuite {
   import ScalaReflection._
 
@@ -128,4 +131,21 @@ class ScalaReflectionSuite extends FunSuite {
         nullable = true))),
       nullable = true))
   }
+
+  test("generic data") {
+    val schema = schemaFor[GenericData[Int]]
+    assert(schema === Schema(
+      StructType(Seq(
+        StructField("genericField", IntegerType, nullable = false))),
+      nullable = true))
+  }
+
+  test("tuple data") {
+    val schema = schemaFor[(Int, String)]
+    assert(schema === Schema(
+      StructType(Seq(
+        StructField("_1", IntegerType, nullable = false),
+        StructField("_2", StringType, nullable = true))),
+      nullable = true))
+  }
 }
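
The core of the fix is the substituteTypes call: a generic case class's constructor parameters are declared against the formal type parameter (e.g. A), so they must be rewritten to the actual type arguments (e.g. Int) before schemaFor can map them to Catalyst types. The following is a rough standalone sketch of that mechanism using plain scala-reflect (Scala 2.10-era API, matching what the patch uses); Box and SubstituteDemo are hypothetical names for illustration only, not part of the patch:

    import scala.reflect.runtime.universe._

    // Hypothetical generic case class, for illustration only.
    case class Box[A](value: A)

    object SubstituteDemo extends App {
      val tpe = typeOf[Box[Int]]

      // Formal type parameters (A) of Box, and the actual type arguments (Int) of Box[Int].
      val formalTypeArgs = tpe.typeSymbol.asClass.typeParams
      val TypeRef(_, _, actualTypeArgs) = tpe

      // Constructor parameters; their declared types still refer to the formal A.
      val params = tpe.member(nme.CONSTRUCTOR).asMethod.paramss

      val fieldTypes = params.head.map { p =>
        // Substituting A -> Int yields the concrete field type a schema needs.
        p.name.toString -> p.typeSignature.substituteTypes(formalTypeArgs, actualTypeArgs)
      }

      println(fieldTypes) // roughly: List((value, Int))
    }

Without the substitution, p.typeSignature for the field is just the abstract type A, which schemaFor has no case for; substituting in the actual argument lets the existing primitive and Product handling apply, which is what the new "generic data" and "tuple data" tests exercise.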