author     Jakob Odersky <jakob@odersky.com>    2016-09-23 12:23:17 -0700
committer  Jakob Odersky <jakob@odersky.com>    2016-09-23 12:23:17 -0700
commit     904b95f4048e5453c885fe9054c47b3d0d75dcef (patch)
tree       08bce1a5aaf891329d44cf63908a7e202fda3ccb
parent     3e7eb398930d2ef302a04021ae7145fc1b901a21 (diff)
Discover issues (macros)
-rw-r--r--  project/SparkBuild.scala                                                                        |  1
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaMacros.scala                     | 23
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala                 |  2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala | 14
4 files changed, 35 insertions, 5 deletions
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index a39c93e957..c392f23351 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -287,6 +287,7 @@ object SparkBuild extends PomBuild {
},
scalacOptions in Compile ++= Seq(
+ "-Xlog-free-terms",
s"-target:jvm-${scalacJVMVersion.value}",
"-sourcepath", (baseDirectory in ThisBuild).value.getAbsolutePath // Required for relative source links in scaladoc
) ++ sys.env.get("JAVA_7_HOME").toSeq.flatMap { jdk7 =>
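
The only build change is the new -Xlog-free-terms scalac flag, which makes the compiler print a message whenever reification creates a free term. That is presumably the issue being chased in this commit, since reify-ing values that were computed inside the macro can leave macro-universe objects behind as free terms in the generated tree. A minimal standalone sketch (hypothetical, not part of this patch) of code that triggers the warning:

import scala.reflect.runtime.universe._

object FreeTermDemo {
  // x is a local value, so reifying the expression captures it as a free
  // term; with -Xlog-free-terms scalac reports a "free term" message here.
  def captured(): Tree = {
    val x = 42
    reify(x + 1).tree
  }
}
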
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaMacros.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaMacros.scala
index 2e45b14801..88815e8ae4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaMacros.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaMacros.scala
@@ -26,6 +26,8 @@ class MacrosHelper[C <: Context](val context: C) extends ScalaReflection {
import universe._
+ def getClassFromType(tpe: Type): Class[_] = Class.forName("foo")
+
def generate[T: context.WeakTypeTag]: context.Tree = {
val tag = implicitly[WeakTypeTag[T]]
val tpe = tag.tpe
@@ -42,17 +44,30 @@ class MacrosHelper[C <: Context](val context: C) extends ScalaReflection {
val serializer = serializerFor[T](nullSafeInput)(tag)
val deserializer = deserializerFor[T](tag)
- val schema = schemaFor[T](tag) match {
+ def schema = schemaFor[T](tag) match {
case Schema(s: StructType, _) => s
case Schema(dt, nullable) => new StructType().add("value", dt, nullable)
}
- q"""new _root_.org.apache.spark.sql.catalyst.encoders.ExpressionEncoder[T](
- ${reify(schema)},
+ val enc = new _root_.org.apache.spark.sql.catalyst.encoders.ExpressionEncoder[T](
+ schema,
+ flat,
+ serializer.flatten,
+ deserializer,
+ null
+ )
+ //reify(enc).tree
+
+ /*
+ val tree = q"""new _root_.org.apache.spark.sql.catalyst.encoders.ExpressionEncoder[T](
+ ${schema},
${reify(flat)},
${reify(serializer.flatten)},
${reify(deserializer)},
classTag[$tpe])"""
+ */
+ //println(show(tree))
+ //tree
}
}
@@ -62,7 +77,7 @@ import scala.language.experimental.macros
object ScalaMacros {
- final def newEncoder[T]: ExpressionEncoder[T] = macro newEncoderImpl[T]
+ implicit def newEncoder[T]: ExpressionEncoder[T] = macro newEncoderImpl[T]
def newEncoderImpl[T: c.WeakTypeTag](c: Context): c.Expr[ExpressionEncoder[T]] = {
val helper = new MacrosHelper[c.type](c)
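
Two things change in this file: the first hunk replaces the quasiquote with a direct ExpressionEncoder construction (left with the reify attempt commented out), and the second hunk turns newEncoder from a plain macro into an implicit macro, i.e. an implicit materializer: whenever the compiler needs an ExpressionEncoder[T] and finds none in scope, it expands the macro at the call site. The following is a self-contained sketch of that materializer pattern under simplified assumptions (a hypothetical Encoder trait standing in for Spark's ExpressionEncoder), built with a quasiquote rather than reify so that no macro-universe values end up as free terms:

import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context

trait Encoder[T] { def schema: String }

object Encoder {
  // Implicit materializer: expanded by the compiler at every call site that
  // needs an Encoder[T] and has no other instance in scope.
  implicit def materialize[T]: Encoder[T] = macro materializeImpl[T]

  def materializeImpl[T: c.WeakTypeTag](c: Context): c.Tree = {
    import c.universe._
    val tpe = weakTypeOf[T]
    // Build the instance with a quasiquote; in real code the class name would
    // be written with a _root_.fully.qualified.Name prefix, as the patch does,
    // so that it resolves at any call site.
    q"""new Encoder[$tpe] { def schema: String = ${tpe.toString} }"""
  }
}

Compiled in a separate unit from its call sites (a macro cannot be expanded in the same compilation run that defines it), an implicit search such as implicitly[Encoder[Int]] then expands materialize[Int] in place.
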
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index 53848641eb..0cc6b07104 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -99,7 +99,7 @@ trait ScalaReflection {
constructParams(t).map(_.name.toString)
}
- def getClassFromType(tpe: Type): Class[_] = ???
+ def getClassFromType(tpe: Type): Class[_]
/**
* Return the Scala Type for `T` in the current classloader mirror.
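
Dropping the ??? body makes getClassFromType abstract in the shared ScalaReflection trait, so the runtime-reflection object and the macro helper each have to supply an implementation that fits their universe. A sketch of that split using the same cake-over-a-Universe layout (the names ReflectionShared and RuntimeReflection are illustrative, not Spark's):

import scala.reflect.api.Universe

trait ReflectionShared {
  val universe: Universe
  import universe._
  // Abstract: only the runtime-reflection side can load a java.lang.Class;
  // a macro-universe implementation has to answer the question differently.
  def getClassFromType(tpe: Type): Class[_]
}

object RuntimeReflection extends ReflectionShared {
  val universe: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe
  import universe._
  def getClassFromType(tpe: Type): Class[_] =
    runtimeMirror(getClass.getClassLoader).runtimeClass(tpe.typeSymbol.asClass)
}
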
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
index 4df9062018..c0730bb921 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
@@ -409,3 +409,17 @@ class ExpressionEncoderSuite extends PlanTest with AnalysisTest {
}
}
}
+
+object MoreExpressions {
+ //implicit def encoder[T : TypeTag]: ExpressionEncoder[T] =
+ //org.apache.spark.sql.catalyst.ScalaMacros.newEncoder[T]
+
+ import org.apache.spark.sql.catalyst.ScalaMacros._
+
+ def foo(implicit ec: ExpressionEncoder[Int]) = ec
+
+ val x = foo
+
+ //def tester[E: ExpressionEncoder[E]] = implicitly[ExpressionEncoder[E]]
+
+}
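
The MoreExpressions object above exercises the implicit macro by importing ScalaMacros._ and then requesting an ExpressionEncoder[Int] through an ordinary implicit parameter. A usage sketch against the hypothetical Encoder materializer shown earlier (names are illustrative; the materializer must be compiled in a separate unit before this code):

object EncoderUsage {
  // No import is needed here: materialize lives in Encoder's companion object,
  // so it is already in the implicit scope of Encoder[T]. The test above needs
  // import ScalaMacros._ because newEncoder is not defined in a companion.
  def describe[T](implicit enc: Encoder[T]): String = enc.schema

  // Triggers macro expansion of materialize[Int] at this call site.
  val intSchema: String = describe[Int]
}
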