package org.apache.spark.sql.catalyst

import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context

import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.catalyst.expressions.BoundReference
import org.apache.spark.sql.catalyst.expressions.objects.AssertNotNull
import org.apache.spark.sql.types.StructType

/** The explicit type parameter on the `Context` is a workaround for path-dependent
  * macro helpers, described at http://docs.scala-lang.org/overviews/macros/overview.html.
  * TODO: remove the type parameter once Scala 2.10 support is dropped. */
class MacrosHelper[C <: Context](val context: C) extends ScalaReflection {

  val universe: context.universe.type = context.universe

  val mirror: universe.Mirror = context.mirror

  import universe._

  def generate[T: context.WeakTypeTag]: context.Tree = {
    val tag = implicitly[WeakTypeTag[T]]
    val tpe = tag.tpe

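    // Types not defined by their constructor parameters (e.g. Int or String) are
    // "flat": they encode to a single column rather than one column per field.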
    val flat = !definedByConstructorParams(tpe)
    val inputObject = BoundReference(0, dataTypeFor[T](tag), nullable = true)
    val nullSafeInput = if (flat) {
      inputObject
    } else {
      // For an input object of non-flat type, we can't encode it to a row if it is
      // null, since Spark SQL doesn't allow the top-level row to be null; only its
      // columns can be null.
      AssertNotNull(inputObject, Seq("top level non-flat input object"))
    }
    val serializer = serializerFor[T](nullSafeInput)(tag)
    val deserializer = deserializerFor[T](tag)

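    // The encoder's schema must be a struct, so a non-struct type is wrapped in a
    // single-field struct named "value", mirroring ExpressionEncoder's handling of
    // primitive types.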
    val schema = schemaFor[T](tag) match {
      case Schema(s: StructType, _) => s
      case Schema(dt, nullable) => new StructType().add("value", dt, nullable)
    }

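    // Splice the derived schema, serializer, and deserializer into a tree that
    // constructs the encoder inline at the macro call site.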
    q"""new _root_.org.apache.spark.sql.catalyst.encoders.ExpressionEncoder[$tpe](
      ${reify(schema)},
      ${reify(flat)},
      ${reify(serializer.flatten)},
      ${reify(deserializer)},
      _root_.scala.reflect.classTag[$tpe])"""
  }

}


object ScalaMacros {

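  /** Derives an [[ExpressionEncoder]] for `T` at compile time instead of through
    * runtime reflection. */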
  final def newEncoder[T]: ExpressionEncoder[T] = macro newEncoderImpl[T]

  def newEncoderImpl[T: c.WeakTypeTag](c: Context): c.Expr[ExpressionEncoder[T]] = {
    val helper = new MacrosHelper[c.type](c)
    c.Expr[ExpressionEncoder[T]](helper.generate[T])
  }

}
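
// A minimal usage sketch, assuming the macro expands successfully for the given
// type; `Person` is a hypothetical example type, not part of this file:
//
//   import org.apache.spark.sql.catalyst.ScalaMacros
//
//   case class Person(name: String, age: Int)
//
//   // The encoder (schema, serializer, deserializer) is generated at the call
//   // site when this line is compiled.
//   val personEncoder = ScalaMacros.newEncoder[Person]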