about summary refs log tree commit diff
path: root/sql/catalyst
diff options
context:
space:
mode:
Diffstat (limited to 'sql/catalyst')
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala | 19
-rw-r--r-- sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala | 27
2 files changed, 45 insertions(+), 1 deletion(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index 0369129393..cd04bdf02c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -187,6 +187,14 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] {
} else {
arg
}
+ case Some(arg: TreeNode[_]) if children contains arg =>
+ val newChild = arg.asInstanceOf[BaseType].transformDown(rule)
+ if (!(newChild fastEquals arg)) {
+ changed = true
+ Some(newChild)
+ } else {
+ Some(arg)
+ }
case m: Map[_,_] => m
case args: Traversable[_] => args.map {
case arg: TreeNode[_] if children contains arg =>
@@ -231,6 +239,14 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] {
} else {
arg
}
+ case Some(arg: TreeNode[_]) if children contains arg =>
+ val newChild = arg.asInstanceOf[BaseType].transformUp(rule)
+ if (!(newChild fastEquals arg)) {
+ changed = true
+ Some(newChild)
+ } else {
+ Some(arg)
+ }
case m: Map[_,_] => m
case args: Traversable[_] => args.map {
case arg: TreeNode[_] if children contains arg =>
@@ -273,7 +289,8 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] {
} catch {
case e: java.lang.IllegalArgumentException =>
throw new TreeNodeException(
- this, s"Failed to copy node. Is otherCopyArgs specified correctly for $nodeName?")
+ this, s"Failed to copy node. Is otherCopyArgs specified correctly for $nodeName? "
+ + s"Exception message: ${e.getMessage}.")
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
index 1ddc41a731..6344874538 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
@@ -22,6 +22,17 @@ import scala.collection.mutable.ArrayBuffer
import org.scalatest.FunSuite
import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.types.{StringType, NullType}
+
+case class Dummy(optKey: Option[Expression]) extends Expression {
+ def children = optKey.toSeq
+ def references = Set.empty[Attribute]
+ def nullable = true
+ def dataType = NullType
+ override lazy val resolved = true
+ type EvaluatedType = Any
+ def eval(input: Row) = null.asInstanceOf[Any]
+}
class TreeNodeSuite extends FunSuite {
test("top node changed") {
@@ -75,4 +86,20 @@ class TreeNodeSuite extends FunSuite {
assert(expected === actual)
}
+
+ test("transform works on nodes with Option children") {
+ val dummy1 = Dummy(Some(Literal("1", StringType)))
+ val dummy2 = Dummy(None)
+ val toZero: PartialFunction[Expression, Expression] = { case Literal(_, _) => Literal(0) }
+
+ var actual = dummy1 transformDown toZero
+ assert(actual === Dummy(Some(Literal(0))))
+
+ actual = dummy1 transformUp toZero
+ assert(actual === Dummy(Some(Literal(0))))
+
+ actual = dummy2 transform toZero
+ assert(actual === Dummy(None))
+ }
+
}