author     Reynold Xin <rxin@databricks.com>              2016-09-27 10:20:30 -0700
committer  Herman van Hovell <hvanhovell@databricks.com>  2016-09-27 10:20:30 -0700
commit     120723f934dc386a46a043d2833bfcee60d14e74 (patch)
tree       86096049742093476ce81188ea32deeded5edb32
parent     2cac3b2d4a4a4f3d0d45af4defc23bb0ba53484b (diff)
[SPARK-17682][SQL] Mark children as final for unary, binary, leaf expressions and plan nodes
## What changes were proposed in this pull request?

This patch marks the children method as final in unary, binary, and leaf expressions and plan nodes (both logical plan and physical plan), as brought up in http://apache-spark-developers-list.1001551.n3.nabble.com/Should-LeafExpression-have-children-final-override-like-Nondeterministic-td19104.html

## How was this patch tested?

This is a simple modifier change and has no impact on test coverage.

Author: Reynold Xin <rxin@databricks.com>

Closes #15256 from rxin/SPARK-17682.
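As a rough illustration of the pattern this patch applies, the sketch below uses simplified stand-ins for the expression hierarchy (not Spark's actual classes; `Literal` and `Negate` are hypothetical examples): the shared base classes define `children` once and mark it final, so concrete expressions only declare `child`/`left`/`right`.

```scala
// Simplified sketch of the expression hierarchy after this patch (not the real
// Spark classes): `children` is defined once per base class and marked final.
abstract class Expression {
  def children: Seq[Expression]
}

abstract class LeafExpression extends Expression {
  override final def children: Seq[Expression] = Nil
}

abstract class UnaryExpression extends Expression {
  def child: Expression
  override final def children: Seq[Expression] = child :: Nil
}

abstract class BinaryExpression extends Expression {
  def left: Expression
  def right: Expression
  override final def children: Seq[Expression] = Seq(left, right)
}

// Hypothetical concrete nodes: they inherit `children` from the base class, so
// redundant overrides like the ones removed from ExplodeBase and Inline in this
// patch are neither needed nor allowed anymore.
case class Literal(value: Int) extends LeafExpression
case class Negate(child: Expression) extends UnaryExpression

object FinalChildrenDemo {
  def main(args: Array[String]): Unit = {
    println(Negate(Literal(1)).children)  // List(Literal(1))
  }
}
```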
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala     6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala     4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Command.scala      1
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala  6
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala                     6
5 files changed, 9 insertions, 14 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index 7abbbe257d..fa1a2ad56c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -295,7 +295,7 @@ trait Nondeterministic extends Expression {
*/
abstract class LeafExpression extends Expression {
- def children: Seq[Expression] = Nil
+ override final def children: Seq[Expression] = Nil
}
@@ -307,7 +307,7 @@ abstract class UnaryExpression extends Expression {
def child: Expression
- override def children: Seq[Expression] = child :: Nil
+ override final def children: Seq[Expression] = child :: Nil
override def foldable: Boolean = child.foldable
override def nullable: Boolean = child.nullable
@@ -394,7 +394,7 @@ abstract class BinaryExpression extends Expression {
def left: Expression
def right: Expression
- override def children: Seq[Expression] = Seq(left, right)
+ override final def children: Seq[Expression] = Seq(left, right)
override def foldable: Boolean = left.foldable && right.foldable
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
index 9d5c856a23..f74208ff66 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
@@ -152,8 +152,6 @@ case class Stack(children: Seq[Expression])
abstract class ExplodeBase(child: Expression, position: Boolean)
extends UnaryExpression with Generator with CodegenFallback with Serializable {
- override def children: Seq[Expression] = child :: Nil
-
override def checkInputDataTypes(): TypeCheckResult = {
if (child.dataType.isInstanceOf[ArrayType] || child.dataType.isInstanceOf[MapType]) {
TypeCheckResult.TypeCheckSuccess
@@ -257,8 +255,6 @@ case class PosExplode(child: Expression) extends ExplodeBase(child, position = t
extended = "> SELECT _FUNC_(array(struct(1, 'a'), struct(2, 'b')));\n [1,a]\n [2,b]")
case class Inline(child: Expression) extends UnaryExpression with Generator with CodegenFallback {
- override def children: Seq[Expression] = child :: Nil
-
override def checkInputDataTypes(): TypeCheckResult = child.dataType match {
case ArrayType(et, _) if et.isInstanceOf[StructType] =>
TypeCheckResult.TypeCheckSuccess
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Command.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Command.scala
index 64f57835c8..38f47081b6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Command.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Command.scala
@@ -25,6 +25,5 @@ import org.apache.spark.sql.catalyst.expressions.Attribute
* eagerly executed.
*/
trait Command extends LeafNode {
- final override def children: Seq[LogicalPlan] = Seq.empty
override def output: Seq[Attribute] = Seq.empty
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
index 6d7799151d..09725473a3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
@@ -276,7 +276,7 @@ abstract class LogicalPlan extends QueryPlan[LogicalPlan] with Logging {
* A logical plan node with no children.
*/
abstract class LeafNode extends LogicalPlan {
- override def children: Seq[LogicalPlan] = Nil
+ override final def children: Seq[LogicalPlan] = Nil
override def producedAttributes: AttributeSet = outputSet
}
@@ -286,7 +286,7 @@ abstract class LeafNode extends LogicalPlan {
abstract class UnaryNode extends LogicalPlan {
def child: LogicalPlan
- override def children: Seq[LogicalPlan] = child :: Nil
+ override final def children: Seq[LogicalPlan] = child :: Nil
/**
* Generates an additional set of aliased constraints by replacing the original constraint
@@ -330,5 +330,5 @@ abstract class BinaryNode extends LogicalPlan {
def left: LogicalPlan
def right: LogicalPlan
- override def children: Seq[LogicalPlan] = Seq(left, right)
+ override final def children: Seq[LogicalPlan] = Seq(left, right)
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
index 6aeefa6edd..48d6ef6dcd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
@@ -380,7 +380,7 @@ object SparkPlan {
}
trait LeafExecNode extends SparkPlan {
- override def children: Seq[SparkPlan] = Nil
+ override final def children: Seq[SparkPlan] = Nil
override def producedAttributes: AttributeSet = outputSet
}
@@ -394,7 +394,7 @@ object UnaryExecNode {
trait UnaryExecNode extends SparkPlan {
def child: SparkPlan
- override def children: Seq[SparkPlan] = child :: Nil
+ override final def children: Seq[SparkPlan] = child :: Nil
override def outputPartitioning: Partitioning = child.outputPartitioning
}
@@ -403,5 +403,5 @@ trait BinaryExecNode extends SparkPlan {
def left: SparkPlan
def right: SparkPlan
- override def children: Seq[SparkPlan] = Seq(left, right)
+ override final def children: Seq[SparkPlan] = Seq(left, right)
}
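
The plan-node side of the change buys the same compile-time guarantee. The sketch below uses simplified stand-ins (not the real LogicalPlan/LeafNode/Command definitions; `BadNode` is a hypothetical example) to show why the explicit `children` override could be deleted from Command and why a stray redefinition no longer compiles.

```scala
// Simplified stand-ins for the logical-plan classes touched by this patch.
abstract class LogicalPlan {
  def children: Seq[LogicalPlan]
}

abstract class LeafNode extends LogicalPlan {
  override final def children: Seq[LogicalPlan] = Nil
}

// Command now simply inherits children = Nil, which is why its own
// `final override def children` definition was removed in this patch.
trait Command extends LeafNode

// With `final` in place, an accidental redefinition is rejected at compile time:
//
//   case class BadNode() extends LeafNode {
//     override def children: Seq[LogicalPlan] = Seq(this)  // error: cannot override final member
//   }
```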