diff options
author | gatorsmile <gatorsmile@gmail.com> | 2016-03-11 11:59:18 +0800 |
---|---|---|
committer | Wenchen Fan <wenchen@databricks.com> | 2016-03-11 11:59:18 +0800 |
commit | 560489f4e16ff18b5e66e7de1bb84d890369a462 (patch) | |
tree | 32e11c66ef3c62e174c722fa04539a12be44a42d /sql/core | |
parent | 4d535d1f1c19faa43f96433aee8760e37b1690ea (diff) | |
download | spark-560489f4e16ff18b5e66e7de1bb84d890369a462.tar.gz spark-560489f4e16ff18b5e66e7de1bb84d890369a462.tar.bz2 spark-560489f4e16ff18b5e66e7de1bb84d890369a462.zip |
[SPARK-13732][SPARK-13797][SQL] Remove projectList from Window and Eliminate useless Window
#### What changes were proposed in this pull request?
`projectList` is useless. Its value is always the same as `child.output`. Remove it from the class `Window`. Removal can simplify the code in Analyzer and Optimizer.
This PR is based on the discussion started by cloud-fan in a separate PR:
https://github.com/apache/spark/pull/5604#discussion_r55140466
This PR also eliminates useless `Window`.
cloud-fan yhuai
#### How was this patch tested?
Existing test cases cover it.
Author: gatorsmile <gatorsmile@gmail.com>
Author: xiaoli <lixiao1983@gmail.com>
Author: Xiao Li <xiaoli@Xiaos-MacBook-Pro.local>
Closes #11565 from gatorsmile/removeProjListWindow.
Diffstat (limited to 'sql/core')
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala | 5 | ||||
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/execution/Window.scala | 6 |
2 files changed, 5 insertions, 6 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala index debd04aa95..bae0750788 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala @@ -344,9 +344,8 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] { execution.Filter(condition, planLater(child)) :: Nil case e @ logical.Expand(_, _, child) => execution.Expand(e.projections, e.output, planLater(child)) :: Nil - case logical.Window(projectList, windowExprs, partitionSpec, orderSpec, child) => - execution.Window( - projectList, windowExprs, partitionSpec, orderSpec, planLater(child)) :: Nil + case logical.Window(windowExprs, partitionSpec, orderSpec, child) => + execution.Window(windowExprs, partitionSpec, orderSpec, planLater(child)) :: Nil case logical.Sample(lb, ub, withReplacement, seed, child) => execution.Sample(lb, ub, withReplacement, seed, planLater(child)) :: Nil case logical.LocalRelation(output, data) => diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/Window.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/Window.scala index 84154a47de..a4c0e1c9fb 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/Window.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/Window.scala @@ -81,14 +81,14 @@ import org.apache.spark.util.collection.unsafe.sort.{UnsafeExternalSorter, Unsaf * of specialized classes: [[RowBoundOrdering]] & [[RangeBoundOrdering]]. 
*/ case class Window( - projectList: Seq[Attribute], windowExpression: Seq[NamedExpression], partitionSpec: Seq[Expression], orderSpec: Seq[SortOrder], child: SparkPlan) extends UnaryNode { - override def output: Seq[Attribute] = projectList ++ windowExpression.map(_.toAttribute) + override def output: Seq[Attribute] = + child.output ++ windowExpression.map(_.toAttribute) override def requiredChildDistribution: Seq[Distribution] = { if (partitionSpec.isEmpty) { @@ -275,7 +275,7 @@ case class Window( val unboundToRefMap = expressions.zip(references).toMap val patchedWindowExpression = windowExpression.map(_.transform(unboundToRefMap)) UnsafeProjection.create( - projectList ++ patchedWindowExpression, + child.output ++ patchedWindowExpression, child.output) } |