about summary refs log tree commit diff
path: root/sql
diff options
context:
space:
mode:
authorDongjoon Hyun <dongjoon@apache.org>2016-05-18 11:51:50 +0100
committerSean Owen <sowen@cloudera.com>2016-05-18 11:51:50 +0100
commitd2f81df1ba67f5d3fc39719d9a67c52d7903e59e (patch)
tree8989db2c264e1af7d432110cde039ef47fe517a9 /sql
parent2f9047b5eb969e0198b8a73e392642ca852ba786 (diff)
downloadspark-d2f81df1ba67f5d3fc39719d9a67c52d7903e59e.tar.gz
spark-d2f81df1ba67f5d3fc39719d9a67c52d7903e59e.tar.bz2
spark-d2f81df1ba67f5d3fc39719d9a67c52d7903e59e.zip
[MINOR][SQL] Remove unused pattern matching variables in Optimizers.
## What changes were proposed in this pull request? This PR removes unused pattern matching variables in Optimizers in order to improve readability. ## How was this patch tested? Pass the existing Jenkins tests. Author: Dongjoon Hyun <dongjoon@apache.org> Closes #13145 from dongjoon-hyun/remove_unused_pattern_matching_variables.
Diffstat (limited to 'sql')
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala33
1 file changed, 16 insertions, 17 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index af7532e0c0..be9f03d4ba 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -151,9 +151,8 @@ object SamplePushDown extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
// Push down projection into sample
- case Project(projectList, s @ Sample(lb, up, replace, seed, child)) =>
- Sample(lb, up, replace, seed,
- Project(projectList, child))()
+ case Project(projectList, Sample(lb, up, replace, seed, child)) =>
+ Sample(lb, up, replace, seed, Project(projectList, child))()
}
}
@@ -226,7 +225,7 @@ object LimitPushDown extends Rule[LogicalPlan] {
private def stripGlobalLimitIfPresent(plan: LogicalPlan): LogicalPlan = {
plan match {
- case GlobalLimit(expr, child) => child
+ case GlobalLimit(_, child) => child
case _ => plan
}
}
@@ -259,7 +258,7 @@ object LimitPushDown extends Rule[LogicalPlan] {
// - If one side is already limited, stack another limit on top if the new limit is smaller.
// The redundant limit will be collapsed by the CombineLimits rule.
// - If neither side is limited, limit the side that is estimated to be bigger.
- case LocalLimit(exp, join @ Join(left, right, joinType, condition)) =>
+ case LocalLimit(exp, join @ Join(left, right, joinType, _)) =>
val newJoin = joinType match {
case RightOuter => join.copy(right = maybePushLimit(exp, right))
case LeftOuter => join.copy(left = maybePushLimit(exp, left))
@@ -408,7 +407,7 @@ object ColumnPruning extends Rule[LogicalPlan] {
p.copy(child = g.copy(join = false))
// Eliminate unneeded attributes from right side of a Left Existence Join.
- case j @ Join(left, right, LeftExistence(_), condition) =>
+ case j @ Join(_, right, LeftExistence(_), _) =>
j.copy(right = prunedChild(right, j.references))
// all the columns will be used to compare, so we can't prune them
@@ -440,10 +439,10 @@ object ColumnPruning extends Rule[LogicalPlan] {
case w: Window if w.windowExpressions.isEmpty => w.child
// Eliminate no-op Projects
- case p @ Project(projectList, child) if sameOutput(child.output, p.output) => child
+ case p @ Project(_, child) if sameOutput(child.output, p.output) => child
// Can't prune the columns on LeafNode
- case p @ Project(_, l: LeafNode) => p
+ case p @ Project(_, _: LeafNode) => p
// for all other logical plans that inherits the output from it's children
case p @ Project(_, child) =>
@@ -541,7 +540,7 @@ object CollapseProject extends Rule[LogicalPlan] {
*/
object CollapseRepartition extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transformUp {
- case r @ Repartition(numPartitions, shuffle, Repartition(_, _, child)) =>
+ case Repartition(numPartitions, shuffle, Repartition(_, _, child)) =>
Repartition(numPartitions, shuffle, child)
}
}
@@ -917,7 +916,7 @@ object CombineUnions extends Rule[LogicalPlan] {
*/
object CombineFilters extends Rule[LogicalPlan] with PredicateHelper {
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
- case ff @ Filter(fc, nf @ Filter(nc, grandChild)) =>
+ case Filter(fc, nf @ Filter(nc, grandChild)) =>
(ExpressionSet(splitConjunctivePredicates(fc)) --
ExpressionSet(splitConjunctivePredicates(nc))).reduceOption(And) match {
case Some(ac) =>
@@ -1071,9 +1070,9 @@ object PushDownPredicate extends Rule[LogicalPlan] with PredicateHelper {
}
// two filters should be combine together by other rules
- case filter @ Filter(_, f: Filter) => filter
+ case filter @ Filter(_, _: Filter) => filter
// should not push predicates through sample, or will generate different results.
- case filter @ Filter(_, s: Sample) => filter
+ case filter @ Filter(_, _: Sample) => filter
case filter @ Filter(condition, u: UnaryNode) if u.expressions.forall(_.deterministic) =>
pushDownPredicate(filter, u.child) { predicate =>
@@ -1352,11 +1351,11 @@ object RemoveDispensableExpressions extends Rule[LogicalPlan] {
*/
object CombineLimits extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
- case ll @ GlobalLimit(le, nl @ GlobalLimit(ne, grandChild)) =>
+ case GlobalLimit(le, GlobalLimit(ne, grandChild)) =>
GlobalLimit(Least(Seq(ne, le)), grandChild)
- case ll @ LocalLimit(le, nl @ LocalLimit(ne, grandChild)) =>
+ case LocalLimit(le, LocalLimit(ne, grandChild)) =>
LocalLimit(Least(Seq(ne, le)), grandChild)
- case ll @ Limit(le, nl @ Limit(ne, grandChild)) =>
+ case Limit(le, Limit(ne, grandChild)) =>
Limit(Least(Seq(ne, le)), grandChild)
}
}
@@ -1588,7 +1587,7 @@ object EmbedSerializerInFilter extends Rule[LogicalPlan] {
*/
object RewritePredicateSubquery extends Rule[LogicalPlan] with PredicateHelper {
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
- case f @ Filter(condition, child) =>
+ case Filter(condition, child) =>
val (withSubquery, withoutSubquery) =
splitConjunctivePredicates(condition).partition(PredicateSubquery.hasPredicateSubquery)
@@ -1619,7 +1618,7 @@ object RewritePredicateSubquery extends Rule[LogicalPlan] with PredicateHelper {
val replaced = predicate transformUp {
case PredicateSubquery(sub, conditions, nullAware, _) =>
// TODO: support null-aware join
- val exists = AttributeReference("exists", BooleanType, false)()
+ val exists = AttributeReference("exists", BooleanType, nullable = false)()
joined = Join(joined, sub, ExistenceJoin(exists), conditions.reduceLeftOption(And))
exists
}