path: root/sql/catalyst
author    Reynold Xin <rxin@apache.org>    2014-06-19 22:34:21 -0700
committer Reynold Xin <rxin@apache.org>    2014-06-19 22:34:30 -0700
commit a0e22d3983e1b066373eacc4cc1e2da1229f5d00 (patch)
tree   5bdb11412763db89df728b3f2550f767c0172666 /sql/catalyst
parent 8aa5951ed1ec3787b650aaee4fd9ec4132ba4365 (diff)
More minor scaladoc cleanup for Spark SQL.
Author: Reynold Xin <rxin@apache.org>

Closes #1142 from rxin/sqlclean and squashes the following commits:

67a789e [Reynold Xin] More minor scaladoc cleanup for Spark SQL.

(cherry picked from commit 278ec8a203c7f1de2716d8284f9bdafa54eee1cb)
Signed-off-by: Reynold Xin <rxin@apache.org>
Diffstat (limited to 'sql/catalyst')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala     |  8
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala  |  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala           | 34
3 files changed, 21 insertions, 23 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
index 66bff660ca..6d331fb501 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
@@ -33,7 +33,7 @@ object HiveTypeCoercion {
}
/**
- * A collection of [[catalyst.rules.Rule Rules]] that can be used to coerce differing types that
+ * A collection of [[Rule Rules]] that can be used to coerce differing types that
* participate in operations into compatible ones. Most of these rules are based on Hive semantics,
* but they do not introduce any dependencies on the hive codebase. For this reason they remain in
* Catalyst until we have a more standard set of coercions.
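For context, a minimal sketch of the kind of rewrite these coercion rules perform, written against a toy expression tree rather than Catalyst's real classes (CoercionSketch, Lit, Add, IntT, DoubleT are illustrative names only): an arithmetic node over mismatched numeric types gets a Cast inserted on the narrower side.

object CoercionSketch {
  sealed trait DType
  case object IntT extends DType
  case object DoubleT extends DType

  sealed trait Expr { def dataType: DType }
  case class Lit(value: Any, dataType: DType) extends Expr
  case class Cast(child: Expr, dataType: DType) extends Expr
  case class Add(left: Expr, right: Expr) extends Expr {
    def dataType: DType =
      if (left.dataType == DoubleT || right.dataType == DoubleT) DoubleT else IntT
  }

  // Widen an Int operand to Double when the other side is already a Double,
  // so both inputs of the Add end up with a compatible type.
  def coerce(e: Expr): Expr = e match {
    case Add(l, r) if l.dataType == IntT && r.dataType == DoubleT => Add(Cast(l, DoubleT), r)
    case Add(l, r) if l.dataType == DoubleT && r.dataType == IntT => Add(l, Cast(r, DoubleT))
    case other => other
  }

  def main(args: Array[String]): Unit =
    // Add(Lit(1), Lit(2.0)) gains a Cast on the Int side.
    println(coerce(Add(Lit(1, IntT), Lit(2.0, DoubleT))))
}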
@@ -53,8 +53,8 @@ trait HiveTypeCoercion {
Nil
/**
- * Applies any changes to [[catalyst.expressions.AttributeReference AttributeReference]] data
- * types that are made by other rules to instances higher in the query tree.
+ * Applies any changes to [[AttributeReference]] data types that are made by other rules to
+ * instances higher in the query tree.
*/
object PropagateTypes extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
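A rough sketch of the idea behind PropagateTypes, using toy classes (AttrRef and propagate are illustrative, not Catalyst's API): the child's output is treated as the source of truth, and stale references higher in the tree are rewritten to the updated data type, keyed by expression id.

object PropagateTypesSketch {
  case class AttrRef(id: Long, name: String, dataType: String)

  // Treat the child's output as the source of truth for attribute types and
  // rewrite stale references higher in the tree to match, keyed by id.
  def propagate(parentRefs: Seq[AttrRef], childOutput: Seq[AttrRef]): Seq[AttrRef] = {
    val typesById = childOutput.map(a => a.id -> a.dataType).toMap
    parentRefs.map(a => typesById.get(a.id).fold(a)(t => a.copy(dataType = t)))
  }

  def main(args: Array[String]): Unit = {
    val child  = Seq(AttrRef(1L, "price", "double")) // type widened by some earlier rule
    val parent = Seq(AttrRef(1L, "price", "int"))    // stale reference higher up
    println(propagate(parent, child))                // List(AttrRef(1,price,double))
  }
}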
@@ -244,7 +244,7 @@ trait HiveTypeCoercion {
}
/**
- * Casts to/from [[catalyst.types.BooleanType BooleanType]] are transformed into comparisons since
+ * Casts to/from [[BooleanType]] are transformed into comparisons since
* the JVM does not consider Booleans to be numeric types.
*/
object BooleanCasts extends Rule[LogicalPlan] {
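One plausible shape of the BooleanCasts rewrite described above, sketched on a toy tree (CastToBoolean and NotEqualToZero are stand-ins, not the actual Catalyst expressions): the cast becomes an explicit comparison.

object BooleanCastSketch {
  sealed trait Expr
  case class IntLit(value: Int) extends Expr
  case class CastToBoolean(child: Expr) extends Expr
  case class NotEqualToZero(child: Expr) extends Expr // stands in for a comparison expression

  // Replace the cast with an explicit comparison, since the JVM does not treat
  // a Boolean as a numeric value that could be cast directly.
  def rewrite(e: Expr): Expr = e match {
    case CastToBoolean(child) => NotEqualToZero(child)
    case other                => other
  }

  def main(args: Array[String]): Unit =
    println(rewrite(CastToBoolean(IntLit(3)))) // NotEqualToZero(IntLit(3))
}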
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
index a8145c37c2..66ae22e95b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
@@ -103,7 +103,7 @@ case class Alias(child: Expression, name: String)
* A reference to an attribute produced by another operator in the tree.
*
* @param name The name of this attribute, should only be used during analysis or for debugging.
- * @param dataType The [[types.DataType DataType]] of this attribute.
+ * @param dataType The [[DataType]] of this attribute.
* @param nullable True if null is a valid value for this attribute.
* @param exprId A globally unique id used to check if different AttributeReferences refer to the
* same attribute.
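A simplified stand-in for the documented fields, for illustration only (ToyAttributeReference is not the real class, which also carries qualifiers and can generate fresh expression ids):

object AttributeReferenceSketch {
  // Mirrors only the parameters documented above.
  case class ToyAttributeReference(
      name: String,      // used during analysis or for debugging only
      dataType: String,  // the attribute's DataType, here just a label
      nullable: Boolean, // whether null is a legal value
      exprId: Long)      // globally unique id identifying the attribute

  def main(args: Array[String]): Unit = {
    val a = ToyAttributeReference("price", "DoubleType", nullable = true, exprId = 42L)
    val b = a.copy(name = "p")    // renamed, but still the same attribute
    println(a.exprId == b.exprId) // true: identity is the exprId, not the name
  }
}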
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index 25a347bec0..b20b5de8c4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -95,13 +95,13 @@ object ColumnPruning extends Rule[LogicalPlan] {
Project(substitutedProjection, child)
// Eliminate no-op Projects
- case Project(projectList, child) if(child.output == projectList) => child
+ case Project(projectList, child) if child.output == projectList => child
}
}
/**
- * Replaces [[catalyst.expressions.Expression Expressions]] that can be statically evaluated with
- * equivalent [[catalyst.expressions.Literal Literal]] values. This rule is more specific with
+ * Replaces [[Expression Expressions]] that can be statically evaluated with
+ * equivalent [[Literal]] values. This rule is more specific with
* Null value propagation from bottom to top of the expression tree.
*/
object NullPropagation extends Rule[LogicalPlan] {
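A minimal sketch of the no-op Project elimination guarded in the hunk above, on toy classes (Attr, Relation, Project here are illustrative, not Catalyst's):

object NoOpProjectSketch {
  case class Attr(name: String)

  sealed trait Plan { def output: Seq[Attr] }
  case class Relation(output: Seq[Attr]) extends Plan
  case class Project(projectList: Seq[Attr], child: Plan) extends Plan {
    def output: Seq[Attr] = projectList
  }

  // A projection that re-selects exactly its child's output adds nothing and
  // can be removed, which is what the rewritten guard checks.
  def eliminate(plan: Plan): Plan = plan match {
    case Project(projectList, child) if child.output == projectList => child
    case other => other
  }

  def main(args: Array[String]): Unit = {
    val rel = Relation(Seq(Attr("a"), Attr("b")))
    println(eliminate(Project(Seq(Attr("a"), Attr("b")), rel))) // Relation(List(Attr(a), Attr(b)))
  }
}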
@@ -110,8 +110,8 @@ object NullPropagation extends Rule[LogicalPlan] {
case e @ Count(Literal(null, _)) => Cast(Literal(0L), e.dataType)
case e @ Sum(Literal(c, _)) if c == 0 => Cast(Literal(0L), e.dataType)
case e @ Average(Literal(c, _)) if c == 0 => Literal(0.0, e.dataType)
- case e @ IsNull(c) if c.nullable == false => Literal(false, BooleanType)
- case e @ IsNotNull(c) if c.nullable == false => Literal(true, BooleanType)
+ case e @ IsNull(c) if !c.nullable => Literal(false, BooleanType)
+ case e @ IsNotNull(c) if !c.nullable => Literal(true, BooleanType)
case e @ GetItem(Literal(null, _), _) => Literal(null, e.dataType)
case e @ GetItem(_, Literal(null, _)) => Literal(null, e.dataType)
case e @ GetField(Literal(null, _), _) => Literal(null, e.dataType)
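The two rewritten guards above amount to constant-folding null checks on non-nullable input; a self-contained sketch on toy classes (Attr and BoolLit are illustrative, not Catalyst's expressions):

object NullPropagationSketch {
  sealed trait Expr
  case class Attr(name: String, nullable: Boolean) extends Expr
  case class IsNull(child: Attr) extends Expr
  case class IsNotNull(child: Attr) extends Expr
  case class BoolLit(value: Boolean) extends Expr

  // A null check on something that can never be null is a constant.
  def propagate(e: Expr): Expr = e match {
    case IsNull(c) if !c.nullable    => BoolLit(false)
    case IsNotNull(c) if !c.nullable => BoolLit(true)
    case other                       => other
  }

  def main(args: Array[String]): Unit = {
    println(propagate(IsNull(Attr("id", nullable = false))))    // BoolLit(false)
    println(propagate(IsNotNull(Attr("id", nullable = false)))) // BoolLit(true)
  }
}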
@@ -154,8 +154,8 @@ object NullPropagation extends Rule[LogicalPlan] {
}
/**
- * Replaces [[catalyst.expressions.Expression Expressions]] that can be statically evaluated with
- * equivalent [[catalyst.expressions.Literal Literal]] values.
+ * Replaces [[Expression Expressions]] that can be statically evaluated with
+ * equivalent [[Literal]] values.
*/
object ConstantFolding extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
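A minimal sketch of the constant folding this comment describes, on a toy integer expression tree (Lit and Add are illustrative names, not Catalyst's):

object ConstantFoldingSketch {
  sealed trait Expr
  case class Lit(value: Int) extends Expr
  case class Add(left: Expr, right: Expr) extends Expr

  // Fold bottom-up: any node whose children reduce to literals becomes a literal.
  def fold(e: Expr): Expr = e match {
    case Add(l, r) =>
      (fold(l), fold(r)) match {
        case (Lit(a), Lit(b)) => Lit(a + b)
        case (fl, fr)         => Add(fl, fr)
      }
    case other => other
  }

  def main(args: Array[String]): Unit =
    println(fold(Add(Lit(1), Add(Lit(2), Lit(3))))) // Lit(6)
}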
@@ -197,7 +197,7 @@ object BooleanSimplification extends Rule[LogicalPlan] {
}
/**
- * Combines two adjacent [[catalyst.plans.logical.Filter Filter]] operators into one, merging the
+ * Combines two adjacent [[Filter]] operators into one, merging the
* conditions into one conjunctive predicate.
*/
object CombineFilters extends Rule[LogicalPlan] {
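A minimal sketch of CombineFilters on toy classes (Pred, And, Filter here are stand-ins, and the order of the merged conjuncts is arbitrary):

object CombineFiltersSketch {
  sealed trait Expr
  case class Pred(text: String) extends Expr
  case class And(left: Expr, right: Expr) extends Expr

  sealed trait Plan
  case class Relation(name: String) extends Plan
  case class Filter(condition: Expr, child: Plan) extends Plan

  // Two stacked filters are equivalent to one filter over the conjunction of
  // their conditions.
  def combine(plan: Plan): Plan = plan match {
    case Filter(outer, Filter(inner, grandChild)) => Filter(And(inner, outer), grandChild)
    case other => other
  }

  def main(args: Array[String]): Unit =
    println(combine(Filter(Pred("a > 1"), Filter(Pred("b < 2"), Relation("t")))))
    // Filter(And(Pred(b < 2),Pred(a > 1)),Relation(t))
}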
@@ -223,9 +223,8 @@ object SimplifyFilters extends Rule[LogicalPlan] {
}
/**
- * Pushes [[catalyst.plans.logical.Filter Filter]] operators through
- * [[catalyst.plans.logical.Project Project]] operators, in-lining any
- * [[catalyst.expressions.Alias Aliases]] that were defined in the projection.
+ * Pushes [[Filter]] operators through [[Project]] operators, in-lining any [[Alias Aliases]]
+ * that were defined in the projection.
*
* This heuristic is valid assuming the expression evaluation cost is minimal.
*/
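A rough sketch of pushing a filter through a projection while in-lining aliases, using plain strings as a stand-in for expression substitution (Project, Filter, push are toy names, not Catalyst's API):

object PushThroughProjectSketch {
  sealed trait Plan
  case class Relation(name: String) extends Plan
  case class Project(aliases: Map[String, String], child: Plan) extends Plan // alias name -> defining expression
  case class Filter(condition: String, child: Plan) extends Plan

  // In-line any aliased names appearing in the condition, then move the filter
  // beneath the projection.
  def push(plan: Plan): Plan = plan match {
    case Filter(cond, Project(aliases, child)) =>
      val inlined = aliases.foldLeft(cond) { case (c, (name, expr)) => c.replace(name, s"($expr)") }
      Project(aliases, Filter(inlined, child))
    case other => other
  }

  def main(args: Array[String]): Unit =
    println(push(Filter("total > 10", Project(Map("total" -> "price * qty"), Relation("orders")))))
    // Project(Map(total -> price * qty),Filter((price * qty) > 10,Relation(orders)))
}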
@@ -248,10 +247,10 @@ object PushPredicateThroughProject extends Rule[LogicalPlan] {
}
/**
- * Pushes down [[catalyst.plans.logical.Filter Filter]] operators where the `condition` can be
+ * Pushes down [[Filter]] operators where the `condition` can be
* evaluated using only the attributes of the left or right side of a join. Other
- * [[catalyst.plans.logical.Filter Filter]] conditions are moved into the `condition` of the
- * [[catalyst.plans.logical.Join Join]].
+ * [[Filter]] conditions are moved into the `condition` of the [[Join]].
+ *
* And also Pushes down the join filter, where the `condition` can be evaluated using only the
* attributes of the left or right side of sub query when applicable.
*
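A simplified sketch of the predicate split this comment describes (Pred, Split, and the string column sets are toy stand-ins; the real rule works on attribute sets and conjuncts of Catalyst expressions):

object PushThroughJoinSketch {
  case class Pred(refs: Set[String], text: String) // which columns a conjunct references
  case class Split(toLeft: Seq[Pred], toRight: Seq[Pred], stayInJoin: Seq[Pred])

  // Conjuncts that reference only one side's columns can be evaluated below the
  // join on that side; anything referencing both sides stays in the join condition.
  def split(conjuncts: Seq[Pred], leftCols: Set[String], rightCols: Set[String]): Split = {
    val (leftOnly, rest)   = conjuncts.partition(_.refs.subsetOf(leftCols))
    val (rightOnly, mixed) = rest.partition(_.refs.subsetOf(rightCols))
    Split(leftOnly, rightOnly, mixed)
  }

  def main(args: Array[String]): Unit = {
    val conjuncts = Seq(
      Pred(Set("l.a"), "l.a > 1"),
      Pred(Set("r.b"), "r.b < 2"),
      Pred(Set("l.a", "r.b"), "l.a = r.b"))
    println(split(conjuncts, leftCols = Set("l.a"), rightCols = Set("r.b")))
    // only "l.a = r.b" remains in the join condition
  }
}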
@@ -345,8 +344,7 @@ object PushPredicateThroughJoin extends Rule[LogicalPlan] with PredicateHelper {
}
/**
- * Removes [[catalyst.expressions.Cast Casts]] that are unnecessary because the input is already
- * the correct type.
+ * Removes [[Cast Casts]] that are unnecessary because the input is already the correct type.
*/
object SimplifyCasts extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transformAllExpressions {
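A minimal sketch of the cast removal described above, on toy classes (Attr and Cast with string type labels are illustrative, not Catalyst's):

object SimplifyCastsSketch {
  sealed trait Expr { def dataType: String }
  case class Attr(name: String, dataType: String) extends Expr
  case class Cast(child: Expr, dataType: String) extends Expr

  // A cast whose input already has the requested type is a no-op and can be dropped.
  def simplify(e: Expr): Expr = e match {
    case Cast(child, dt) if child.dataType == dt => child
    case other => other
  }

  def main(args: Array[String]): Unit =
    println(simplify(Cast(Attr("id", "IntegerType"), "IntegerType"))) // Attr(id,IntegerType)
}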
@@ -355,7 +353,7 @@ object SimplifyCasts extends Rule[LogicalPlan] {
}
/**
- * Combines two adjacent [[catalyst.plans.logical.Limit Limit]] operators into one, merging the
+ * Combines two adjacent [[Limit]] operators into one, merging the
* expressions into one single expression.
*/
object CombineLimits extends Rule[LogicalPlan] {
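A simplified sketch of combining adjacent limits; with plain integer counts the merged value is just the smaller count, whereas the real rule merges the two limit expressions into a single expression (Limit and Relation here are toy classes):

object CombineLimitsSketch {
  sealed trait Plan
  case class Relation(name: String) extends Plan
  case class Limit(count: Int, child: Plan) extends Plan

  // Two nested limits are equivalent to a single limit; with integer counts
  // that single limit is the smaller of the two.
  def combine(plan: Plan): Plan = plan match {
    case Limit(outer, Limit(inner, grandChild)) => Limit(math.min(outer, inner), grandChild)
    case other => other
  }

  def main(args: Array[String]): Unit =
    println(combine(Limit(5, Limit(10, Relation("t"))))) // Limit(5,Relation(t))
}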
@@ -366,7 +364,7 @@ object CombineLimits extends Rule[LogicalPlan] {
}
/**
- * Removes the inner [[catalyst.expressions.CaseConversionExpression]] that are unnecessary because
+ * Removes the inner [[CaseConversionExpression]] that are unnecessary because
* the inner conversion is overwritten by the outer one.
*/
object SimplifyCaseConversionExpressions extends Rule[LogicalPlan] {
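A minimal sketch of dropping inner case conversions that the outer one overwrites (Upper, Lower, Attr here are toy classes, not Catalyst's expressions):

object CaseConversionSketch {
  sealed trait Expr
  case class Attr(name: String) extends Expr
  case class Upper(child: Expr) extends Expr
  case class Lower(child: Expr) extends Expr

  // Only the outermost case conversion matters, since it overwrites whatever an
  // inner Upper/Lower produced, so the inner one can be removed.
  def simplify(e: Expr): Expr = e match {
    case Upper(Upper(c)) => Upper(c)
    case Upper(Lower(c)) => Upper(c)
    case Lower(Upper(c)) => Lower(c)
    case Lower(Lower(c)) => Lower(c)
    case other           => other
  }

  def main(args: Array[String]): Unit =
    println(simplify(Upper(Lower(Attr("name"))))) // Upper(Attr(name))
}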