author    Cheng Lian <lian.cs.zju@gmail.com>    2014-03-23 15:21:40 -0700
committer Patrick Wendell <pwendell@gmail.com>  2014-03-23 15:21:40 -0700
commit    8265dc7739caccc59bc2456b2df055ca96337fe4 (patch)
tree      5c68d011bf66a81471c118c85034687fab48ab55 /sql/catalyst
parent    57a4379c031e5d5901ba580422207d6aa2f19749 (diff)
Fixed coding style issues in Spark SQL
This PR addresses various coding style issues in Spark SQL, including but not limited to those mentioned by @mateiz in PR #146. As this PR affects lots of source files and may cause potential conflicts, it would be better to merge this as soon as possible *after* PR #205 (In-memory columnar representation for Spark SQL) is merged.

Author: Cheng Lian <lian.cs.zju@gmail.com>

Closes #208 from liancheng/fixCodingStyle and squashes the following commits:

fc2b528 [Cheng Lian] Merge branch 'master' into fixCodingStyle
b531273 [Cheng Lian] Fixed coding style issues in sql/hive
0b56f77 [Cheng Lian] Fixed coding style issues in sql/core
fae7b02 [Cheng Lian] Addressed styling issues mentioned by @marmbrus
9265366 [Cheng Lian] Fixed coding style issues in sql/core
3dcbbbd [Cheng Lian] Fixed relative package imports for package catalyst
Diffstat (limited to 'sql/catalyst')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala | 69
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala | 28
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala | 7
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala | 3
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala | 8
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/MultiInstanceRelation.scala | 8
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala | 1
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala | 8
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala | 56
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala | 7
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala | 7
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala | 6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Rand.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/WrapDynamic.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala | 6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala | 25
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/QueryPlanner.scala | 5
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala | 6
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/ScriptTransformation.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/TestRelation.scala | 3
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/Rule.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala | 14
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala | 2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala (renamed from sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/AnalysisSuite.scala) | 16
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala (renamed from sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/HiveTypeCoercionSuite.scala) | 3
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala (renamed from sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ExpressionEvaluationSuite.scala) | 5
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala | 19
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala | 13
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerTest.scala | 12
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala (renamed from sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/RuleExecutorSuite.scala) | 6
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala (renamed from sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/TreeNodeSuite.scala) | 19
48 files changed, 213 insertions, 203 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
new file mode 100644
index 0000000000..bf7318d2e0
--- /dev/null
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql
+package catalyst
+
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.expressions.AttributeReference
+import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
+import org.apache.spark.sql.catalyst.types._
+
+/**
+ * Provides experimental support for generating catalyst schemas for scala objects.
+ */
+object ScalaReflection {
+ import scala.reflect.runtime.universe._
+
+ /** Returns a Sequence of attributes for the given case class type. */
+ def attributesFor[T: TypeTag]: Seq[Attribute] = schemaFor[T] match {
+ case s: StructType =>
+ s.fields.map(f => AttributeReference(f.name, f.dataType, nullable = true)())
+ }
+
+ /** Returns a catalyst DataType for the given Scala Type using reflection. */
+ def schemaFor[T: TypeTag]: DataType = schemaFor(typeOf[T])
+
+ /** Returns a catalyst DataType for the given Scala Type using reflection. */
+ def schemaFor(tpe: `Type`): DataType = tpe match {
+ case t if t <:< typeOf[Product] =>
+ val params = t.member("<init>": TermName).asMethod.paramss
+ StructType(
+ params.head.map(p => StructField(p.name.toString, schemaFor(p.typeSignature), true)))
+ case t if t <:< typeOf[Seq[_]] =>
+ val TypeRef(_, _, Seq(elementType)) = t
+ ArrayType(schemaFor(elementType))
+ case t if t <:< typeOf[String] => StringType
+ case t if t <:< definitions.IntTpe => IntegerType
+ case t if t <:< definitions.LongTpe => LongType
+ case t if t <:< definitions.DoubleTpe => DoubleType
+ case t if t <:< definitions.ShortTpe => ShortType
+ case t if t <:< definitions.ByteTpe => ByteType
+ }
+
+ implicit class CaseClassRelation[A <: Product : TypeTag](data: Seq[A]) {
+
+ /**
+ * Implicitly added to Sequences of case class objects. Returns a catalyst logical relation
+ * for the the data in the sequence.
+ */
+ def asRelation: LocalRelation = {
+ val output = attributesFor[A]
+ LocalRelation(output, data)
+ }
+ }
+}
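
For context, a minimal usage sketch of the ScalaReflection object added above. `Person` is a hypothetical case class; the derived types follow the `schemaFor` cases in the patch:

import org.apache.spark.sql.catalyst.ScalaReflection._

case class Person(name: String, age: Int)

// attributesFor derives one AttributeReference per constructor parameter:
// name -> StringType, age -> IntegerType (both nullable = true).
val attributes = attributesFor[Person]

// The implicit CaseClassRelation class enriches Seq[Person] with asRelation,
// wrapping the data in a LocalRelation logical plan node.
val relation = Seq(Person("alice", 30), Person("bob", 25)).asRelation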
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
index 919bf4dbc8..9dec4e3d9e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala
@@ -17,21 +17,18 @@
package org.apache.spark.sql.catalyst
-import scala.util.matching.Regex
-import scala.util.parsing.combinator._
+import scala.util.parsing.combinator.lexical.StdLexical
+import scala.util.parsing.combinator.syntactical.StandardTokenParsers
import scala.util.parsing.input.CharArrayReader.EofCh
-import lexical._
-import syntactical._
-import token._
-import analysis._
-import expressions._
-import plans._
-import plans.logical._
-import types._
+import org.apache.spark.sql.catalyst.analysis._
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans._
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.types._
/**
- * A very simple SQL parser. Based loosly on:
+ * A very simple SQL parser. Based loosely on:
* https://github.com/stephentu/scala-sql-parser/blob/master/src/main/scala/parser.scala
*
* Limitations:
@@ -39,10 +36,9 @@ import types._
* - Keywords must be capital.
*
* This is currently included mostly for illustrative purposes. Users wanting more complete support
- * for a SQL like language should checkout the HiveQL support in the sql/hive subproject.
+ * for a SQL like language should checkout the HiveQL support in the sql/hive sub-project.
*/
class SqlParser extends StandardTokenParsers {
-
def apply(input: String): LogicalPlan = {
phrase(query)(new lexical.Scanner(input)) match {
case Success(r, x) => r
@@ -196,7 +192,7 @@ class SqlParser extends StandardTokenParsers {
protected lazy val from: Parser[LogicalPlan] = FROM ~> relations
- // Based very loosly on the MySQL Grammar.
+ // Based very loosely on the MySQL Grammar.
// http://dev.mysql.com/doc/refman/5.0/en/join.html
protected lazy val relations: Parser[LogicalPlan] =
relation ~ "," ~ relation ^^ { case r1 ~ _ ~ r2 => Join(r1, r2, Inner, None) } |
@@ -261,9 +257,9 @@ class SqlParser extends StandardTokenParsers {
andExpression * (OR ^^^ { (e1: Expression, e2: Expression) => Or(e1,e2) })
protected lazy val andExpression: Parser[Expression] =
- comparisionExpression * (AND ^^^ { (e1: Expression, e2: Expression) => And(e1,e2) })
+ comparisonExpression * (AND ^^^ { (e1: Expression, e2: Expression) => And(e1,e2) })
- protected lazy val comparisionExpression: Parser[Expression] =
+ protected lazy val comparisonExpression: Parser[Expression] =
termExpression ~ "=" ~ termExpression ^^ { case e1 ~ _ ~ e2 => Equals(e1, e2) } |
termExpression ~ "<" ~ termExpression ^^ { case e1 ~ _ ~ e2 => LessThan(e1, e2) } |
termExpression ~ "<=" ~ termExpression ^^ { case e1 ~ _ ~ e2 => LessThanOrEqual(e1, e2) } |
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 9eb992ee58..fc76e76617 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -19,9 +19,10 @@ package org.apache.spark.sql
package catalyst
package analysis
-import expressions._
-import plans.logical._
-import rules._
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.rules._
+
/**
* A trivial [[Analyzer]] with an [[EmptyCatalog]] and [[EmptyFunctionRegistry]]. Used for testing
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
index 71e4dcdb15..b77f0bbb2f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
@@ -19,9 +19,10 @@ package org.apache.spark.sql
package catalyst
package analysis
-import plans.logical.{LogicalPlan, Subquery}
import scala.collection.mutable
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Subquery}
+
/**
* An interface for looking up relations by name. Used by an [[Analyzer]].
*/
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index a359eb5411..eed058d3c7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package analysis
-import expressions._
+import org.apache.spark.sql.catalyst.expressions.Expression
/** A catalog for looking up user defined functions, used by an [[Analyzer]]. */
trait FunctionRegistry {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
index a0105cd7cf..a6ecf6e2eb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercion.scala
@@ -19,10 +19,10 @@ package org.apache.spark.sql
package catalyst
package analysis
-import expressions._
-import plans.logical._
-import rules._
-import types._
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project, Union}
+import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.catalyst.types._
/**
* A collection of [[catalyst.rules.Rule Rules]] that can be used to coerce differing types that
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/MultiInstanceRelation.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/MultiInstanceRelation.scala
index fe18cc466f..3cad3a5d4d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/MultiInstanceRelation.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/MultiInstanceRelation.scala
@@ -18,14 +18,14 @@
package org.apache.spark.sql.catalyst
package analysis
-import plans.logical.LogicalPlan
-import rules._
+import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
/**
* A trait that should be mixed into query operators where an single instance might appear multiple
* times in a logical query plan. It is invalid to have multiple copies of the same attribute
- * produced by distinct operators in a query tree as this breaks the gurantee that expression
- * ids, which are used to differentate attributes, are unique.
+ * produced by distinct operators in a query tree as this breaks the guarantee that expression
+ * ids, which are used to differentiate attributes, are unique.
*
* Before analysis, all operators that include this trait will be asked to produce a new version
* of itself with globally unique expression ids.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
index 375c99f48e..30c55bacc7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
@@ -15,6 +15,7 @@
* limitations under the License.
*/
+package org.apache.spark.sql
package catalyst
/**
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
index 2ed2af1352..04ae481102 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
@@ -19,9 +19,9 @@ package org.apache.spark.sql
package catalyst
package analysis
-import expressions._
-import plans.logical.BaseRelation
-import trees.TreeNode
+import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, Expression, NamedExpression}
+import org.apache.spark.sql.catalyst.plans.logical.BaseRelation
+import org.apache.spark.sql.catalyst.trees.TreeNode
/**
* Thrown when an invalid attempt is made to access a property of a tree that has yet to be fully
@@ -95,7 +95,7 @@ case class Star(
// If there is no table specified, use all input attributes.
case None => input
// If there is a table, pick out attributes that are part of this table.
- case Some(table) => input.filter(_.qualifiers contains table)
+ case Some(t) => input.filter(_.qualifiers contains t)
}
val mappedAttributes = expandedAttributes.map(mapFunction).zip(input).map {
case (n: NamedExpression, _) => n
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
index cd8de9d52f..e6255bcafa 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
@@ -19,58 +19,12 @@ package org.apache.spark.sql
package catalyst
import scala.language.implicitConversions
-import scala.reflect.runtime.universe.TypeTag
-import analysis.UnresolvedAttribute
-import expressions._
-import plans._
-import plans.logical._
-import types._
-
-/**
- * Provides experimental support for generating catalyst schemas for scala objects.
- */
-object ScalaReflection {
- import scala.reflect.runtime.universe._
-
- /** Returns a Sequence of attributes for the given case class type. */
- def attributesFor[T: TypeTag]: Seq[Attribute] = schemaFor[T] match {
- case s: StructType =>
- s.fields.map(f => AttributeReference(f.name, f.dataType, nullable = true)())
- }
-
- /** Returns a catalyst DataType for the given Scala Type using reflection. */
- def schemaFor[T: TypeTag]: DataType = schemaFor(typeOf[T])
-
- /** Returns a catalyst DataType for the given Scala Type using reflection. */
- def schemaFor(tpe: `Type`): DataType = tpe match {
- case t if t <:< typeOf[Product] =>
- val params = t.member("<init>": TermName).asMethod.paramss
- StructType(
- params.head.map(p => StructField(p.name.toString, schemaFor(p.typeSignature), true)))
- case t if t <:< typeOf[Seq[_]] =>
- val TypeRef(_, _, Seq(elementType)) = t
- ArrayType(schemaFor(elementType))
- case t if t <:< typeOf[String] => StringType
- case t if t <:< definitions.IntTpe => IntegerType
- case t if t <:< definitions.LongTpe => LongType
- case t if t <:< definitions.DoubleTpe => DoubleType
- case t if t <:< definitions.ShortTpe => ShortType
- case t if t <:< definitions.ByteTpe => ByteType
- }
-
- implicit class CaseClassRelation[A <: Product : TypeTag](data: Seq[A]) {
-
- /**
- * Implicitly added to Sequences of case class objects. Returns a catalyst logical relation
- * for the the data in the sequence.
- */
- def asRelation: LocalRelation = {
- val output = attributesFor[A]
- LocalRelation(output, data)
- }
- }
-}
+import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.plans.{Inner, JoinType}
+import org.apache.spark.sql.catalyst.types._
/**
* A collection of implicit conversions that create a DSL for constructing catalyst data structures.
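
Since this hunk shows only the import cleanup, here is an illustrative sketch of the DSL idioms the rewritten test suites below depend on (both expressions are lifted from those suites):

import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.plans.logical.LocalRelation

// 'a.int materializes an integer attribute named "a" via an implicit
// conversion on Symbol, giving a one-column test relation.
val testRelation = LocalRelation('a.int)

// Arithmetic operators and `as` likewise come from DSL implicits.
val namedExpr = Literal(5) + 'a as Symbol("c1")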
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala
index c253587f67..d8b69946fa 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala
@@ -18,15 +18,16 @@
package org.apache.spark.sql
package catalyst
-import trees._
+import org.apache.spark.sql.catalyst.trees.TreeNode
/**
* Functions for attaching and retrieving trees that are associated with errors.
*/
package object errors {
- class TreeNodeException[TreeType <: TreeNode[_]]
- (tree: TreeType, msg: String, cause: Throwable) extends Exception(msg, cause) {
+ class TreeNodeException[TreeType <: TreeNode[_]](
+ tree: TreeType, msg: String, cause: Throwable)
+ extends Exception(msg, cause) {
// Yes, this is the same as a default parameter, but... those don't seem to work with SBT
// external project dependencies for some reason.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
index 3b6bac16ff..3fa4148f0d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
@@ -19,10 +19,9 @@ package org.apache.spark.sql
package catalyst
package expressions
-import rules._
-import errors._
-
-import catalyst.plans.QueryPlan
+import org.apache.spark.sql.catalyst.errors.attachTree
+import org.apache.spark.sql.catalyst.plans.QueryPlan
+import org.apache.spark.sql.catalyst.rules.Rule
/**
* A bound reference points to a specific slot in the input tuple, allowing the actual value
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 608656d3a9..71f64ef950 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package expressions
-import types._
+import org.apache.spark.sql.catalyst.types._
/** Cast the child expression to the target data type. */
case class Cast(child: Expression, dataType: DataType) extends UnaryExpression {
@@ -40,7 +40,7 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression {
case (StringType, ShortType) => a: Any => castOrNull(a, _.toShort)
case (StringType, ByteType) => a: Any => castOrNull(a, _.toByte)
case (StringType, DecimalType) => a: Any => castOrNull(a, BigDecimal(_))
- case (BooleanType, ByteType) => a: Any => a match {
+ case (BooleanType, ByteType) => {
case null => null
case true => 1.toByte
case false => 0.toByte
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index 78aaaeebbd..2454a3355b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -19,9 +19,9 @@ package org.apache.spark.sql
package catalyst
package expressions
-import errors._
-import trees._
-import types._
+import org.apache.spark.sql.catalyst.trees.TreeNode
+import org.apache.spark.sql.catalyst.types.{DataType, FractionalType, IntegralType, NumericType}
+import org.apache.spark.sql.catalyst.errors.TreeNodeException
abstract class Expression extends TreeNode[Expression] {
self: Product =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Rand.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Rand.scala
index a5d0ecf964..0d173afec8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Rand.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Rand.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package expressions
-import types.DoubleType
+import org.apache.spark.sql.catalyst.types.DoubleType
case object Rand extends LeafExpression {
def dataType = DoubleType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala
index 3529675468..79c91ebaa4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package expressions
-import types._
+import org.apache.spark.sql.catalyst.types.NativeType
/**
* Represents one row of output from a relational operator. Allows both generic access by ordinal,
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
index a3c7ca1acd..cc33948055 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUdf.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package expressions
-import types._
+import org.apache.spark.sql.catalyst.types.DataType
case class ScalaUdf(function: AnyRef, dataType: DataType, children: Seq[Expression])
extends Expression {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/WrapDynamic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/WrapDynamic.scala
index 2ad8d6f31d..01b7a14d4a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/WrapDynamic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/WrapDynamic.scala
@@ -21,7 +21,7 @@ package expressions
import scala.language.dynamics
-import types._
+import org.apache.spark.sql.catalyst.types.DataType
case object DynamicType extends DataType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
index 2287a849e6..a16bb80df3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package expressions
-import catalyst.types._
+import org.apache.spark.sql.catalyst.types._
abstract class AggregateExpression extends Expression {
self: Product =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index db235645cd..81e4a487bd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql
package catalyst
package expressions
-import catalyst.analysis.UnresolvedException
-import catalyst.types._
+import org.apache.spark.sql.catalyst.analysis.UnresolvedException
+import org.apache.spark.sql.catalyst.types._
case class UnaryMinus(child: Expression) extends UnaryExpression {
type EvaluatedType = Any
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala
index d3feb6c461..9ec0f6ade7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypes.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package expressions
-import types._
+import org.apache.spark.sql.catalyst.types._
/**
* Returns the item at `ordinal` in the Array `child` or the Key `ordinal` in Map `child`.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
index c367de2a3e..9097c635ee 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package expressions
-import catalyst.types._
+import org.apache.spark.sql.catalyst.types._
/**
* An expression that produces zero or more rows given a single input row.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 229d8f7f7b..0d01312c71 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package expressions
-import types._
+import org.apache.spark.sql.catalyst.types._
object Literal {
def apply(v: Any): Literal = v match {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
index 0a06e85325..47b1241e71 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql
package catalyst
package expressions
-import catalyst.analysis.UnresolvedAttribute
-import types._
+import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
+import org.apache.spark.sql.catalyst.types._
object NamedExpression {
private val curId = new java.util.concurrent.atomic.AtomicLong()
@@ -30,7 +30,7 @@ object NamedExpression {
/**
* A globally (within this JVM) id for a given named expression.
* Used to identify with attribute output by a relation is being
- * referenced in a subsuqent computation.
+ * referenced in a subsequent computation.
*/
case class ExprId(id: Long)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
index e869a4d9b0..38e38371b1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package expressions
-import catalyst.analysis.UnresolvedException
+import org.apache.spark.sql.catalyst.analysis.UnresolvedException
case class Coalesce(children: Seq[Expression]) extends Expression {
type EvaluatedType = Any
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index 561396eb43..e7f3e8ca60 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql
package catalyst
package expressions
-import types._
-import catalyst.analysis.UnresolvedException
+import org.apache.spark.sql.catalyst.types.{BooleanType, StringType}
+import org.apache.spark.sql.catalyst.analysis.UnresolvedException
trait Predicate extends Expression {
self: Product =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
index 6e585236b1..7584fe03cf 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringOperations.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package expressions
-import catalyst.types.BooleanType
+import org.apache.spark.sql.catalyst.types.BooleanType
case class Like(left: Expression, right: Expression) extends BinaryExpression {
def dataType = BooleanType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index 4db2803173..c1201971d9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -19,11 +19,11 @@ package org.apache.spark.sql
package catalyst
package optimizer
-import catalyst.expressions._
-import catalyst.plans.logical._
-import catalyst.rules._
-import catalyst.types.BooleanType
-import catalyst.plans.Inner
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.Inner
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.rules._
+import org.apache.spark.sql.catalyst.types._
object Optimizer extends RuleExecutor[LogicalPlan] {
val batches =
@@ -73,7 +73,7 @@ object ConstantFolding extends Rule[LogicalPlan] {
object BooleanSimplification extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
case q: LogicalPlan => q transformExpressionsUp {
- case and @ And(left, right) => {
+ case and @ And(left, right) =>
(left, right) match {
case (Literal(true, BooleanType), r) => r
case (l, Literal(true, BooleanType)) => l
@@ -81,8 +81,8 @@ object BooleanSimplification extends Rule[LogicalPlan] {
case (_, Literal(false, BooleanType)) => Literal(false)
case (_, _) => and
}
- }
- case or @ Or(left, right) => {
+
+ case or @ Or(left, right) =>
(left, right) match {
case (Literal(true, BooleanType), _) => Literal(true)
case (_, Literal(true, BooleanType)) => Literal(true)
@@ -90,7 +90,6 @@ object BooleanSimplification extends Rule[LogicalPlan] {
case (l, Literal(false, BooleanType)) => l
case (_, _) => or
}
- }
}
}
}
@@ -101,7 +100,7 @@ object BooleanSimplification extends Rule[LogicalPlan] {
*/
object CombineFilters extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
- case ff@Filter(fc, nf@Filter(nc, grandChild)) => Filter(And(nc, fc), grandChild)
+ case ff @ Filter(fc, nf @ Filter(nc, grandChild)) => Filter(And(nc, fc), grandChild)
}
}
@@ -114,8 +113,10 @@ object CombineFilters extends Rule[LogicalPlan] {
*/
object PushPredicateThroughProject extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
- case filter@Filter(condition, project@Project(fields, grandChild)) =>
- val sourceAliases = fields.collect { case a@Alias(c, _) => a.toAttribute -> c }.toMap
+ case filter @ Filter(condition, project @ Project(fields, grandChild)) =>
+ val sourceAliases = fields.collect { case a @ Alias(c, _) =>
+ (a.toAttribute: Attribute) -> c
+ }.toMap
project.copy(child = filter.copy(
replaceAlias(condition, sourceAliases),
grandChild))
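
The BooleanSimplification cases above collapse And/Or nodes whose operands are boolean literals, with bottom-up coverage supplied by transformExpressionsUp. A standalone sketch of the same rewrite on a toy ADT (illustrative only, not Spark code):

sealed trait BoolExpr
case class Lit(value: Boolean) extends BoolExpr
case class And(left: BoolExpr, right: BoolExpr) extends BoolExpr
case class Or(left: BoolExpr, right: BoolExpr) extends BoolExpr

def simplify(e: BoolExpr): BoolExpr = e match {
  case And(Lit(true), r)                       => simplify(r)  // true AND r == r
  case And(l, Lit(true))                       => simplify(l)  // l AND true == l
  case And(Lit(false), _) | And(_, Lit(false)) => Lit(false)   // short-circuit
  case Or(Lit(true), _) | Or(_, Lit(true))     => Lit(true)    // short-circuit
  case Or(Lit(false), r)                       => simplify(r)  // false OR r == r
  case Or(l, Lit(false))                       => simplify(l)  // l OR false == l
  case other                                   => other
}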
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/QueryPlanner.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/QueryPlanner.scala
index 22f8ea005b..d50b963dfc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/QueryPlanner.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/QueryPlanner.scala
@@ -19,9 +19,8 @@ package org.apache.spark.sql
package catalyst
package planning
-
-import plans.logical.LogicalPlan
-import trees._
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.trees.TreeNode
/**
* Abstract class for transforming [[plans.logical.LogicalPlan LogicalPlan]]s into physical plans.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
index 613b028ca8..ff0ea90e54 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
@@ -21,8 +21,8 @@ package planning
import scala.annotation.tailrec
-import expressions._
-import plans.logical._
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical._
/**
* A pattern that matches any number of filter operations on top of another relational operator.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
index 20f230c5c4..848db2452a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql
package catalyst
package plans
-import catalyst.expressions.{SortOrder, Attribute, Expression}
-import catalyst.trees._
+import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
+import org.apache.spark.sql.catalyst.trees.TreeNode
abstract class QueryPlan[PlanType <: TreeNode[PlanType]] extends TreeNode[PlanType] {
self: PlanType with Product =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
index bc7b6871df..225dd260fb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
@@ -20,9 +20,9 @@ package catalyst
package plans
package logical
-import catalyst.expressions._
-import catalyst.errors._
-import catalyst.types.StructType
+import org.apache.spark.sql.catalyst.errors.TreeNodeException
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.types.StructType
abstract class LogicalPlan extends QueryPlan[LogicalPlan] {
self: Product =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/ScriptTransformation.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/ScriptTransformation.scala
index 1a1a2b9b88..5a3ea9f0a0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/ScriptTransformation.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/ScriptTransformation.scala
@@ -20,7 +20,7 @@ package catalyst
package plans
package logical
-import expressions._
+import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
/**
* Transforms the input by forking and running the specified script.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/TestRelation.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/TestRelation.scala
index b5905a4456..ac7d2d6001 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/TestRelation.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/TestRelation.scala
@@ -20,8 +20,7 @@ package catalyst
package plans
package logical
-import expressions._
-import rules._
+import org.apache.spark.sql.catalyst.expressions.Attribute
object LocalRelation {
def apply(output: Attribute*) =
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
index 8e98aab736..6480cca300 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
@@ -20,7 +20,7 @@ package catalyst
package plans
package logical
-import expressions._
+import org.apache.spark.sql.catalyst.expressions._
case class Project(projectList: Seq[NamedExpression], child: LogicalPlan) extends UnaryNode {
def output = projectList.map(_.toAttribute)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala
index f7fcdc5fdb..775e50bbd5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala
@@ -20,7 +20,7 @@ package catalyst
package plans
package logical
-import expressions._
+import org.apache.spark.sql.catalyst.expressions.{Expression, SortOrder}
/**
* Performs a physical redistribution of the data. Used when the consumer of the query
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
index 2d8f3ad335..20e2a45678 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
@@ -20,8 +20,8 @@ package catalyst
package plans
package physical
-import expressions._
-import types._
+import org.apache.spark.sql.catalyst.expressions.{Expression, SortOrder}
+import org.apache.spark.sql.catalyst.types.IntegerType
/**
* Specifies how tuples that share common expressions will be distributed when a query is executed
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/Rule.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/Rule.scala
index 6ff4891a3f..c7632a62a0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/Rule.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/Rule.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package rules
-import trees._
+import org.apache.spark.sql.catalyst.trees.TreeNode
abstract class Rule[TreeType <: TreeNode[_]] extends Logging {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
index 68ae30cde1..9db96f89dd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql
package catalyst
package rules
-import trees._
-import util._
+import org.apache.spark.sql.catalyst.trees.TreeNode
+import org.apache.spark.sql.catalyst.util.sideBySide
abstract class RuleExecutor[TreeType <: TreeNode[_]] extends Logging {
@@ -52,19 +52,19 @@ abstract class RuleExecutor[TreeType <: TreeNode[_]] extends Logging {
batches.foreach { batch =>
var iteration = 1
var lastPlan = curPlan
- curPlan = batch.rules.foldLeft(curPlan) { case (curPlan, rule) => rule(curPlan) }
+ curPlan = batch.rules.foldLeft(curPlan) { case (plan, rule) => rule(plan) }
// Run until fix point (or the max number of iterations as specified in the strategy.
while (iteration < batch.strategy.maxIterations && !curPlan.fastEquals(lastPlan)) {
lastPlan = curPlan
curPlan = batch.rules.foldLeft(curPlan) {
- case (curPlan, rule) =>
- val result = rule(curPlan)
- if (!result.fastEquals(curPlan)) {
+ case (plan, rule) =>
+ val result = rule(plan)
+ if (!result.fastEquals(plan)) {
logger.debug(
s"""
|=== Applying Rule ${rule.ruleName} ===
- |${sideBySide(curPlan.treeString, result.treeString).mkString("\n")}
+ |${sideBySide(plan.treeString, result.treeString).mkString("\n")}
""".stripMargin)
}
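
For reference, a sketch of how a concrete RuleExecutor is assembled; the Batch and FixedPoint names are taken from the RuleExecutorSuite at the end of this patch, so treat the exact signatures as assumptions:

import org.apache.spark.sql.catalyst.expressions.{Expression, IntegerLiteral, Literal}
import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}

object DecrementLiterals extends Rule[Expression] {
  def apply(e: Expression): Expression = e transform {
    case IntegerLiteral(i) if i > 9 => Literal(i - 1)  // shrink literals toward 9
  }
}

object ToFixedPoint extends RuleExecutor[Expression] {
  // FixedPoint(10) caps the batch at ten iterations even if no fixed point
  // is reached, which is why ToFixedPoint(Literal(100)) === Literal(90)
  // in the suite below.
  val batches = Batch("decrement", FixedPoint(10), DecrementLiterals) :: Nil
}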
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index 37e557441d..89e27d81da 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
package catalyst
package trees
-import errors._
+import org.apache.spark.sql.catalyst.errors._
object TreeNode {
private val currentId = new java.util.concurrent.atomic.AtomicLong
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index 1fd0d26b6f..78ec48ba77 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/AnalysisSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -21,13 +21,10 @@ package analysis
import org.scalatest.FunSuite
-import analysis._
-import expressions._
-import plans.logical._
-import types._
+import org.apache.spark.sql.catalyst.plans.logical._
-import dsl._
-import dsl.expressions._
+/* Implicit conversions */
+import org.apache.spark.sql.catalyst.dsl.expressions._
class AnalysisSuite extends FunSuite {
val analyze = SimpleAnalyzer
@@ -35,7 +32,8 @@ class AnalysisSuite extends FunSuite {
val testRelation = LocalRelation('a.int)
test("analyze project") {
- assert(analyze(Project(Seq(UnresolvedAttribute("a")), testRelation)) === Project(testRelation.output, testRelation))
-
+ assert(
+ analyze(Project(Seq(UnresolvedAttribute("a")), testRelation)) ===
+ Project(testRelation.output, testRelation))
}
-} \ No newline at end of file
+}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/HiveTypeCoercionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
index f595bf7e44..b85b72a284 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/HiveTypeCoercionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala
@@ -21,8 +21,7 @@ package analysis
import org.scalatest.FunSuite
-import catalyst.types._
-
+import org.apache.spark.sql.catalyst.types._
class HiveTypeCoercionSuite extends FunSuite {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ExpressionEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
index f06618ad11..c8fd581aa7 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ExpressionEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvaluationSuite.scala
@@ -21,10 +21,9 @@ package expressions
import org.scalatest.FunSuite
-import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.types._
-/* Implict conversions */
+/* Implicit conversions */
import org.apache.spark.sql.catalyst.dsl.expressions._
class ExpressionEvaluationSuite extends FunSuite {
@@ -112,4 +111,4 @@ class ExpressionEvaluationSuite extends FunSuite {
}
}
}
-} \ No newline at end of file
+}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
index 7ce42b2b0a..2c107b865a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
@@ -19,13 +19,14 @@ package org.apache.spark.sql
package catalyst
package optimizer
-import types.IntegerType
-import util._
-import plans.logical.{LogicalPlan, LocalRelation}
-import rules._
-import expressions._
-import dsl.plans._
-import dsl.expressions._
+import org.apache.spark.sql.catalyst.dsl.plans._
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan}
+import org.apache.spark.sql.catalyst.rules.RuleExecutor
+import org.apache.spark.sql.catalyst.types.IntegerType
+
+// For implicit conversions
+import org.apache.spark.sql.catalyst.dsl.expressions._
class ConstantFoldingSuite extends OptimizerTest {
@@ -106,7 +107,7 @@ class ConstantFoldingSuite extends OptimizerTest {
Literal(5) + 'a as Symbol("c1"),
'a + Literal(2) + Literal(3) as Symbol("c2"),
Literal(2) * 'a + Literal(4) as Symbol("c3"),
- 'a * (Literal(7)) as Symbol("c4"))
+ 'a * Literal(7) as Symbol("c4"))
.analyze
comparePlans(optimized, correctAnswer)
@@ -173,4 +174,4 @@ class ConstantFoldingSuite extends OptimizerTest {
comparePlans(optimized, correctAnswer)
}
-} \ No newline at end of file
+}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
index cd611b3fb3..cfbef53de1 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
@@ -2,13 +2,12 @@ package org.apache.spark.sql
package catalyst
package optimizer
-import expressions._
-import plans.logical._
-import rules._
-import util._
+import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.catalyst.rules._
-import dsl.plans._
-import dsl.expressions._
+/* Implicit conversions */
+import org.apache.spark.sql.catalyst.dsl.plans._
+import org.apache.spark.sql.catalyst.dsl.expressions._
class FilterPushdownSuite extends OptimizerTest {
@@ -219,4 +218,4 @@ class FilterPushdownSuite extends OptimizerTest {
comparePlans(optimized, optimizer.EliminateSubqueries(correctAnswer))
}
-} \ No newline at end of file
+}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerTest.scala
index 7b3653d0f9..8ec1d3d8c0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerTest.scala
@@ -4,13 +4,9 @@ package optimizer
import org.scalatest.FunSuite
-import types.IntegerType
-import util._
-import plans.logical.{LogicalPlan, LocalRelation}
-import expressions._
-import dsl._
-
-/* Implicit conversions for creating query plans */
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.util._
/**
* Provides helper methods for comparing plans produced by optimization rules with the expected
@@ -41,4 +37,4 @@ class OptimizerTest extends FunSuite {
|${sideBySide(normalized1.treeString, normalized2.treeString).mkString("\n")}
""".stripMargin)
}
-} \ No newline at end of file
+}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/RuleExecutorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
index ff7c15b718..738cfa85fb 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/RuleExecutorSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
@@ -21,8 +21,8 @@ package trees
import org.scalatest.FunSuite
-import expressions._
-import rules._
+import org.apache.spark.sql.catalyst.expressions.{Expression, IntegerLiteral, Literal}
+import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}
class RuleExecutorSuite extends FunSuite {
object DecrementLiterals extends Rule[Expression] {
@@ -54,4 +54,4 @@ class RuleExecutorSuite extends FunSuite {
assert(ToFixedPoint(Literal(100)) === Literal(90))
}
-} \ No newline at end of file
+}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/TreeNodeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
index 98bb090c29..1ddc41a731 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/TreeNodeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
@@ -15,18 +15,15 @@
* limitations under the License.
*/
-package org.apache.spark.sql
-package catalyst
-package trees
+package org.apache.spark.sql.catalyst.trees
import scala.collection.mutable.ArrayBuffer
-import expressions._
+import org.scalatest.FunSuite
-import org.scalatest.{FunSuite}
+import org.apache.spark.sql.catalyst.expressions._
class TreeNodeSuite extends FunSuite {
-
test("top node changed") {
val after = Literal(1) transform { case Literal(1, _) => Literal(2) }
assert(after === Literal(2))
@@ -60,8 +57,8 @@ class TreeNodeSuite extends FunSuite {
val expected = Seq("+", "1", "*", "2", "-", "3", "4")
val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
expression transformDown {
- case b: BinaryExpression => {actual.append(b.symbol); b}
- case l: Literal => {actual.append(l.toString); l}
+ case b: BinaryExpression => actual.append(b.symbol); b
+ case l: Literal => actual.append(l.toString); l
}
assert(expected === actual)
@@ -72,10 +69,10 @@ class TreeNodeSuite extends FunSuite {
val expected = Seq("1", "2", "3", "4", "-", "*", "+")
val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
expression transformUp {
- case b: BinaryExpression => {actual.append(b.symbol); b}
- case l: Literal => {actual.append(l.toString); l}
+ case b: BinaryExpression => actual.append(b.symbol); b
+ case l: Literal => actual.append(l.toString); l
}
assert(expected === actual)
}
-} \ No newline at end of file
+}