author    Reynold Xin <rxin@databricks.com>  2016-02-21 11:31:46 -0800
committer Reynold Xin <rxin@databricks.com>  2016-02-21 11:31:46 -0800
commit    0947f0989b136841fa0601295fc09b36f16cb933 (patch)
tree      fd980f5deadf0fcbf463d0b534ad237cadcaa963
parent    1a340da8d7590d831b040c74f5a6eb560e14d585 (diff)
[SPARK-13420][SQL] Rename Subquery logical plan to SubqueryAlias
## What changes were proposed in this pull request?

This patch renames logical.Subquery to logical.SubqueryAlias, which is a more appropriate name for this operator (versus subqueries as expressions).

## How was this patch tested?

Unit tests.

Author: Reynold Xin <rxin@databricks.com>

Closes #11288 from rxin/SPARK-13420.
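For illustration only (not part of this commit), here is a minimal sketch of what the rename means in practice, using the Catalyst plan DSL as it existed at this revision; the demo object name is invented:

import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, SubqueryAlias}

// Sketch only: assumes 2016-era Catalyst internals on the classpath.
object SubqueryAliasDemo extends App {
  val input = LocalRelation('a.int, 'b.int)

  // SubqueryAlias merely scopes attribute names under the alias "t";
  // it performs no computation of its own.
  val aliased = SubqueryAlias("t", input)

  // Once analysis is complete the alias carries no information the optimizer
  // needs, so the renamed rule strips it and returns the bare child.
  assert(EliminateSubqueryAliases(aliased) == input)
}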
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystQl.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala | 12
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala | 10
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala | 7
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/subquery.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala | 2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala | 4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala | 4
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystQlSuite.scala | 4
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala | 4
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala | 4
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala | 2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CollapseProjectSuite.scala | 4
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala | 5
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubqueryAliasesSuite.scala (renamed from sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubQueriesSuite.scala) | 14
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala | 19
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/JoinOrderSuite.scala | 6
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LimitPushdownSuite.scala | 4
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala | 5
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OuterJoinEliminationSuite.scala | 4
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SetOperationSuite.scala | 4
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala | 2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala | 2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala | 4
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala | 2
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala | 10
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala | 8
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala | 4
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala | 4
30 files changed, 83 insertions(+), 83 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystQl.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystQl.scala
index b16025a17e..069c665a39 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystQl.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystQl.scala
@@ -243,7 +243,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
queryArgs match {
case Token("TOK_CTE", ctes) :: Token("TOK_FROM", from) :: inserts =>
val cteRelations = ctes.map { node =>
- val relation = nodeToRelation(node).asInstanceOf[Subquery]
+ val relation = nodeToRelation(node).asInstanceOf[SubqueryAlias]
relation.alias -> relation
}
(Some(from.head), inserts, Some(cteRelations.toMap))
@@ -454,7 +454,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
protected def nodeToRelation(node: ASTNode): LogicalPlan = {
node match {
case Token("TOK_SUBQUERY", query :: Token(alias, Nil) :: Nil) =>
- Subquery(cleanIdentifier(alias), nodeToPlan(query))
+ SubqueryAlias(cleanIdentifier(alias), nodeToPlan(query))
case Token(laterViewToken(isOuter), selectClause :: relationClause :: Nil) =>
nodeToGenerate(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index e153f4dd2b..562711a1b9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -118,7 +118,7 @@ class Analyzer(
// see https://github.com/apache/spark/pull/4929#discussion_r27186638 for more info
case u : UnresolvedRelation =>
val substituted = cteRelations.get(u.tableIdentifier.table).map { relation =>
- val withAlias = u.alias.map(Subquery(_, relation))
+ val withAlias = u.alias.map(SubqueryAlias(_, relation))
withAlias.getOrElse(relation)
}
substituted.getOrElse(u)
@@ -357,7 +357,7 @@ class Analyzer(
def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
case i @ InsertIntoTable(u: UnresolvedRelation, _, _, _, _) =>
- i.copy(table = EliminateSubQueries(getTable(u)))
+ i.copy(table = EliminateSubqueryAliases(getTable(u)))
case u: UnresolvedRelation =>
try {
getTable(u)
@@ -688,7 +688,7 @@ class Analyzer(
resolved
} else {
plan match {
- case u: UnaryNode if !u.isInstanceOf[Subquery] =>
+ case u: UnaryNode if !u.isInstanceOf[SubqueryAlias] =>
resolveExpressionRecursively(resolved, u.child)
case other => resolved
}
@@ -1372,12 +1372,12 @@ class Analyzer(
}
/**
- * Removes [[Subquery]] operators from the plan. Subqueries are only required to provide
+ * Removes [[SubqueryAlias]] operators from the plan. Subqueries are only required to provide
* scoping information for attributes and can be removed once analysis is complete.
*/
-object EliminateSubQueries extends Rule[LogicalPlan] {
+object EliminateSubqueryAliases extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transformUp {
- case Subquery(_, child) => child
+ case SubqueryAlias(_, child) => child
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
index f2f9ec5941..67edab55db 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
@@ -25,7 +25,7 @@ import scala.collection.mutable.ArrayBuffer
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.{CatalystConf, EmptyConf, TableIdentifier}
-import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Subquery}
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
/**
* Thrown by a catalog when a table cannot be found. The analyzer will rethrow the exception
@@ -110,12 +110,12 @@ class SimpleCatalog(val conf: CatalystConf) extends Catalog {
if (table == null) {
throw new AnalysisException("Table not found: " + tableName)
}
- val tableWithQualifiers = Subquery(tableName, table)
+ val tableWithQualifiers = SubqueryAlias(tableName, table)
// If an alias was specified by the lookup, wrap the plan in a subquery so that attributes are
// properly qualified with this alias.
alias
- .map(a => Subquery(a, tableWithQualifiers))
+ .map(a => SubqueryAlias(a, tableWithQualifiers))
.getOrElse(tableWithQualifiers)
}
@@ -158,11 +158,11 @@ trait OverrideCatalog extends Catalog {
getOverriddenTable(tableIdent) match {
case Some(table) =>
val tableName = getTableName(tableIdent)
- val tableWithQualifiers = Subquery(tableName, table)
+ val tableWithQualifiers = SubqueryAlias(tableName, table)
// If an alias was specified by the lookup, wrap the plan in a sub-query so that attributes
// are properly qualified with this alias.
- alias.map(a => Subquery(a, tableWithQualifiers)).getOrElse(tableWithQualifiers)
+ alias.map(a => SubqueryAlias(a, tableWithQualifiers)).getOrElse(tableWithQualifiers)
case None => super.lookupRelation(tableIdent, alias)
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
index 5ac1984043..1a2ec7ed93 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
@@ -21,7 +21,7 @@ import java.sql.{Date, Timestamp}
import scala.language.implicitConversions
-import org.apache.spark.sql.catalyst.analysis.{EliminateSubQueries, UnresolvedAttribute, UnresolvedExtractValue}
+import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, UnresolvedAttribute, UnresolvedExtractValue}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.plans.{Inner, JoinType}
@@ -268,7 +268,7 @@ package object dsl {
Aggregate(groupingExprs, aliasedExprs, logicalPlan)
}
- def subquery(alias: Symbol): LogicalPlan = Subquery(alias.name, logicalPlan)
+ def subquery(alias: Symbol): LogicalPlan = SubqueryAlias(alias.name, logicalPlan)
def except(otherPlan: LogicalPlan): LogicalPlan = Except(logicalPlan, otherPlan)
@@ -290,7 +290,8 @@ package object dsl {
analysis.UnresolvedRelation(TableIdentifier(tableName)),
Map.empty, logicalPlan, overwrite, false)
- def analyze: LogicalPlan = EliminateSubQueries(analysis.SimpleAnalyzer.execute(logicalPlan))
+ def analyze: LogicalPlan =
+ EliminateSubqueryAliases(analysis.SimpleAnalyzer.execute(logicalPlan))
}
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/subquery.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/subquery.scala
index a8f5e1f63d..d0c44b0328 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/subquery.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/subquery.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.plans.QueryPlan
-import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Subquery}
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
import org.apache.spark.sql.types.DataType
/**
@@ -56,7 +56,7 @@ case class ScalarSubquery(
exprId: ExprId = NamedExpression.newExprId)
extends SubqueryExpression with Unevaluable {
- override def plan: LogicalPlan = Subquery(toString, query)
+ override def plan: LogicalPlan = SubqueryAlias(toString, query)
override lazy val resolved: Boolean = query.resolved
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index 202f6b5e07..1554382840 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.optimizer
import scala.collection.immutable.HashSet
-import org.apache.spark.sql.catalyst.analysis.{CleanupAliases, EliminateSubQueries}
+import org.apache.spark.sql.catalyst.analysis.{CleanupAliases, EliminateSubqueryAliases}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.expressions.Literal.{FalseLiteral, TrueLiteral}
@@ -40,7 +40,7 @@ abstract class Optimizer extends RuleExecutor[LogicalPlan] {
// However, because we also use the analyzer to canonicalized queries (for view definition),
// we do not eliminate subqueries or compute current time in the analyzer.
Batch("Finish Analysis", Once,
- EliminateSubQueries,
+ EliminateSubqueryAliases,
ComputeCurrentTime) ::
//////////////////////////////////////////////////////////////////////////////////////////
// Optimizer rules start here
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
index 86bd33f526..5e7d144ae4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
@@ -18,7 +18,7 @@
package org.apache.spark.sql.catalyst.plans
import org.apache.spark.sql.catalyst.expressions._
-import org.apache.spark.sql.catalyst.plans.logical.Subquery
+import org.apache.spark.sql.catalyst.plans.logical.SubqueryAlias
import org.apache.spark.sql.catalyst.trees.TreeNode
import org.apache.spark.sql.types.{DataType, StructType}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
index 35e0f5d563..35df2429db 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
@@ -128,8 +128,8 @@ abstract class LogicalPlan extends QueryPlan[LogicalPlan] with Logging {
* can do better should override this function.
*/
def sameResult(plan: LogicalPlan): Boolean = {
- val cleanLeft = EliminateSubQueries(this)
- val cleanRight = EliminateSubQueries(plan)
+ val cleanLeft = EliminateSubqueryAliases(this)
+ val cleanRight = EliminateSubqueryAliases(plan)
cleanLeft.getClass == cleanRight.getClass &&
cleanLeft.children.size == cleanRight.children.size && {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
index a19ba38ba0..70ecbce829 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
@@ -350,7 +350,7 @@ case class InsertIntoTable(
* key is the alias of the CTE definition,
* value is the CTE definition.
*/
-case class With(child: LogicalPlan, cteRelations: Map[String, Subquery]) extends UnaryNode {
+case class With(child: LogicalPlan, cteRelations: Map[String, SubqueryAlias]) extends UnaryNode {
override def output: Seq[Attribute] = child.output
}
@@ -623,7 +623,7 @@ case class LocalLimit(limitExpr: Expression, child: LogicalPlan) extends UnaryNo
}
}
-case class Subquery(alias: String, child: LogicalPlan) extends UnaryNode {
+case class SubqueryAlias(alias: String, child: LogicalPlan) extends UnaryNode {
override def output: Seq[Attribute] = child.output.map(_.withQualifiers(alias :: Nil))
}
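As an aside (also not part of the patch), the withQualifiers call above is what makes qualified references such as "x.key" resolvable; a tiny sketch under the same 2016-era assumptions, with an invented demo object name:

import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, SubqueryAlias}

// Sketch only: assumes qualifiers was still a Seq[String] at this revision.
object QualifierDemo extends App {
  val rel = LocalRelation('key.int)
  // Wrapping the relation requalifies every output attribute with the alias,
  // so the analyzer can later match "x.key" to this subtree.
  val out = SubqueryAlias("x", rel).output
  out.foreach(a => println(s"${a.qualifiers.mkString}.${a.name}"))  // prints x.key
}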
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystQlSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystQlSuite.scala
index ed7121831a..42147f516f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystQlSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystQlSuite.scala
@@ -51,7 +51,7 @@ class CatalystQlSuite extends PlanTest {
val parsed2 = parser.parsePlan("SELECT * FROM t0 UNION DISTINCT SELECT * FROM t1")
val expected =
Project(UnresolvedAlias(UnresolvedStar(None)) :: Nil,
- Subquery("u_1",
+ SubqueryAlias("u_1",
Distinct(
Union(
Project(UnresolvedAlias(UnresolvedStar(None)) :: Nil,
@@ -66,7 +66,7 @@ class CatalystQlSuite extends PlanTest {
val parsed = parser.parsePlan("SELECT * FROM t0 UNION ALL SELECT * FROM t1")
val expected =
Project(UnresolvedAlias(UnresolvedStar(None)) :: Nil,
- Subquery("u_1",
+ SubqueryAlias("u_1",
Union(
Project(UnresolvedAlias(UnresolvedStar(None)) :: Nil,
UnresolvedRelation(TableIdentifier("t0"), None)),
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index 92db02944c..aa1d2b0861 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -344,8 +344,8 @@ class AnalysisSuite extends AnalysisTest {
val query =
Project(Seq($"x.key", $"y.key"),
Join(
- Project(Seq($"x.key"), Subquery("x", input)),
- Project(Seq($"y.key"), Subquery("y", input)),
+ Project(Seq($"x.key"), SubqueryAlias("x", input)),
+ Project(Seq($"y.key"), SubqueryAlias("y", input)),
Inner, None))
assertAnalysisSuccess(query)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
index 2756c463cf..e0a95ba8bb 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
@@ -35,10 +35,10 @@ trait AnalysisTest extends PlanTest {
caseInsensitiveCatalog.registerTable(TableIdentifier("TaBlE"), TestRelations.testRelation)
new Analyzer(caseSensitiveCatalog, EmptyFunctionRegistry, caseSensitiveConf) {
- override val extendedResolutionRules = EliminateSubQueries :: Nil
+ override val extendedResolutionRules = EliminateSubqueryAliases :: Nil
} ->
new Analyzer(caseInsensitiveCatalog, EmptyFunctionRegistry, caseInsensitiveConf) {
- override val extendedResolutionRules = EliminateSubQueries :: Nil
+ override val extendedResolutionRules = EliminateSubqueryAliases :: Nil
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
index 6932f185b9..3e52441519 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
@@ -31,7 +31,7 @@ class BooleanSimplificationSuite extends PlanTest with PredicateHelper {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("AnalysisNodes", Once,
- EliminateSubQueries) ::
+ EliminateSubqueryAliases) ::
Batch("Constant Folding", FixedPoint(50),
NullPropagation,
ConstantFolding,
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CollapseProjectSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CollapseProjectSuite.scala
index f5fd5ca6be..833f054659 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CollapseProjectSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CollapseProjectSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.catalyst.optimizer
-import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
+import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.Rand
@@ -28,7 +28,7 @@ import org.apache.spark.sql.catalyst.rules.RuleExecutor
class CollapseProjectSuite extends PlanTest {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
- Batch("Subqueries", FixedPoint(10), EliminateSubQueries) ::
+ Batch("Subqueries", FixedPoint(10), EliminateSubqueryAliases) ::
Batch("CollapseProject", Once, CollapseProject) :: Nil
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
index 48f9ac77b7..641c89873d 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
@@ -17,8 +17,7 @@
package org.apache.spark.sql.catalyst.optimizer
-import org.apache.spark.sql.catalyst.analysis.{EliminateSubQueries, UnresolvedExtractValue}
-// For implicit conversions
+import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, UnresolvedExtractValue}
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
@@ -32,7 +31,7 @@ class ConstantFoldingSuite extends PlanTest {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("AnalysisNodes", Once,
- EliminateSubQueries) ::
+ EliminateSubqueryAliases) ::
Batch("ConstantFolding", Once,
OptimizeIn,
ConstantFolding,
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubQueriesSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubqueryAliasesSuite.scala
index e0d430052f..9b6d68aee8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubQueriesSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubqueryAliasesSuite.scala
@@ -18,7 +18,7 @@
package org.apache.spark.sql.catalyst.optimizer
import org.apache.spark.sql.catalyst.analysis
-import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
+import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
@@ -28,10 +28,10 @@ import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules._
-class EliminateSubQueriesSuite extends PlanTest with PredicateHelper {
+class EliminateSubqueryAliasesSuite extends PlanTest with PredicateHelper {
object Optimize extends RuleExecutor[LogicalPlan] {
- val batches = Batch("EliminateSubQueries", Once, EliminateSubQueries) :: Nil
+ val batches = Batch("EliminateSubqueryAliases", Once, EliminateSubqueryAliases) :: Nil
}
private def assertEquivalent(e1: Expression, e2: Expression): Unit = {
@@ -46,13 +46,13 @@ class EliminateSubQueriesSuite extends PlanTest with PredicateHelper {
test("eliminate top level subquery") {
val input = LocalRelation('a.int, 'b.int)
- val query = Subquery("a", input)
+ val query = SubqueryAlias("a", input)
comparePlans(afterOptimization(query), input)
}
test("eliminate mid-tree subquery") {
val input = LocalRelation('a.int, 'b.int)
- val query = Filter(TrueLiteral, Subquery("a", input))
+ val query = Filter(TrueLiteral, SubqueryAlias("a", input))
comparePlans(
afterOptimization(query),
Filter(TrueLiteral, LocalRelation('a.int, 'b.int)))
@@ -60,10 +60,10 @@ class EliminateSubQueriesSuite extends PlanTest with PredicateHelper {
test("eliminate multiple subqueries") {
val input = LocalRelation('a.int, 'b.int)
- val query = Filter(TrueLiteral, Subquery("c", Subquery("b", Subquery("a", input))))
+ val query = Filter(TrueLiteral,
+ SubqueryAlias("c", SubqueryAlias("b", SubqueryAlias("a", input))))
comparePlans(
afterOptimization(query),
Filter(TrueLiteral, LocalRelation('a.int, 'b.int)))
}
-
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
index b49ca928b6..7805723ec8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
@@ -18,7 +18,7 @@
package org.apache.spark.sql.catalyst.optimizer
import org.apache.spark.sql.catalyst.analysis
-import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
+import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
@@ -32,7 +32,7 @@ class FilterPushdownSuite extends PlanTest {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("Subqueries", Once,
- EliminateSubQueries) ::
+ EliminateSubqueryAliases) ::
Batch("Filter Pushdown", Once,
SamplePushDown,
CombineFilters,
@@ -481,7 +481,7 @@ class FilterPushdownSuite extends PlanTest {
}
val optimized = Optimize.execute(originalQuery.analyze)
- comparePlans(analysis.EliminateSubQueries(originalQuery.analyze), optimized)
+ comparePlans(analysis.EliminateSubqueryAliases(originalQuery.analyze), optimized)
}
test("joins: conjunctive predicates") {
@@ -500,7 +500,7 @@ class FilterPushdownSuite extends PlanTest {
left.join(right, condition = Some("x.b".attr === "y.b".attr))
.analyze
- comparePlans(optimized, analysis.EliminateSubQueries(correctAnswer))
+ comparePlans(optimized, analysis.EliminateSubqueryAliases(correctAnswer))
}
test("joins: conjunctive predicates #2") {
@@ -519,7 +519,7 @@ class FilterPushdownSuite extends PlanTest {
left.join(right, condition = Some("x.b".attr === "y.b".attr))
.analyze
- comparePlans(optimized, analysis.EliminateSubQueries(correctAnswer))
+ comparePlans(optimized, analysis.EliminateSubqueryAliases(correctAnswer))
}
test("joins: conjunctive predicates #3") {
@@ -543,7 +543,7 @@ class FilterPushdownSuite extends PlanTest {
condition = Some("z.a".attr === "x.b".attr))
.analyze
- comparePlans(optimized, analysis.EliminateSubQueries(correctAnswer))
+ comparePlans(optimized, analysis.EliminateSubqueryAliases(correctAnswer))
}
val testRelationWithArrayType = LocalRelation('a.int, 'b.int, 'c_arr.array(IntegerType))
@@ -619,7 +619,7 @@ class FilterPushdownSuite extends PlanTest {
x.select('a)
.sortBy(SortOrder('a, Ascending)).analyze
- comparePlans(optimized, analysis.EliminateSubQueries(correctAnswer))
+ comparePlans(optimized, analysis.EliminateSubqueryAliases(correctAnswer))
// push down invalid
val originalQuery1 = {
@@ -634,7 +634,7 @@ class FilterPushdownSuite extends PlanTest {
.sortBy(SortOrder('a, Ascending))
.select('b).analyze
- comparePlans(optimized1, analysis.EliminateSubQueries(correctAnswer1))
+ comparePlans(optimized1, analysis.EliminateSubqueryAliases(correctAnswer1))
}
test("push project and filter down into sample") {
@@ -642,7 +642,8 @@ class FilterPushdownSuite extends PlanTest {
val originalQuery =
Sample(0.0, 0.6, false, 11L, x).select('a)
- val originalQueryAnalyzed = EliminateSubQueries(analysis.SimpleAnalyzer.execute(originalQuery))
+ val originalQueryAnalyzed =
+ EliminateSubqueryAliases(analysis.SimpleAnalyzer.execute(originalQuery))
val optimized = Optimize.execute(originalQueryAnalyzed)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/JoinOrderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/JoinOrderSuite.scala
index 858a0d8fde..a5b487bcc8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/JoinOrderSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/JoinOrderSuite.scala
@@ -18,7 +18,7 @@
package org.apache.spark.sql.catalyst.optimizer
import org.apache.spark.sql.catalyst.analysis
-import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
+import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.Expression
@@ -33,7 +33,7 @@ class JoinOrderSuite extends PlanTest {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("Subqueries", Once,
- EliminateSubQueries) ::
+ EliminateSubqueryAliases) ::
Batch("Filter Pushdown", Once,
CombineFilters,
PushPredicateThroughProject,
@@ -90,6 +90,6 @@ class JoinOrderSuite extends PlanTest {
.join(y, condition = Some("y.d".attr === "z.a".attr))
.analyze
- comparePlans(optimized, analysis.EliminateSubQueries(correctAnswer))
+ comparePlans(optimized, analysis.EliminateSubqueryAliases(correctAnswer))
}
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LimitPushdownSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LimitPushdownSuite.scala
index fc1e994581..dcbc79365c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LimitPushdownSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LimitPushdownSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.catalyst.optimizer
-import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
+import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.Add
@@ -30,7 +30,7 @@ class LimitPushdownSuite extends PlanTest {
private object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("Subqueries", Once,
- EliminateSubQueries) ::
+ EliminateSubqueryAliases) ::
Batch("Limit pushdown", FixedPoint(100),
LimitPushDown,
CombineLimits,
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
index 3e384e473e..17255ecfe8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
@@ -19,8 +19,7 @@ package org.apache.spark.sql.catalyst.optimizer
import scala.collection.immutable.HashSet
-import org.apache.spark.sql.catalyst.analysis.{EliminateSubQueries, UnresolvedAttribute}
-// For implicit conversions
+import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, UnresolvedAttribute}
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
@@ -34,7 +33,7 @@ class OptimizeInSuite extends PlanTest {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("AnalysisNodes", Once,
- EliminateSubQueries) ::
+ EliminateSubqueryAliases) ::
Batch("ConstantFolding", FixedPoint(10),
NullPropagation,
ConstantFolding,
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OuterJoinEliminationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OuterJoinEliminationSuite.scala
index a1dc836a5f..5e6e54dc74 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OuterJoinEliminationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OuterJoinEliminationSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.catalyst.optimizer
-import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
+import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.plans._
@@ -28,7 +28,7 @@ class OuterJoinEliminationSuite extends PlanTest {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("Subqueries", Once,
- EliminateSubQueries) ::
+ EliminateSubqueryAliases) ::
Batch("Outer Join Elimination", Once,
OuterJoinElimination,
PushPredicateThroughJoin) :: Nil
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SetOperationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SetOperationSuite.scala
index b8ea32b4df..50f3b512d9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SetOperationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SetOperationSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.catalyst.optimizer
-import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
+import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -28,7 +28,7 @@ class SetOperationSuite extends PlanTest {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("Subqueries", Once,
- EliminateSubQueries) ::
+ EliminateSubqueryAliases) ::
Batch("Union Pushdown", Once,
CombineUnions,
SetOperationPushDown,
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
index 9674450118..e3412f7a2e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
@@ -671,7 +671,7 @@ class DataFrame private[sql](
* @since 1.3.0
*/
def as(alias: String): DataFrame = withPlan {
- Subquery(alias, logicalPlan)
+ SubqueryAlias(alias, logicalPlan)
}
/**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index 378763268a..ea7e7255ab 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -142,7 +142,7 @@ class Dataset[T] private[sql](
* the same name after two Datasets have been joined.
* @since 1.6.0
*/
- def as(alias: String): Dataset[T] = withPlan(Subquery(alias, _))
+ def as(alias: String): Dataset[T] = withPlan(SubqueryAlias(alias, _))
/**
* Converts this strongly typed collection of data to generic Dataframe. In contrast to the
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
index 9358c9c37b..2e41e88392 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
@@ -40,7 +40,7 @@ private[sql] class ResolveDataSource(sqlContext: SQLContext) extends Rule[Logica
provider = u.tableIdentifier.database.get,
options = Map("path" -> u.tableIdentifier.table))
val plan = LogicalRelation(resolved.relation)
- u.alias.map(a => Subquery(u.alias.get, plan)).getOrElse(plan)
+ u.alias.map(a => SubqueryAlias(u.alias.get, plan)).getOrElse(plan)
} catch {
case e: ClassNotFoundException => u
case e: Exception =>
@@ -171,7 +171,7 @@ private[sql] case class PreWriteCheck(catalog: Catalog) extends (LogicalPlan =>
// the query. If so, we will throw an AnalysisException to let users know it is not allowed.
if (c.mode == SaveMode.Overwrite && catalog.tableExists(c.tableIdent)) {
// Need to remove SubQuery operator.
- EliminateSubQueries(catalog.lookupRelation(c.tableIdent)) match {
+ EliminateSubqueryAliases(catalog.lookupRelation(c.tableIdent)) match {
// Only do the check if the table is a data source table
// (the relation is a BaseRelation).
case l @ LogicalRelation(dest: BaseRelation, _, _) =>
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index ac174aa6bf..88fda16ca0 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -353,7 +353,7 @@ class HiveContext private[hive](
*/
def analyze(tableName: String) {
val tableIdent = sqlParser.parseTableIdentifier(tableName)
- val relation = EliminateSubQueries(catalog.lookupRelation(tableIdent))
+ val relation = EliminateSubqueryAliases(catalog.lookupRelation(tableIdent))
relation match {
case relation: MetastoreRelation =>
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 61d0d6759f..c222b006a0 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -416,17 +416,17 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
if (table.properties.get("spark.sql.sources.provider").isDefined) {
val dataSourceTable = cachedDataSourceTables(qualifiedTableName)
- val tableWithQualifiers = Subquery(qualifiedTableName.name, dataSourceTable)
+ val tableWithQualifiers = SubqueryAlias(qualifiedTableName.name, dataSourceTable)
// Then, if alias is specified, wrap the table with a Subquery using the alias.
// Otherwise, wrap the table with a Subquery using the table name.
- alias.map(a => Subquery(a, tableWithQualifiers)).getOrElse(tableWithQualifiers)
+ alias.map(a => SubqueryAlias(a, tableWithQualifiers)).getOrElse(tableWithQualifiers)
} else if (table.tableType == VirtualView) {
val viewText = table.viewText.getOrElse(sys.error("Invalid view without text."))
alias match {
// because hive use things like `_c0` to build the expanded text
// currently we cannot support view from "create view v1(c1) as ..."
- case None => Subquery(table.name, hive.parseSql(viewText))
- case Some(aliasText) => Subquery(aliasText, hive.parseSql(viewText))
+ case None => SubqueryAlias(table.name, hive.parseSql(viewText))
+ case Some(aliasText) => SubqueryAlias(aliasText, hive.parseSql(viewText))
}
} else {
MetastoreRelation(qualifiedTableName.database, qualifiedTableName.name, alias)(table)(hive)
@@ -564,7 +564,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
case relation: MetastoreRelation if hive.convertMetastoreParquet &&
relation.tableDesc.getSerdeClassName.toLowerCase.contains("parquet") =>
val parquetRelation = convertToParquetRelation(relation)
- Subquery(relation.alias.getOrElse(relation.tableName), parquetRelation)
+ SubqueryAlias(relation.alias.getOrElse(relation.tableName), parquetRelation)
}
}
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala
index 1dda39d44e..32f17f41c9 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala
@@ -102,14 +102,14 @@ class SQLBuilder(logicalPlan: LogicalPlan, sqlContext: SQLContext) extends Loggi
val childrenSql = p.children.map(toSQL(_))
childrenSql.mkString(" UNION ALL ")
- case p: Subquery =>
+ case p: SubqueryAlias =>
p.child match {
// Persisted data source relation
case LogicalRelation(_, _, Some(TableIdentifier(table, Some(database)))) =>
s"${quoteIdentifier(database)}.${quoteIdentifier(table)}"
// Parentheses is not used for persisted data source relations
// e.g., select x.c1 from (t1) as x inner join (t1) as y on x.c1 = y.c1
- case Subquery(_, _: LogicalRelation | _: MetastoreRelation) =>
+ case SubqueryAlias(_, _: LogicalRelation | _: MetastoreRelation) =>
build(toSQL(p.child), "AS", p.alias)
case _ =>
build("(" + toSQL(p.child) + ")", "AS", p.alias)
@@ -215,7 +215,7 @@ class SQLBuilder(logicalPlan: LogicalPlan, sqlContext: SQLContext) extends Loggi
wrapChildWithSubquery(plan)
case plan @ Project(_,
- _: Subquery
+ _: SubqueryAlias
| _: Filter
| _: Join
| _: MetastoreRelation
@@ -237,7 +237,7 @@ class SQLBuilder(logicalPlan: LogicalPlan, sqlContext: SQLContext) extends Loggi
a.withQualifiers(alias :: Nil)
}.asInstanceOf[NamedExpression])
- Project(aliasedProjectList, Subquery(alias, child))
+ Project(aliasedProjectList, SubqueryAlias(alias, child))
}
}
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
index e703ac0164..246b52a3b0 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
@@ -21,7 +21,7 @@ import org.apache.hadoop.hive.metastore.MetaStoreUtils
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
+import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.util._
@@ -220,7 +220,7 @@ case class CreateMetastoreDataSourceAsSelect(
provider,
optionsWithPath)
val createdRelation = LogicalRelation(resolved.relation)
- EliminateSubQueries(sqlContext.catalog.lookupRelation(tableIdent)) match {
+ EliminateSubqueryAliases(sqlContext.catalog.lookupRelation(tableIdent)) match {
case l @ LogicalRelation(_: InsertableRelation | _: HadoopFsRelation, _, _) =>
if (l.relation != createdRelation.relation) {
val errorDescription =
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 75fa09db69..6624494e08 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -23,7 +23,7 @@ import scala.collection.JavaConverters._
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.{EliminateSubQueries, FunctionRegistry}
+import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, FunctionRegistry}
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.execution.datasources.parquet.ParquetRelation
import org.apache.spark.sql.functions._
@@ -264,7 +264,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
test("CTAS without serde") {
def checkRelation(tableName: String, isDataSourceParquet: Boolean): Unit = {
- val relation = EliminateSubQueries(catalog.lookupRelation(TableIdentifier(tableName)))
+ val relation = EliminateSubqueryAliases(catalog.lookupRelation(TableIdentifier(tableName)))
relation match {
case LogicalRelation(r: ParquetRelation, _, _) =>
if (!isDataSourceParquet) {