aboutsummaryrefslogtreecommitdiff
path: root/sql/core
diff options
context:
space:
mode:
authorReynold Xin <rxin@databricks.com>2016-02-21 11:31:46 -0800
committerReynold Xin <rxin@databricks.com>2016-02-21 11:31:46 -0800
commit0947f0989b136841fa0601295fc09b36f16cb933 (patch)
treefd980f5deadf0fcbf463d0b534ad237cadcaa963 /sql/core
parent1a340da8d7590d831b040c74f5a6eb560e14d585 (diff)
downloadspark-0947f0989b136841fa0601295fc09b36f16cb933.tar.gz
spark-0947f0989b136841fa0601295fc09b36f16cb933.tar.bz2
spark-0947f0989b136841fa0601295fc09b36f16cb933.zip
[SPARK-13420][SQL] Rename Subquery logical plan to SubqueryAlias
## What changes were proposed in this pull request? This patch renames logical.Subquery to logical.SubqueryAlias, which is a more appropriate name for this operator (versus subqueries as expressions). ## How was this patch tested? Unit tests. Author: Reynold Xin <rxin@databricks.com> Closes #11288 from rxin/SPARK-13420.
Diffstat (limited to 'sql/core')
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala2
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala2
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala4
3 files changed, 4 insertions, 4 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
index 9674450118..e3412f7a2e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
@@ -671,7 +671,7 @@ class DataFrame private[sql](
* @since 1.3.0
*/
def as(alias: String): DataFrame = withPlan {
- Subquery(alias, logicalPlan)
+ SubqueryAlias(alias, logicalPlan)
}
/**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index 378763268a..ea7e7255ab 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -142,7 +142,7 @@ class Dataset[T] private[sql](
* the same name after two Datasets have been joined.
* @since 1.6.0
*/
- def as(alias: String): Dataset[T] = withPlan(Subquery(alias, _))
+ def as(alias: String): Dataset[T] = withPlan(SubqueryAlias(alias, _))
/**
* Converts this strongly typed collection of data to generic Dataframe. In contrast to the
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
index 9358c9c37b..2e41e88392 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
@@ -40,7 +40,7 @@ private[sql] class ResolveDataSource(sqlContext: SQLContext) extends Rule[Logica
provider = u.tableIdentifier.database.get,
options = Map("path" -> u.tableIdentifier.table))
val plan = LogicalRelation(resolved.relation)
- u.alias.map(a => Subquery(u.alias.get, plan)).getOrElse(plan)
+ u.alias.map(a => SubqueryAlias(u.alias.get, plan)).getOrElse(plan)
} catch {
case e: ClassNotFoundException => u
case e: Exception =>
@@ -171,7 +171,7 @@ private[sql] case class PreWriteCheck(catalog: Catalog) extends (LogicalPlan =>
// the query. If so, we will throw an AnalysisException to let users know it is not allowed.
if (c.mode == SaveMode.Overwrite && catalog.tableExists(c.tableIdent)) {
// Need to remove SubQuery operator.
- EliminateSubQueries(catalog.lookupRelation(c.tableIdent)) match {
+ EliminateSubqueryAliases(catalog.lookupRelation(c.tableIdent)) match {
// Only do the check if the table is a data source table
// (the relation is a BaseRelation).
case l @ LogicalRelation(dest: BaseRelation, _, _) =>