about summary refs log tree commit diff
path: root/sql/core
diff options
context:
space:
mode:
authorHerman van Hovell <hvanhovell@databricks.com>2016-08-16 23:09:53 -0700
committerReynold Xin <rxin@databricks.com>2016-08-16 23:09:53 -0700
commitf7c9ff57c17a950cccdc26aadf8768c899a4d572 (patch)
tree6ff7233e26d192dac7d1af38708707e74d84401a /sql/core
parent4a2c375be2bcd98cc7e00bea920fd6a0f68a4e14 (diff)
downloadspark-f7c9ff57c17a950cccdc26aadf8768c899a4d572.tar.gz
spark-f7c9ff57c17a950cccdc26aadf8768c899a4d572.tar.bz2
spark-f7c9ff57c17a950cccdc26aadf8768c899a4d572.zip
[SPARK-17068][SQL] Make view-usage visible during analysis
## What changes were proposed in this pull request? This PR adds a field to subquery alias in order to make the usage of views in a resolved `LogicalPlan` more visible (and more understandable). For example, the following view and query: ```sql create view constants as select 1 as id union all select 1 union all select 42 select * from constants; ``` ...now yields the following analyzed plan: ``` Project [id#39] +- SubqueryAlias c, `default`.`constants` +- Project [gen_attr_0#36 AS id#39] +- SubqueryAlias gen_subquery_0 +- Union :- Union : :- Project [1 AS gen_attr_0#36] : : +- OneRowRelation$ : +- Project [1 AS gen_attr_1#37] : +- OneRowRelation$ +- Project [42 AS gen_attr_2#38] +- OneRowRelation$ ``` ## How was this patch tested? Added tests for the two code paths in `SessionCatalogSuite` (sql/core) and `HiveMetastoreCatalogSuite` (sql/hive) Author: Herman van Hovell <hvanhovell@databricks.com> Closes #14657 from hvanhovell/SPARK-17068.
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala | 2
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/catalyst/SQLBuilder.scala | 6
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala | 2
3 files changed, 5 insertions, 5 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index c119df83b3..6da99ce0dd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -967,7 +967,7 @@ class Dataset[T] private[sql](
* @since 1.6.0
*/
def as(alias: String): Dataset[T] = withTypedPlan {
- SubqueryAlias(alias, logicalPlan)
+ SubqueryAlias(alias, logicalPlan, None)
}
/**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/SQLBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/SQLBuilder.scala
index 5d93419f35..ff8e0f2642 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/SQLBuilder.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/SQLBuilder.scala
@@ -75,7 +75,7 @@ class SQLBuilder private (
val aliasedOutput = canonicalizedPlan.output.zip(outputNames).map {
case (attr, name) => Alias(attr.withQualifier(None), name)()
}
- val finalPlan = Project(aliasedOutput, SubqueryAlias(finalName, canonicalizedPlan))
+ val finalPlan = Project(aliasedOutput, SubqueryAlias(finalName, canonicalizedPlan, None))
try {
val replaced = finalPlan.transformAllExpressions {
@@ -440,7 +440,7 @@ class SQLBuilder private (
object RemoveSubqueriesAboveSQLTable extends Rule[LogicalPlan] {
override def apply(plan: LogicalPlan): LogicalPlan = plan transformUp {
- case SubqueryAlias(_, t @ ExtractSQLTable(_)) => t
+ case SubqueryAlias(_, t @ ExtractSQLTable(_), _) => t
}
}
@@ -557,7 +557,7 @@ class SQLBuilder private (
}
private def addSubquery(plan: LogicalPlan): SubqueryAlias = {
- SubqueryAlias(newSubqueryName(), plan)
+ SubqueryAlias(newSubqueryName(), plan, None)
}
private def addSubqueryIfNeeded(plan: LogicalPlan): LogicalPlan = plan match {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
index fc8d8c3667..5eb2f0a9ff 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
@@ -55,7 +55,7 @@ class ResolveDataSource(sparkSession: SparkSession) extends Rule[LogicalPlan] {
s"${u.tableIdentifier.database.get}")
}
val plan = LogicalRelation(dataSource.resolveRelation())
- u.alias.map(a => SubqueryAlias(u.alias.get, plan)).getOrElse(plan)
+ u.alias.map(a => SubqueryAlias(u.alias.get, plan, None)).getOrElse(plan)
} catch {
case e: ClassNotFoundException => u
case e: Exception =>