aboutsummaryrefslogtreecommitdiff
path: root/sql/catalyst
diff options
context:
space:
mode:
authorjiangxingbo <jiangxb1987@gmail.com>2017-02-15 10:46:54 -0800
committerWenchen Fan <wenchen@databricks.com>2017-02-15 10:47:11 -0800
commit3755da76c3821e8e6a4f359c43243a51a06559ca (patch)
treedb041a112a3744b24e7ecbb036f7b8b69015cc2e /sql/catalyst
parent671bc08ed502815bfa2254c30d64149402acb0c7 (diff)
downloadspark-3755da76c3821e8e6a4f359c43243a51a06559ca.tar.gz
spark-3755da76c3821e8e6a4f359c43243a51a06559ca.tar.bz2
spark-3755da76c3821e8e6a4f359c43243a51a06559ca.zip
[SPARK-19331][SQL][TESTS] Improve the test coverage of SQLViewSuite
Move `SQLViewSuite` from `sql/hive` to `sql/core`, so we can test the view support without the Hive metastore. Also moved the Hive-specific test cases to `HiveSQLViewSuite`. Improve the test coverage of SQLViewSuite, covering the following cases: 1. view resolution (possibly a referenced table/view has changed after the view creation); 2. handling a view with user-specified column names; 3. improved test cases for a nested view. Also added a test case for cyclic view references, which is a known issue that is not fixed yet. N/A Author: jiangxingbo <jiangxb1987@gmail.com> Closes #16674 from jiangxb1987/view-test.
Diffstat (limited to 'sql/catalyst')
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala11
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala2
2 files changed, 9 insertions, 4 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 6aa0e8d8d3..cd517a98ac 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -620,13 +620,18 @@ class Analyzer(
private def lookupTableFromCatalog(
u: UnresolvedRelation,
defaultDatabase: Option[String] = None): LogicalPlan = {
+ val tableIdentWithDb = u.tableIdentifier.copy(
+ database = u.tableIdentifier.database.orElse(defaultDatabase))
try {
- val tableIdentWithDb = u.tableIdentifier.copy(
- database = u.tableIdentifier.database.orElse(defaultDatabase))
catalog.lookupRelation(tableIdentWithDb, u.alias)
} catch {
case _: NoSuchTableException =>
- u.failAnalysis(s"Table or view not found: ${u.tableName}")
+ u.failAnalysis(s"Table or view not found: ${tableIdentWithDb.unquotedString}")
+ // If the database is defined and that database is not found, throw an AnalysisException.
+ // Note that if the database is not defined, it is possible we are looking up a temp view.
+ case e: NoSuchDatabaseException =>
+ u.failAnalysis(s"Table or view not found: ${tableIdentWithDb.unquotedString}, the " +
+          s"database ${e.db} doesn't exist.")
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
index 8febdcaee8..f5aae60431 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
@@ -25,7 +25,7 @@ import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
* Thrown by a catalog when an item cannot be found. The analyzer will rethrow the exception
* as an [[org.apache.spark.sql.AnalysisException]] with the correct position information.
*/
-class NoSuchDatabaseException(db: String) extends AnalysisException(s"Database '$db' not found")
+class NoSuchDatabaseException(val db: String) extends AnalysisException(s"Database '$db' not found")
class NoSuchTableException(db: String, table: String)
extends AnalysisException(s"Table or view '$table' not found in database '$db'")