about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--  R/pkg/inst/tests/testthat/test_sparkSQL.R  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala  2
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala  4
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala  9
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala  3
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala  4
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala  14
-rw-r--r--  sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala  2
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala  2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala  2
12 files changed, 24 insertions, 24 deletions
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index 22eb3ec984..d747d4f83f 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -1853,7 +1853,7 @@ test_that("approxQuantile() on a DataFrame", {
test_that("SQL error message is returned from JVM", {
retError <- tryCatch(sql(sqlContext, "select * from blah"), error = function(e) e)
- expect_equal(grepl("Table not found", retError), TRUE)
+ expect_equal(grepl("Table or View not found", retError), TRUE)
expect_equal(grepl("blah", retError), TRUE)
})
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 3555a6d7fa..de40ddde1b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -409,7 +409,7 @@ class Analyzer(
catalog.lookupRelation(u.tableIdentifier, u.alias)
} catch {
case _: NoSuchTableException =>
- u.failAnalysis(s"Table not found: ${u.tableName}")
+ u.failAnalysis(s"Table or View not found: ${u.tableName}")
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index 4880502398..d6a8c3eec8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -52,7 +52,7 @@ trait CheckAnalysis {
case p if p.analyzed => // Skip already analyzed sub-plans
case u: UnresolvedRelation =>
- u.failAnalysis(s"Table not found: ${u.tableIdentifier}")
+ u.failAnalysis(s"Table or View not found: ${u.tableIdentifier}")
case operator: LogicalPlan =>
operator transformExpressionsUp {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
index e9f04eecf8..96fd1a027e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
@@ -33,7 +33,7 @@ class NoSuchDatabaseException(db: String) extends NoSuchItemException {
}
class NoSuchTableException(db: String, table: String) extends NoSuchItemException {
- override def getMessage: String = s"Table $table not found in database $db"
+ override def getMessage: String = s"Table or View $table not found in database $db"
}
class NoSuchPartitionException(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
index 1994acd1ad..f8a6fb74cc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
@@ -62,7 +62,7 @@ class InMemoryCatalog extends ExternalCatalog {
private def requireTableExists(db: String, table: String): Unit = {
if (!tableExists(db, table)) {
throw new AnalysisException(
- s"Table not found: '$table' does not exist in database '$db'")
+ s"Table or View not found: '$table' does not exist in database '$db'")
}
}
@@ -164,7 +164,7 @@ class InMemoryCatalog extends ExternalCatalog {
catalog(db).tables.remove(table)
} else {
if (!ignoreIfNotExists) {
- throw new AnalysisException(s"Table '$table' does not exist in database '$db'")
+ throw new AnalysisException(s"Table or View '$table' does not exist in database '$db'")
}
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index c1e5a485e7..34e1cb7315 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -242,11 +242,11 @@ class SessionCatalog(
val table = formatTableName(name.table)
if (name.database.isDefined || !tempTables.contains(table)) {
// When ignoreIfNotExists is false, no exception is issued when the table does not exist.
- // Instead, log it as an error message. This is consistent with Hive.
+ // Instead, log it as an error message.
if (externalCatalog.tableExists(db, table)) {
externalCatalog.dropTable(db, table, ignoreIfNotExists = true)
} else if (!ignoreIfNotExists) {
- logError(s"Table '${name.quotedString}' does not exist")
+ logError(s"Table or View '${name.quotedString}' does not exist")
}
} else {
tempTables.remove(table)
@@ -305,11 +305,6 @@ class SessionCatalog(
}
/**
- * Return whether View is supported
- */
- def isViewSupported: Boolean = false
-
- /**
* List all tables in the specified database, including temporary tables.
*/
def listTables(db: String): Seq[TableIdentifier] = listTables(db, "*")
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index e941736f9a..8a37cf8f4c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -191,9 +191,6 @@ case class DropTable(
override def run(sqlContext: SQLContext): Seq[Row] = {
val catalog = sqlContext.sessionState.catalog
- if (isView && !catalog.isViewSupported) {
- throw new AnalysisException(s"Not supported object: views")
- }
// If the command DROP VIEW is to drop a table or DROP TABLE is to drop a view
// issue an exception.
catalog.getTableMetadataOption(tableName).map(_.tableType match {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 695dda269a..cdd404d699 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -1827,12 +1827,12 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
val e1 = intercept[AnalysisException] {
sql("select * from in_valid_table")
}
- assert(e1.message.contains("Table not found"))
+ assert(e1.message.contains("Table or View not found"))
val e2 = intercept[AnalysisException] {
sql("select * from no_db.no_table").show()
}
- assert(e2.message.contains("Table not found"))
+ assert(e2.message.contains("Table or View not found"))
val e3 = intercept[AnalysisException] {
sql("select * from json.invalid_file")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index e75e5f5cb2..c6479bf33e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -432,11 +432,21 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
sql("DROP TABLE dbx.tab1")
}
- test("drop view") {
+ test("drop view in SQLContext") {
+ // SQLContext does not support create view. Log an error message, if tab1 does not exists
+ sql("DROP VIEW tab1")
+
+ val catalog = sqlContext.sessionState.catalog
+ val tableIdent = TableIdentifier("tab1", Some("dbx"))
+ createDatabase(catalog, "dbx")
+ createTable(catalog, tableIdent)
+ assert(catalog.listTables("dbx") == Seq(tableIdent))
+
val e = intercept[AnalysisException] {
sql("DROP VIEW dbx.tab1")
}
- assert(e.getMessage.contains("Not supported object: views"))
+ assert(
+ e.getMessage.contains("Cannot drop a table with DROP VIEW. Please use DROP TABLE instead"))
}
private def convertToDatasourceTable(
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index 9ec8b9a9a6..bfc3d195ff 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -230,7 +230,7 @@ class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
runCliWithin(timeout = 2.minute,
errorResponses = Seq("AnalysisException"))(
"select * from nonexistent_table;"
- -> "Error in query: Table not found: nonexistent_table;"
+ -> "Error in query: Table or View not found: nonexistent_table;"
)
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index 875652c226..0cccc22e5a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -70,8 +70,6 @@ private[sql] class HiveSessionCatalog(
}
}
- override def isViewSupported: Boolean = true
-
// ----------------------------------------------------------------
// | Methods and fields for interacting with HiveMetastoreCatalog |
// ----------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index 8de2bdcfc0..061d1512a5 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -96,7 +96,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
val message1 = intercept[AnalysisException] {
sql("SHOW TBLPROPERTIES badtable")
}.getMessage
- assert(message1.contains("Table badtable not found in database default"))
+ assert(message1.contains("Table or View badtable not found in database default"))
// When key is not found, a row containing the error is returned.
checkAnswer(