 sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala        | 28 +++++++++++++++++++++++-----
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala |  8 ++++++++
 2 files changed, 31 insertions(+), 5 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
index aebb663df5..0b8e53868c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
@@ -18,6 +18,7 @@
package org.apache.spark.sql.internal
import scala.reflect.runtime.universe.TypeTag
+import scala.util.control.NonFatal
import org.apache.spark.annotation.Experimental
import org.apache.spark.sql._
@@ -98,14 +99,27 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
CatalogImpl.makeDataset(tables, sparkSession)
}
+ /**
+ * Returns a Table for the given table/view or temporary view.
+ *
+ * Note that this function requires that the table already exist in the Catalog.
+ *
+ * If retrieving the table metadata fails for any reason (e.g., the table's serde class
+ * is not accessible or the table type is not accepted by Spark SQL), this function
+ * still returns the corresponding Table, just without the description and tableType.
+ */
private def makeTable(tableIdent: TableIdentifier): Table = {
- val metadata = sessionCatalog.getTempViewOrPermanentTableMetadata(tableIdent)
+ val metadata = try {
+ Some(sessionCatalog.getTempViewOrPermanentTableMetadata(tableIdent))
+ } catch {
+ case NonFatal(_) => None
+ }
val isTemp = sessionCatalog.isTemporaryTable(tableIdent)
new Table(
name = tableIdent.table,
- database = metadata.identifier.database.orNull,
- description = metadata.comment.orNull,
- tableType = if (isTemp) "TEMPORARY" else metadata.tableType.name,
+ database = metadata.map(_.identifier.database).getOrElse(tableIdent.database).orNull,
+ description = metadata.map(_.comment.orNull).orNull,
+ tableType = if (isTemp) "TEMPORARY" else metadata.map(_.tableType.name).orNull,
isTemporary = isTemp)
}
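
For context, the fallback introduced above can be sketched in isolation. The following is a minimal, self-contained sketch of the pattern rather than Spark's actual code; TableMetadata and loadMetadata are hypothetical stand-ins for CatalogTable and sessionCatalog.getTempViewOrPermanentTableMetadata:

    import scala.util.control.NonFatal

    // Hypothetical stand-ins for CatalogTable and the session catalog lookup.
    case class TableMetadata(comment: Option[String], tableTypeName: String)
    def loadMetadata(name: String): TableMetadata =
      throw new RuntimeException(s"cannot read metadata for $name") // e.g. missing serde class

    // Any non-fatal failure degrades to None instead of propagating, so the
    // caller can still build a Table with a null description/tableType.
    val metadata: Option[TableMetadata] =
      try Some(loadMetadata("t1"))
      catch { case NonFatal(_) => None }

    println(metadata.map(_.comment.orNull).orNull) // null: retrieval failed
    println(metadata.map(_.tableTypeName).orNull)  // null: retrieval failed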
@@ -197,7 +211,11 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
* `AnalysisException` when no `Table` can be found.
*/
override def getTable(dbName: String, tableName: String): Table = {
- makeTable(TableIdentifier(tableName, Option(dbName)))
+ if (tableExists(dbName, tableName)) {
+ makeTable(TableIdentifier(tableName, Option(dbName)))
+ } else {
+ throw new AnalysisException(s"Table or view '$tableName' not found in database '$dbName'")
+ }
}
/**
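
With this change, getTable distinguishes two failure modes: a table that exists but whose metadata cannot be read comes back partially populated, while a table that does not exist at all still raises AnalysisException. A hedged usage sketch, assuming a running SparkSession named spark and a table ind_on_t1 whose Hive metadata Spark cannot interpret (both names hypothetical):

    import org.apache.spark.sql.AnalysisException

    // Exists, but metadata retrieval fails: no exception, just null fields.
    val t = spark.catalog.getTable("default", "ind_on_t1")
    assert(t.name == "ind_on_t1" && t.description == null && t.tableType == null)

    // Does not exist at all: still an AnalysisException, per the check added above.
    try spark.catalog.getTable("default", "no_such_table")
    catch { case e: AnalysisException => println(e.getMessage) }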
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 3906968aaf..16a99321ba 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -1197,6 +1197,14 @@ class HiveDDLSuite
s"CREATE INDEX $indexName ON TABLE $tabName (a) AS 'COMPACT' WITH DEFERRED REBUILD")
val indexTabName =
spark.sessionState.catalog.listTables("default", s"*$indexName*").head.table
+
+ // Even if index tables exist, the listTables and getTable APIs should still work
+ checkAnswer(
+ spark.catalog.listTables().toDF(),
+ Row(indexTabName, "default", null, null, false) ::
+ Row(tabName, "default", null, "MANAGED", false) :: Nil)
+ assert(spark.catalog.getTable("default", indexTabName).name === indexTabName)
+
intercept[TableAlreadyExistsException] {
sql(s"CREATE TABLE $indexTabName(b int)")
}
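
Since description and tableType can now be null for entries whose metadata could not be read, callers iterating the catalog should treat those fields defensively. A small sketch, again assuming a SparkSession named spark:

    // Null-safe inspection of catalog entries; tableType/description may be null
    // for tables whose metadata Spark could not read (e.g. Hive index tables).
    spark.catalog.listTables().collect().foreach { t =>
      val tpe = Option(t.tableType).getOrElse("<unknown>")
      println(s"${Option(t.database).getOrElse("")}.${t.name}: $tpe, temporary=${t.isTemporary}")
    }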