Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala          |   7
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala       | 109
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala  |  73
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala |  21
4 files changed, 155 insertions, 55 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index b60499253c..95f3463dfe 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -591,8 +591,13 @@ class SparkSession private(
@transient lazy val catalog: Catalog = new CatalogImpl(self)
/**
- * Returns the specified table as a `DataFrame`.
+ * Returns the specified table/view as a `DataFrame`.
*
+ * @param tableName is either a qualified or unqualified name that designates a table or view.
+ * If a database is specified, it identifies the table/view from that database.
+ * Otherwise, it first attempts to find a temporary view with the given name
+ * and then matches the table/view from the current database.
+ * Note that the global temporary view database is also valid here.
* @since 2.0.0
*/
def table(tableName: String): DataFrame = {
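For illustration, a minimal sketch of the resolution order described above. The database `db1`, the table `tbl`, and the view names are all hypothetical; the snippet assumes a locally created SparkSession:

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().master("local").getOrCreate()
    spark.sql("CREATE DATABASE IF NOT EXISTS db1")
    spark.sql("CREATE TABLE db1.tbl (id INT) USING parquet")
    spark.range(3).createOrReplaceTempView("tbl")

    spark.table("db1.tbl")         // qualified: always the table in db1
    spark.table("tbl")             // unqualified: the temporary view wins
    spark.range(3).createOrReplaceGlobalTempView("gv")
    spark.table("global_temp.gv")  // the global temp view database is also valid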
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
index 50252db789..137b0cbc84 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
@@ -54,16 +54,16 @@ abstract class Catalog {
def listDatabases(): Dataset[Database]
/**
- * Returns a list of tables in the current database.
- * This includes all temporary tables.
+ * Returns a list of tables/views in the current database.
+ * This includes all temporary views.
*
* @since 2.0.0
*/
def listTables(): Dataset[Table]
/**
- * Returns a list of tables in the specified database.
- * This includes all temporary tables.
+ * Returns a list of tables/views in the specified database.
+ * This includes all temporary views.
*
* @since 2.0.0
*/
@@ -88,17 +88,21 @@ abstract class Catalog {
def listFunctions(dbName: String): Dataset[Function]
/**
- * Returns a list of columns for the given table in the current database or
- * the given temporary table.
+ * Returns a list of columns for the given table/view or temporary view.
*
+ * @param tableName is either a qualified or unqualified name that designates a table/view.
+ * If no database identifier is provided, it refers to a temporary view or
+ * a table/view in the current database.
* @since 2.0.0
*/
@throws[AnalysisException]("table does not exist")
def listColumns(tableName: String): Dataset[Column]
/**
- * Returns a list of columns for the given table in the specified database.
+ * Returns a list of columns for the given table/view in the specified database.
*
+ * @param dbName is a name that designates a database.
+ * @param tableName is an unqualified name that designates a table/view.
* @since 2.0.0
*/
@throws[AnalysisException]("database or table does not exist")
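A quick sketch of the two overloads, reusing the hypothetical `db1.tbl` and the `spark` session from the earlier snippet:

    spark.catalog.listColumns("tbl").show()        // temp view, or table/view in the current database
    spark.catalog.listColumns("db1.tbl").show()    // qualified name
    spark.catalog.listColumns("db1", "tbl").show() // database + unqualified name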
@@ -115,9 +119,11 @@ abstract class Catalog {
/**
* Get the table or view with the specified name. This table can be a temporary view or a
- * table/view in the current database. This throws an AnalysisException when no Table
- * can be found.
+ * table/view, possibly qualified with a database name. This throws an AnalysisException when no Table can be found.
*
+ * @param tableName is either a qualified or unqualified name that designates a table/view.
+ * If no database identifier is provided, it refers to a table/view in
+ * the current database.
* @since 2.1.0
*/
@throws[AnalysisException]("table does not exist")
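For example (hypothetical names; `getTable` on a missing name throws, so that call is commented out):

    val t = spark.catalog.getTable("db1.tbl")  // qualified lookup
    assert(t.name == "tbl" && t.database == "db1" && !t.isTemporary)
    // spark.catalog.getTable("no_such_table") // would throw AnalysisException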
@@ -134,9 +140,11 @@ abstract class Catalog {
/**
* Get the function with the specified name. This function can be a temporary function or a
- * function in the current database. This throws an AnalysisException when the function cannot
- * be found.
+ * function, possibly qualified with a database name. This throws an AnalysisException when the function cannot be found.
*
+ * @param functionName is either a qualified or unqualified name that designates a function.
+ * If no database identifier is provided, it refers to a temporary function
+ * or a function in the current database.
* @since 2.1.0
*/
@throws[AnalysisException]("function does not exist")
@@ -146,6 +154,8 @@ abstract class Catalog {
* Get the function with the specified name. This throws an AnalysisException when the function
* cannot be found.
*
+ * @param dbName is a name that designates a database.
+ * @param functionName is an unqualified name that designates a function in the specified database.
* @since 2.1.0
*/
@throws[AnalysisException]("database or function does not exist")
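A sketch under the assumption that a permanent function `fn` exists in the hypothetical database `db1`:

    val f = spark.catalog.getFunction("db1.fn")  // same lookup as getFunction("db1", "fn")
    assert(f.name == "fn" && f.database == "db1" && !f.isTemporary)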
@@ -160,8 +170,11 @@ abstract class Catalog {
/**
* Check if the table or view with the specified name exists. This can either be a temporary
- * view or a table/view in the current database.
+ * view or a table/view, possibly qualified with a database name.
*
+ * @param tableName is either a qualified or unqualified name that designates a table/view.
+ * If no database identifier is provided, it refers to a table/view in
+ * the current database.
* @since 2.1.0
*/
def tableExists(tableName: String): Boolean
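For example, with the hypothetical `db1.tbl` from earlier:

    spark.catalog.tableExists("db1.tbl")  // same lookup as tableExists("db1", "tbl")
    spark.catalog.tableExists("tbl")      // temp view, or table/view in the current database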
@@ -169,14 +182,19 @@ abstract class Catalog {
/**
* Check if the table or view with the specified name exists in the specified database.
*
+ * @param dbName is a name that designates a database.
+ * @param tableName is an unqualified name that designates a table/view.
* @since 2.1.0
*/
def tableExists(dbName: String, tableName: String): Boolean
/**
* Check if the function with the specified name exists. This can either be a temporary function
- * or a function in the current database.
+ * or a function, possibly qualified with a database name.
*
+ * @param functionName is either a qualified or unqualified name that designates a function.
+ * If no database identifier is provided, it refers to a function in
+ * the current database.
* @since 2.1.0
*/
def functionExists(functionName: String): Boolean
@@ -184,6 +202,8 @@ abstract class Catalog {
/**
* Check if the function with the specified name exists in the specified database.
*
+ * @param dbName is a name that designates a database.
+ * @param functionName is an unqualified name that designates a function.
* @since 2.1.0
*/
def functionExists(dbName: String, functionName: String): Boolean
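A sketch of both overloads (the `db1.fn` function is hypothetical; `abs` is a built-in):

    spark.catalog.functionExists("abs")     // temporary/built-in, or current database
    spark.catalog.functionExists("db1.fn")  // same lookup as functionExists("db1", "fn")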
@@ -192,6 +212,9 @@ abstract class Catalog {
* Creates a table from the given path and returns the corresponding DataFrame.
* It will use the default data source configured by spark.sql.sources.default.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in
+ * the current database.
* @since 2.0.0
*/
@deprecated("use createTable instead.", "2.2.0")
@@ -204,6 +227,9 @@ abstract class Catalog {
* Creates a table from the given path and returns the corresponding DataFrame.
* It will use the default data source configured by spark.sql.sources.default.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in
+ * the current database.
* @since 2.2.0
*/
@Experimental
@@ -214,6 +240,9 @@ abstract class Catalog {
* Creates a table from the given path based on a data source and returns the corresponding
* DataFrame.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in
+ * the current database.
* @since 2.0.0
*/
@deprecated("use createTable instead.", "2.2.0")
@@ -226,6 +255,9 @@ abstract class Catalog {
* Creates a table from the given path based on a data source and returns the corresponding
* DataFrame.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in
+ * the current database.
* @since 2.2.0
*/
@Experimental
@@ -236,6 +268,9 @@ abstract class Catalog {
* Creates a table from the given path based on a data source and a set of options.
* Then, returns the corresponding DataFrame.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in
+ * the current database.
* @since 2.0.0
*/
@deprecated("use createTable instead.", "2.2.0")
@@ -251,6 +286,9 @@ abstract class Catalog {
* Creates a table from the given path based on a data source and a set of options.
* Then, returns the corresponding DataFrame.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in
+ * the current database.
* @since 2.2.0
*/
@Experimental
@@ -267,6 +305,9 @@ abstract class Catalog {
* Creates a table from the given path based on a data source and a set of options.
* Then, returns the corresponding DataFrame.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in
+ * the current database.
* @since 2.0.0
*/
@deprecated("use createTable instead.", "2.2.0")
@@ -283,6 +324,9 @@ abstract class Catalog {
* Creates a table from the given path based on a data source and a set of options.
* Then, returns the corresponding DataFrame.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in
+ * the current database.
* @since 2.2.0
*/
@Experimental
@@ -297,6 +341,9 @@ abstract class Catalog {
* Create a table from the given path based on a data source, a schema and a set of options.
* Then, returns the corresponding DataFrame.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in
+ * the current database.
* @since 2.0.0
*/
@deprecated("use createTable instead.", "2.2.0")
@@ -313,6 +360,9 @@ abstract class Catalog {
* Create a table from the given path based on a data source, a schema and a set of options.
* Then, returns the corresponding DataFrame.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in
+ * the current database.
* @since 2.2.0
*/
@Experimental
@@ -330,6 +380,9 @@ abstract class Catalog {
* Create a table from the given path based on a data source, a schema and a set of options.
* Then, returns the corresponding DataFrame.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in
+ * the current database.
* @since 2.0.0
*/
@deprecated("use createTable instead.", "2.2.0")
@@ -347,6 +400,9 @@ abstract class Catalog {
* Create a table from the given path based on a data source, a schema and a set of options.
* Then, returns the corresponding DataFrame.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in
+ * the current database.
* @since 2.2.0
*/
@Experimental
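To illustrate the qualified-name support added across these `createTable` overloads, a minimal sketch; the path is hypothetical, and the data source falls back to spark.sql.sources.default:

    // Creates an external table in db1 backed by files at the given path.
    val df = spark.catalog.createTable("db1.ext_tbl", "/tmp/data/ext_tbl")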
@@ -368,7 +424,7 @@ abstract class Catalog {
* Note that, the return type of this method was Unit in Spark 2.0, but changed to Boolean
* in Spark 2.1.
*
- * @param viewName the name of the view to be dropped.
+ * @param viewName the name of the temporary view to be dropped.
* @return true if the view is dropped successfully, false otherwise.
* @since 2.0.0
*/
@@ -383,15 +439,18 @@ abstract class Catalog {
* preserved database `global_temp`, and we must use the qualified name to refer a global temp
* view, e.g. `SELECT * FROM global_temp.view1`.
*
- * @param viewName the name of the view to be dropped.
+ * @param viewName the unqualified name of the temporary view to be dropped.
* @return true if the view is dropped successfully, false otherwise.
* @since 2.1.0
*/
def dropGlobalTempView(viewName: String): Boolean
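For example:

    spark.range(1).createOrReplaceTempView("v")
    spark.catalog.dropTempView("v")         // true
    spark.range(1).createOrReplaceGlobalTempView("gv")
    spark.catalog.dropGlobalTempView("gv")  // unqualified name; true
    spark.catalog.dropGlobalTempView("gv")  // already gone; false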
/**
- * Recover all the partitions in the directory of a table and update the catalog.
+ * Recovers all the partitions in the directory of a table and updates the catalog.
*
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in the
+ * current database.
* @since 2.1.1
*/
def recoverPartitions(tableName: String): Unit
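A sketch, assuming a hypothetical partitioned table `db1.logs` whose partition directories were written to the file system outside of Spark:

    // Comparable to SQL's ALTER TABLE db1.logs RECOVER PARTITIONS (MSCK REPAIR TABLE).
    spark.catalog.recoverPartitions("db1.logs")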
@@ -399,6 +458,9 @@ abstract class Catalog {
/**
* Returns true if the table is currently cached in-memory.
*
+ * @param tableName is either a qualified or unqualified name that designates a table/view.
+ * If no database identifier is provided, it refers to a temporary view or
+ * a table/view in the current database.
* @since 2.0.0
*/
def isCached(tableName: String): Boolean
@@ -406,6 +468,9 @@ abstract class Catalog {
/**
* Caches the specified table in-memory.
*
+ * @param tableName is either a qualified or unqualified name that designates a table/view.
+ * If no database identifier is provided, it refers to a temporary view or
+ * a table/view in the current database.
* @since 2.0.0
*/
def cacheTable(tableName: String): Unit
@@ -413,6 +478,9 @@ abstract class Catalog {
/**
* Removes the specified table from the in-memory cache.
*
+ * @param tableName is either a qualified or unqualified name that designates a table/view.
+ * If no database identifier is provided, it refers to a temporary view or
+ * a table/view in the current database.
* @since 2.0.0
*/
def uncacheTable(tableName: String): Unit
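All three cache operations accept the same qualified or unqualified names, e.g.:

    spark.catalog.cacheTable("db1.tbl")
    spark.catalog.isCached("db1.tbl")  // true
    spark.catalog.uncacheTable("db1.tbl")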
@@ -425,7 +493,7 @@ abstract class Catalog {
def clearCache(): Unit
/**
- * Invalidate and refresh all the cached metadata of the given table. For performance reasons,
+ * Invalidates and refreshes all the cached metadata of the given table. For performance reasons,
* Spark SQL or the external data source library it uses might cache certain metadata about a
* table, such as the location of blocks. When those change outside of Spark SQL, users should
* call this function to invalidate the cache.
@@ -433,13 +501,16 @@ abstract class Catalog {
* If this table is cached as an InMemoryRelation, drop the original cached version and make the
* new version cached lazily.
*
+ * @param tableName is either a qualified or unqualified name that designates a table/view.
+ * If no database identifier is provided, it refers to a temporary view or
+ * a table/view in the current database.
* @since 2.0.0
*/
def refreshTable(tableName: String): Unit
/**
- * Invalidate and refresh all the cached data (and the associated metadata) for any dataframe that
- * contains the given data source path. Path matching is by prefix, i.e. "/" would invalidate
+ * Invalidates and refreshes all the cached data (and the associated metadata) for any [[Dataset]]
+ * that contains the given data source path. Path matching is by prefix, i.e. "/" would invalidate
* everything that is cached.
*
* @since 2.0.0
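A short sketch of both refresh entry points (the path is hypothetical):

    spark.catalog.refreshTable("db1.tbl")     // re-resolves metadata; re-caches lazily if cached
    spark.catalog.refreshByPath("/tmp/data")  // prefix match over cached Datasets' source paths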
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
index 53374859f1..5d1c35aba5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
@@ -19,8 +19,6 @@ package org.apache.spark.sql.internal
import scala.reflect.runtime.universe.TypeTag
-import org.apache.hadoop.fs.Path
-
import org.apache.spark.annotation.Experimental
import org.apache.spark.sql._
import org.apache.spark.sql.catalog.{Catalog, Column, Database, Function, Table}
@@ -143,11 +141,12 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Returns a list of columns for the given table in the current database.
+ * Returns a list of columns for the given table/view or temporary view.
*/
@throws[AnalysisException]("table does not exist")
override def listColumns(tableName: String): Dataset[Column] = {
- listColumns(TableIdentifier(tableName, None))
+ val tableIdent = sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)
+ listColumns(tableIdent)
}
/**
@@ -177,7 +176,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Get the database with the specified name. This throws an `AnalysisException` when no
+ * Gets the database with the specified name. This throws an `AnalysisException` when no
* `Database` can be found.
*/
override def getDatabase(dbName: String): Database = {
@@ -185,16 +184,16 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Get the table or view with the specified name. This table can be a temporary view or a
- * table/view in the current database. This throws an `AnalysisException` when no `Table`
- * can be found.
+ * Gets the table or view with the specified name. This table can be a temporary view or a
+ * table/view, possibly qualified with a database name. This throws an `AnalysisException` when no `Table` can be found.
*/
override def getTable(tableName: String): Table = {
- getTable(null, tableName)
+ val tableIdent = sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)
+ getTable(tableIdent.database.orNull, tableIdent.table)
}
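The delegation pattern above hinges on the parser splitting a possibly-qualified name; a sketch (sessionState is an unstable/internal API, and the names are hypothetical):

    val ident = spark.sessionState.sqlParser.parseTableIdentifier("db1.tbl")
    ident.database  // Some("db1")
    ident.table     // "tbl"
    spark.sessionState.sqlParser.parseTableIdentifier("tbl").database  // None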
/**
- * Get the table or view with the specified name in the specified database. This throws an
+ * Gets the table or view with the specified name in the specified database. This throws an
* `AnalysisException` when no `Table` can be found.
*/
override def getTable(dbName: String, tableName: String): Table = {
@@ -202,16 +201,16 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Get the function with the specified name. This function can be a temporary function or a
- * function in the current database. This throws an `AnalysisException` when no `Function`
- * can be found.
+ * Gets the function with the specified name. This function can be a temporary function or a
+ * function, possibly qualified with a database name. This throws an `AnalysisException` when no `Function` can be found.
*/
override def getFunction(functionName: String): Function = {
- getFunction(null, functionName)
+ val functionIdent = sparkSession.sessionState.sqlParser.parseFunctionIdentifier(functionName)
+ getFunction(functionIdent.database.orNull, functionIdent.funcName)
}
/**
- * Get the function with the specified name. This returns `None` when no `Function` can be
+ * Gets the function with the specified name in the specified database. This throws an `AnalysisException` when no `Function` can be
* found.
*/
override def getFunction(dbName: String, functionName: String): Function = {
@@ -219,22 +218,23 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Check if the database with the specified name exists.
+ * Checks if the database with the specified name exists.
*/
override def databaseExists(dbName: String): Boolean = {
sessionCatalog.databaseExists(dbName)
}
/**
- * Check if the table or view with the specified name exists. This can either be a temporary
- * view or a table/view in the current database.
+ * Checks if the table or view with the specified name exists. This can either be a temporary
+ * view or a table/view, possibly qualified with a database name.
*/
override def tableExists(tableName: String): Boolean = {
- tableExists(null, tableName)
+ val tableIdent = sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)
+ tableExists(tableIdent.database.orNull, tableIdent.table)
}
/**
- * Check if the table or view with the specified name exists in the specified database.
+ * Checks if the table or view with the specified name exists in the specified database.
*/
override def tableExists(dbName: String, tableName: String): Boolean = {
val tableIdent = TableIdentifier(tableName, Option(dbName))
@@ -242,15 +242,16 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Check if the function with the specified name exists. This can either be a temporary function
- * or a function in the current database.
+ * Checks if the function with the specified name exists. This can either be a temporary function
+ * or a function, possibly qualified with a database name.
*/
override def functionExists(functionName: String): Boolean = {
- functionExists(null, functionName)
+ val functionIdent = sparkSession.sessionState.sqlParser.parseFunctionIdentifier(functionName)
+ functionExists(functionIdent.database.orNull, functionIdent.funcName)
}
/**
- * Check if the function with the specified name exists in the specified database.
+ * Checks if the function with the specified name exists in the specified database.
*/
override def functionExists(dbName: String, functionName: String): Boolean = {
sessionCatalog.functionExists(FunctionIdentifier(functionName, Option(dbName)))
@@ -303,7 +304,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
/**
* :: Experimental ::
* (Scala-specific)
- * Create a table from the given path based on a data source, a schema and a set of options.
+ * Creates a table from the given path based on a data source, a schema and a set of options.
* Then, returns the corresponding DataFrame.
*
* @group ddl_ops
@@ -338,7 +339,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
* Drops the local temporary view with the given view name in the catalog.
* If the view has been cached/persisted before, it's also unpersisted.
*
- * @param viewName the name of the view to be dropped.
+ * @param viewName the identifier of the temporary view to be dropped.
* @group ddl_ops
* @since 2.0.0
*/
@@ -353,7 +354,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
* Drops the global temporary view with the given view name in the catalog.
* If the view has been cached/persisted before, it's also unpersisted.
*
- * @param viewName the name of the view to be dropped.
+ * @param viewName the identifier of the global temporary view to be dropped.
* @group ddl_ops
* @since 2.1.0
*/
@@ -365,9 +366,11 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Recover all the partitions in the directory of a table and update the catalog.
+ * Recovers all the partitions in the directory of a table and updates the catalog.
*
- * @param tableName the name of the table to be repaired.
+ * @param tableName is either a qualified or unqualified name that designates a table.
+ * If no database identifier is provided, it refers to a table in the
+ * current database.
* @group ddl_ops
* @since 2.1.1
*/
@@ -378,7 +381,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Returns true if the table is currently cached in-memory.
+ * Returns true if the table or view is currently cached in-memory.
*
* @group cachemgmt
* @since 2.0.0
@@ -388,7 +391,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Caches the specified table in-memory.
+ * Caches the specified table or view in-memory.
*
* @group cachemgmt
* @since 2.0.0
@@ -398,7 +401,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Removes the specified table from the in-memory cache.
+ * Removes the specified table or view from the in-memory cache.
*
* @group cachemgmt
* @since 2.0.0
@@ -408,7 +411,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Removes all cached tables from the in-memory cache.
+ * Removes all cached tables or views from the in-memory cache.
*
* @group cachemgmt
* @since 2.0.0
@@ -428,7 +431,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Refresh the cache entry for a table, if any. For Hive metastore table, the metadata
+ * Refreshes the cache entry for a table or view, if any. For a Hive metastore table, the metadata
* is refreshed. For data source tables, the schema will not be inferred and refreshed.
*
* @group cachemgmt
@@ -452,7 +455,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
}
/**
- * Refresh the cache entry and the associated metadata for all dataframes (if any), that contain
+ * Refreshes the cache entry and the associated metadata for all Datasets (if any) that contain
* the given data source path.
*
* @group cachemgmt
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
index 9742b3b2d5..6469e501c1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
@@ -102,6 +102,11 @@ class CatalogSuite
assert(col.isPartition == tableMetadata.partitionColumnNames.contains(col.name))
assert(col.isBucket == bucketColumnNames.contains(col.name))
}
+
+ dbName.foreach { db =>
+ val expected = columns.collect().map(_.name).toSet
+ assert(spark.catalog.listColumns(s"$db.$tableName").collect().map(_.name).toSet == expected)
+ }
}
override def afterEach(): Unit = {
@@ -345,6 +350,7 @@ class CatalogSuite
// Find a qualified table
assert(spark.catalog.getTable(db, "tbl_y").name === "tbl_y")
+ assert(spark.catalog.getTable(s"$db.tbl_y").name === "tbl_y")
// Find an unqualified table using the current database
intercept[AnalysisException](spark.catalog.getTable("tbl_y"))
@@ -378,6 +384,11 @@ class CatalogSuite
assert(fn2.database === db)
assert(!fn2.isTemporary)
+ val fn2WithQualifiedName = spark.catalog.getFunction(s"$db.fn2")
+ assert(fn2WithQualifiedName.name === "fn2")
+ assert(fn2WithQualifiedName.database === db)
+ assert(!fn2WithQualifiedName.isTemporary)
+
// Find an unqualified function using the current database
intercept[AnalysisException](spark.catalog.getFunction("fn2"))
spark.catalog.setCurrentDatabase(db)
@@ -403,6 +414,7 @@ class CatalogSuite
assert(!spark.catalog.tableExists("tbl_x"))
assert(!spark.catalog.tableExists("tbl_y"))
assert(!spark.catalog.tableExists(db, "tbl_y"))
+ assert(!spark.catalog.tableExists(s"$db.tbl_y"))
// Create objects.
createTempTable("tbl_x")
@@ -413,11 +425,15 @@ class CatalogSuite
// Find a qualified table
assert(spark.catalog.tableExists(db, "tbl_y"))
+ assert(spark.catalog.tableExists(s"$db.tbl_y"))
// Find an unqualified table using the current database
assert(!spark.catalog.tableExists("tbl_y"))
spark.catalog.setCurrentDatabase(db)
assert(spark.catalog.tableExists("tbl_y"))
+
+ // The table is not found, even though a temp view with the same name exists
+ assert(!spark.catalog.tableExists(db, "tbl_x"))
}
}
}
@@ -429,6 +445,7 @@ class CatalogSuite
assert(!spark.catalog.functionExists("fn1"))
assert(!spark.catalog.functionExists("fn2"))
assert(!spark.catalog.functionExists(db, "fn2"))
+ assert(!spark.catalog.functionExists(s"$db.fn2"))
// Create objects.
createTempFunction("fn1")
@@ -439,11 +456,15 @@ class CatalogSuite
// Find a qualified function
assert(spark.catalog.functionExists(db, "fn2"))
+ assert(spark.catalog.functionExists(s"$db.fn2"))
// Find an unqualified function using the current database
assert(!spark.catalog.functionExists("fn2"))
spark.catalog.setCurrentDatabase(db)
assert(spark.catalog.functionExists("fn2"))
+
+ // The function is not found, even though a temp function with the same name exists
+ assert(!spark.catalog.functionExists(db, "fn1"))
}
}
}