author     Andrew Or <andrew@databricks.com>    2016-03-16 18:02:43 -0700
committer  Yin Huai <yhuai@databricks.com>      2016-03-16 18:02:43 -0700
commit     ca9ef86c84ee84263f437a979017898f4bed0feb (patch)
tree       8dbcb871a0664a2375bd59ffbecdbf7bdc23c4d8 /sql/hive
parent     92b70576eabf8ff94ac476e2b3c66f8b3d28e79e (diff)
[SPARK-13923][SQL] Implement SessionCatalog
## What changes were proposed in this pull request?

As part of the effort to merge `SQLContext` and `HiveContext`, this patch implements an internal catalog called `SessionCatalog` that handles temporary functions and tables and delegates metastore operations to `ExternalCatalog`. Currently this is still dead code, but in the future it will be part of `SessionState` and will replace `o.a.s.sql.catalyst.analysis.Catalog`. With a recent patch (#11573), Spark parses Hive commands itself but still passes the entire query text to Hive. In a future patch, `SessionCatalog` will be used to implement the parsed commands.

## How was this patch tested?

800+ lines of tests in `SessionCatalogSuite`.

Author: Andrew Or <andrew@databricks.com>

Closes #11750 from andrewor14/temp-catalog.
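For readers skimming the diff, here is a minimal sketch of the delegation described above. It is illustrative only: `SessionCatalogSketch`, `MetastoreCatalog`, `createTempTable`, and `currentDb` are hypothetical names with simplified signatures, and the real `SessionCatalog` lives in `sql/catalyst`, so it does not appear in the `sql/hive` diffstat below. Temporary functions would be handled analogously to temporary tables.

```scala
import scala.collection.mutable

import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

// Hypothetical stand-in for the metastore-backed catalog (e.g. HiveCatalog);
// the real ExternalCatalog API may differ from this trimmed interface.
trait MetastoreCatalog {
  def tableExists(db: String, table: String): Boolean
}

// Simplified sketch: temporary tables live in session-local memory, while
// qualified lookups are delegated to the external (metastore) catalog.
class SessionCatalogSketch(external: MetastoreCatalog, currentDb: String) {
  private val tempTables = new mutable.HashMap[String, LogicalPlan]

  def createTempTable(name: String, plan: LogicalPlan): Unit =
    tempTables.put(name, plan)

  def tableExists(name: TableIdentifier): Boolean = name.database match {
    // Database explicitly given: only the metastore can answer.
    case Some(db) => external.tableExists(db, name.table)
    // No database: check session-local temp tables, then the current database.
    case None =>
      tempTables.contains(name.table) || external.tableExists(currentDb, name.table)
  }
}
```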
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveCatalog.scala                    |  9
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala           | 26
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala                         | 14
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala              |  2
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala          | 15
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala  |  4
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateViewAsSelect.scala   |  4
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveCatalogSuite.scala               | 13
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala                    | 16
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala      |  3
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala           |  4
11 files changed, 49 insertions, 61 deletions
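Most of the `sql/hive` changes below are mechanical fallout from one schema change in `CatalogTable`: the separate `specifiedDatabase: Option[String]` and `name: String` fields are folded into a single `name: TableIdentifier`. The case classes in this before/after sketch are trimmed, illustrative stand-ins, not the real definitions:

```scala
import org.apache.spark.sql.catalyst.TableIdentifier

// Trimmed, hypothetical versions of the old and new CatalogTable shapes;
// the real case class carries many more fields (tableType, schema, storage, ...).
case class OldCatalogTable(specifiedDatabase: Option[String], name: String)
case class NewCatalogTable(name: TableIdentifier)

object TableIdentifierRefactorExample extends App {
  // Call sites change as seen throughout the diff below:
  val before = OldCatalogTable(specifiedDatabase = Some("default"), name = "src")
  val after  = NewCatalogTable(name = TableIdentifier("src", Some("default")))

  // Accessors move from table.specifiedDatabase / table.name
  // to table.name.database / table.name.table.
  assert(after.name.database == Some("default"))
  assert(after.name.table == "src")
}
```

`CatalogFunction` gets the same treatment with `FunctionIdentifier`, as seen in the `HiveClientImpl` hunks.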
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveCatalog.scala
index 5185e9aac0..439501fe19 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveCatalog.scala
@@ -24,6 +24,7 @@ import org.apache.thrift.TException
import org.apache.spark.Logging
import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.NoSuchItemException
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.hive.client.HiveClient
@@ -73,10 +74,10 @@ private[spark] class HiveCatalog(client: HiveClient) extends ExternalCatalog wit
}
private def requireDbMatches(db: String, table: CatalogTable): Unit = {
- if (table.specifiedDatabase != Some(db)) {
+ if (table.name.database != Some(db)) {
throw new AnalysisException(
s"Provided database $db does not much the one specified in the " +
- s"table definition (${table.specifiedDatabase.getOrElse("n/a")})")
+ s"table definition (${table.name.database.getOrElse("n/a")})")
}
}
@@ -160,7 +161,7 @@ private[spark] class HiveCatalog(client: HiveClient) extends ExternalCatalog wit
}
override def renameTable(db: String, oldName: String, newName: String): Unit = withClient {
- val newTable = client.getTable(db, oldName).copy(name = newName)
+ val newTable = client.getTable(db, oldName).copy(name = TableIdentifier(newName, Some(db)))
client.alterTable(oldName, newTable)
}
@@ -173,7 +174,7 @@ private[spark] class HiveCatalog(client: HiveClient) extends ExternalCatalog wit
*/
override def alterTable(db: String, tableDefinition: CatalogTable): Unit = withClient {
requireDbMatches(db, tableDefinition)
- requireTableExists(db, tableDefinition.name)
+ requireTableExists(db, tableDefinition.name.table)
client.alterTable(tableDefinition)
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index c70510b483..b6c78691e4 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -118,8 +118,8 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
private def getQualifiedTableName(t: CatalogTable): QualifiedTableName = {
QualifiedTableName(
- t.specifiedDatabase.getOrElse(client.currentDatabase).toLowerCase,
- t.name.toLowerCase)
+ t.name.database.getOrElse(client.currentDatabase).toLowerCase,
+ t.name.table.toLowerCase)
}
/** A cache of Spark SQL data source tables that have been accessed. */
@@ -293,8 +293,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
def newSparkSQLSpecificMetastoreTable(): CatalogTable = {
CatalogTable(
- specifiedDatabase = Option(dbName),
- name = tblName,
+ name = TableIdentifier(tblName, Option(dbName)),
tableType = tableType,
schema = Nil,
storage = CatalogStorageFormat(
@@ -314,8 +313,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
assert(relation.partitionSchema.isEmpty)
CatalogTable(
- specifiedDatabase = Option(dbName),
- name = tblName,
+ name = TableIdentifier(tblName, Option(dbName)),
tableType = tableType,
storage = CatalogStorageFormat(
locationUri = Some(relation.location.paths.map(_.toUri.toString).head),
@@ -432,7 +430,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
alias match {
// because hive use things like `_c0` to build the expanded text
// currently we cannot support view from "create view v1(c1) as ..."
- case None => SubqueryAlias(table.name, hive.parseSql(viewText))
+ case None => SubqueryAlias(table.name.table, hive.parseSql(viewText))
case Some(aliasText) => SubqueryAlias(aliasText, hive.parseSql(viewText))
}
} else {
@@ -618,9 +616,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
val QualifiedTableName(dbName, tblName) = getQualifiedTableName(table)
execution.CreateViewAsSelect(
- table.copy(
- specifiedDatabase = Some(dbName),
- name = tblName),
+ table.copy(name = TableIdentifier(tblName, Some(dbName))),
child,
allowExisting,
replace)
@@ -642,7 +638,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
if (hive.convertCTAS && table.storage.serde.isEmpty) {
// Do the conversion when spark.sql.hive.convertCTAS is true and the query
// does not specify any storage format (file format and storage handler).
- if (table.specifiedDatabase.isDefined) {
+ if (table.name.database.isDefined) {
throw new AnalysisException(
"Cannot specify database name in a CTAS statement " +
"when spark.sql.hive.convertCTAS is set to true.")
@@ -650,7 +646,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
val mode = if (allowExisting) SaveMode.Ignore else SaveMode.ErrorIfExists
CreateTableUsingAsSelect(
- TableIdentifier(desc.name),
+ TableIdentifier(desc.name.table),
conf.defaultDataSourceName,
temporary = false,
Array.empty[String],
@@ -671,9 +667,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
val QualifiedTableName(dbName, tblName) = getQualifiedTableName(table)
execution.CreateTableAsSelect(
- desc.copy(
- specifiedDatabase = Some(dbName),
- name = tblName),
+ desc.copy(name = TableIdentifier(tblName, Some(dbName))),
child,
allowExisting)
}
@@ -824,7 +818,7 @@ private[hive] case class MetastoreRelation(
// We start by constructing an API table as Hive performs several important transformations
// internally when converting an API table to a QL table.
val tTable = new org.apache.hadoop.hive.metastore.api.Table()
- tTable.setTableName(table.name)
+ tTable.setTableName(table.name.table)
tTable.setDbName(table.database)
val tableParameters = new java.util.HashMap[String, String]()
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index 739fbaf444..00fc8af578 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -60,7 +60,7 @@ private[hive] case class CreateTableAsSelect(
override def output: Seq[Attribute] = Seq.empty[Attribute]
override lazy val resolved: Boolean =
- tableDesc.specifiedDatabase.isDefined &&
+ tableDesc.name.database.isDefined &&
tableDesc.schema.nonEmpty &&
tableDesc.storage.serde.isDefined &&
tableDesc.storage.inputFormat.isDefined &&
@@ -185,13 +185,10 @@ private[hive] class HiveQl(conf: ParserConf) extends SparkQl(conf) with Logging
properties: Map[String, String],
allowExist: Boolean,
replace: Boolean): CreateViewAsSelect = {
- val TableIdentifier(viewName, dbName) = extractTableIdent(viewNameParts)
-
+ val tableIdentifier = extractTableIdent(viewNameParts)
val originalText = query.source
-
val tableDesc = CatalogTable(
- specifiedDatabase = dbName,
- name = viewName,
+ name = tableIdentifier,
tableType = CatalogTableType.VIRTUAL_VIEW,
schema = schema,
storage = CatalogStorageFormat(
@@ -356,12 +353,11 @@ private[hive] class HiveQl(conf: ParserConf) extends SparkQl(conf) with Logging
"TOK_TABLELOCATION",
"TOK_TABLEPROPERTIES"),
children)
- val TableIdentifier(tblName, dbName) = extractTableIdent(tableNameParts)
+ val tableIdentifier = extractTableIdent(tableNameParts)
// TODO add bucket support
var tableDesc: CatalogTable = CatalogTable(
- specifiedDatabase = dbName,
- name = tblName,
+ name = tableIdentifier,
tableType =
if (externalTable.isDefined) {
CatalogTableType.EXTERNAL_TABLE
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala
index b32aff25be..d214e5288e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala
@@ -91,7 +91,7 @@ private[hive] trait HiveClient {
def dropTable(dbName: String, tableName: String, ignoreIfNotExists: Boolean): Unit
/** Alter a table whose name matches the one specified in `table`, assuming it exists. */
- final def alterTable(table: CatalogTable): Unit = alterTable(table.name, table)
+ final def alterTable(table: CatalogTable): Unit = alterTable(table.name.table, table)
/** Updates the given table with new metadata, optionally renaming the table. */
def alterTable(tableName: String, table: CatalogTable): Unit
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index c108750c38..3040ec93f8 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.ql.session.SessionState
import org.apache.hadoop.security.UserGroupInformation
import org.apache.spark.{Logging, SparkConf, SparkException}
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPartitionException}
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.expressions.Expression
@@ -298,8 +299,7 @@ private[hive] class HiveClientImpl(
logDebug(s"Looking up $dbName.$tableName")
Option(client.getTable(dbName, tableName, false)).map { h =>
CatalogTable(
- specifiedDatabase = Option(h.getDbName),
- name = h.getTableName,
+ name = TableIdentifier(h.getTableName, Option(h.getDbName)),
tableType = h.getTableType match {
case HiveTableType.EXTERNAL_TABLE => CatalogTableType.EXTERNAL_TABLE
case HiveTableType.MANAGED_TABLE => CatalogTableType.MANAGED_TABLE
@@ -545,13 +545,13 @@ private[hive] class HiveClientImpl(
}
override def renameFunction(db: String, oldName: String, newName: String): Unit = withHiveState {
- val catalogFunc = getFunction(db, oldName).copy(name = newName)
+ val catalogFunc = getFunction(db, oldName).copy(name = FunctionIdentifier(newName, Some(db)))
val hiveFunc = toHiveFunction(catalogFunc, db)
client.alterFunction(db, oldName, hiveFunc)
}
override def alterFunction(db: String, func: CatalogFunction): Unit = withHiveState {
- client.alterFunction(db, func.name, toHiveFunction(func, db))
+ client.alterFunction(db, func.name.funcName, toHiveFunction(func, db))
}
override def getFunctionOption(
@@ -612,7 +612,7 @@ private[hive] class HiveClientImpl(
private def toHiveFunction(f: CatalogFunction, db: String): HiveFunction = {
new HiveFunction(
- f.name,
+ f.name.funcName,
db,
f.className,
null,
@@ -623,7 +623,8 @@ private[hive] class HiveClientImpl(
}
private def fromHiveFunction(hf: HiveFunction): CatalogFunction = {
- new CatalogFunction(hf.getFunctionName, hf.getClassName)
+ val name = FunctionIdentifier(hf.getFunctionName, Option(hf.getDbName))
+ new CatalogFunction(name, hf.getClassName)
}
private def toHiveColumn(c: CatalogColumn): FieldSchema = {
@@ -639,7 +640,7 @@ private[hive] class HiveClientImpl(
}
private def toHiveTable(table: CatalogTable): HiveTable = {
- val hiveTable = new HiveTable(table.database, table.name)
+ val hiveTable = new HiveTable(table.database, table.name.table)
hiveTable.setTableType(table.tableType match {
case CatalogTableType.EXTERNAL_TABLE => HiveTableType.EXTERNAL_TABLE
case CatalogTableType.MANAGED_TABLE => HiveTableType.MANAGED_TABLE
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala
index 91425d1435..391e2975d0 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala
@@ -38,7 +38,7 @@ case class CreateTableAsSelect(
allowExisting: Boolean)
extends RunnableCommand {
- val tableIdentifier = TableIdentifier(tableDesc.name, Some(tableDesc.database))
+ private val tableIdentifier = tableDesc.name
override def children: Seq[LogicalPlan] = Seq(query)
@@ -93,6 +93,6 @@ case class CreateTableAsSelect(
}
override def argString: String = {
- s"[Database:${tableDesc.database}}, TableName: ${tableDesc.name}, InsertIntoHiveTable]"
+ s"[Database:${tableDesc.database}}, TableName: ${tableDesc.name.table}, InsertIntoHiveTable]"
}
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateViewAsSelect.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateViewAsSelect.scala
index 6c2b88eb8c..8a1cf2caaa 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateViewAsSelect.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateViewAsSelect.scala
@@ -44,7 +44,7 @@ private[hive] case class CreateViewAsSelect(
assert(tableDesc.schema == Nil || tableDesc.schema.length == childSchema.length)
assert(tableDesc.viewText.isDefined)
- val tableIdentifier = TableIdentifier(tableDesc.name, Some(tableDesc.database))
+ private val tableIdentifier = tableDesc.name
override def run(sqlContext: SQLContext): Seq[Row] = {
val hiveContext = sqlContext.asInstanceOf[HiveContext]
@@ -116,7 +116,7 @@ private[hive] case class CreateViewAsSelect(
}
val viewText = tableDesc.viewText.get
- val viewName = quote(tableDesc.name)
+ val viewName = quote(tableDesc.name.table)
s"SELECT $viewOutput FROM ($viewText) $viewName"
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveCatalogSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveCatalogSuite.scala
index 2809f9439b..0dc4fea22d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveCatalogSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveCatalogSuite.scala
@@ -36,15 +36,12 @@ class HiveCatalogSuite extends CatalogTestCases {
sparkConf = new SparkConf()).createClient()
}
- protected override val tableInputFormat: String =
- "org.apache.hadoop.mapred.SequenceFileInputFormat"
- protected override val tableOutputFormat: String =
- "org.apache.hadoop.mapred.SequenceFileOutputFormat"
-
- protected override def newUriForDatabase(): String = Utils.createTempDir().getAbsolutePath
+ protected override val utils: CatalogTestUtils = new CatalogTestUtils {
+ override val tableInputFormat: String = "org.apache.hadoop.mapred.SequenceFileInputFormat"
+ override val tableOutputFormat: String = "org.apache.hadoop.mapred.SequenceFileOutputFormat"
+ override def newEmptyCatalog(): ExternalCatalog = new HiveCatalog(client)
+ }
protected override def resetState(): Unit = client.reset()
- protected override def newEmptyCatalog(): ExternalCatalog = new HiveCatalog(client)
-
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala
index 626550f56c..1c775db9b6 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala
@@ -54,8 +54,8 @@ class HiveQlSuite extends SparkFunSuite with BeforeAndAfterAll {
val (desc, exists) = extractTableDesc(s1)
assert(exists)
- assert(desc.specifiedDatabase == Some("mydb"))
- assert(desc.name == "page_view")
+ assert(desc.name.database == Some("mydb"))
+ assert(desc.name.table == "page_view")
assert(desc.tableType == CatalogTableType.EXTERNAL_TABLE)
assert(desc.storage.locationUri == Some("/user/external/page_view"))
assert(desc.schema ==
@@ -100,8 +100,8 @@ class HiveQlSuite extends SparkFunSuite with BeforeAndAfterAll {
val (desc, exists) = extractTableDesc(s2)
assert(exists)
- assert(desc.specifiedDatabase == Some("mydb"))
- assert(desc.name == "page_view")
+ assert(desc.name.database == Some("mydb"))
+ assert(desc.name.table == "page_view")
assert(desc.tableType == CatalogTableType.EXTERNAL_TABLE)
assert(desc.storage.locationUri == Some("/user/external/page_view"))
assert(desc.schema ==
@@ -127,8 +127,8 @@ class HiveQlSuite extends SparkFunSuite with BeforeAndAfterAll {
val s3 = """CREATE TABLE page_view AS SELECT * FROM src"""
val (desc, exists) = extractTableDesc(s3)
assert(exists == false)
- assert(desc.specifiedDatabase == None)
- assert(desc.name == "page_view")
+ assert(desc.name.database == None)
+ assert(desc.name.table == "page_view")
assert(desc.tableType == CatalogTableType.MANAGED_TABLE)
assert(desc.storage.locationUri == None)
assert(desc.schema == Seq.empty[CatalogColumn])
@@ -162,8 +162,8 @@ class HiveQlSuite extends SparkFunSuite with BeforeAndAfterAll {
| ORDER BY key, value""".stripMargin
val (desc, exists) = extractTableDesc(s5)
assert(exists == false)
- assert(desc.specifiedDatabase == None)
- assert(desc.name == "ctas2")
+ assert(desc.name.database == None)
+ assert(desc.name.table == "ctas2")
assert(desc.tableType == CatalogTableType.MANAGED_TABLE)
assert(desc.storage.locationUri == None)
assert(desc.schema == Seq.empty[CatalogColumn])
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index 81420fea11..a80c35cd81 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -719,8 +719,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
withTable(tableName) {
val schema = StructType(StructField("int", IntegerType, true) :: Nil)
val hiveTable = CatalogTable(
- specifiedDatabase = Some("default"),
- name = tableName,
+ name = TableIdentifier(tableName, Some("default")),
tableType = CatalogTableType.MANAGED_TABLE,
schema = Seq.empty,
storage = CatalogStorageFormat(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index 6292f6c3af..3d54da11ad 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -22,6 +22,7 @@ import java.io.File
import org.apache.hadoop.util.VersionInfo
import org.apache.spark.{Logging, SparkConf, SparkFunSuite}
+import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, EqualTo, Literal, NamedExpression}
import org.apache.spark.sql.catalyst.util.quietly
@@ -129,8 +130,7 @@ class VersionsSuite extends SparkFunSuite with Logging {
test(s"$version: createTable") {
val table =
CatalogTable(
- specifiedDatabase = Option("default"),
- name = "src",
+ name = TableIdentifier("src", Some("default")),
tableType = CatalogTableType.MANAGED_TABLE,
schema = Seq(CatalogColumn("key", "int")),
storage = CatalogStorageFormat(