path: root/sql/hive/src
author     Andrew Or <andrew@databricks.com>   2016-03-28 16:25:15 -0700
committer  Andrew Or <andrew@databricks.com>   2016-03-28 16:25:15 -0700
commit     eebc8c1c95fb7752d09a5846b7cac65f7702c8f2 (patch)
tree       5be4767998f619a02fae8035dc3f989dafbc6547 /sql/hive/src
parent     34c0638ee6f05aef81d90594dd9b8e06006c2c7f (diff)
[SPARK-13923][SPARK-14014][SQL] Session catalog follow-ups
## What changes were proposed in this pull request?

This patch addresses the remaining review comments left on #11750 and #11918 after they were merged. For the full list of changes in this patch, trace the individual commits.

## How was this patch tested?

`SessionCatalogSuite` and `CatalogTestCases`.

Author: Andrew Or <andrew@databricks.com>

Closes #12006 from andrewor14/session-catalog-followup.
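The bulk of the diff below is two mechanical renames: `CatalogTable.name` becomes `CatalogTable.identifier`, and `HiveCatalog` becomes `HiveExternalCatalog`. The following minimal Scala sketch illustrates the field rename only; the case classes are simplified stand-ins, not the real definitions in `org.apache.spark.sql.catalyst`.

```scala
// Simplified stand-ins for the Catalyst classes touched by this patch;
// these are NOT the real definitions in org.apache.spark.sql.catalyst.
case class TableIdentifier(table: String, database: Option[String] = None)

case class CatalogTable(
    identifier: TableIdentifier, // was: name: TableIdentifier
    tableType: String,
    schema: Seq[String] = Nil)

object IdentifierRenameSketch {
  def main(args: Array[String]): Unit = {
    val table = CatalogTable(TableIdentifier("src", Some("default")), "MANAGED_TABLE")
    // Callers that used to read table.name.table now read table.identifier.table.
    println(table.identifier.table)                      // src
    println(table.identifier.database.getOrElse("n/a"))  // default
  }
}
```

Callers throughout sql/hive switch from `table.name.database` / `table.name.table` to the `identifier` equivalents, as the hunks below show.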
Diffstat (limited to 'sql/hive/src')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala | 4
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala (renamed from sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveCatalog.scala) | 11
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala | 22
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala | 6
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala | 6
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala | 2
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala | 11
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala | 6
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateViewAsSelect.scala | 4
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala | 4
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogSuite.scala (renamed from sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveCatalogSuite.scala) | 6
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala | 16
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala | 2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala | 2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala | 2
15 files changed, 54 insertions, 50 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index ca3ce43591..c0b6d16d3c 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -86,7 +86,7 @@ class HiveContext private[hive](
@transient private[hive] val executionHive: HiveClientImpl,
@transient private[hive] val metadataHive: HiveClient,
isRootContext: Boolean,
- @transient private[sql] val hiveCatalog: HiveCatalog)
+ @transient private[sql] val hiveCatalog: HiveExternalCatalog)
extends SQLContext(sc, cacheManager, listener, isRootContext, hiveCatalog) with Logging {
self =>
@@ -98,7 +98,7 @@ class HiveContext private[hive](
execHive,
metaHive,
true,
- new HiveCatalog(metaHive))
+ new HiveExternalCatalog(metaHive))
}
def this(sc: SparkContext) = {
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index 0722fb02a8..f75509fe80 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -34,7 +34,7 @@ import org.apache.spark.sql.hive.client.HiveClient
* A persistent implementation of the system catalog using Hive.
* All public methods must be synchronized for thread-safety.
*/
-private[spark] class HiveCatalog(client: HiveClient) extends ExternalCatalog with Logging {
+private[spark] class HiveExternalCatalog(client: HiveClient) extends ExternalCatalog with Logging {
import ExternalCatalog._
// Exceptions thrown by the hive client that we would like to wrap
@@ -74,10 +74,10 @@ private[spark] class HiveCatalog(client: HiveClient) extends ExternalCatalog wit
}
private def requireDbMatches(db: String, table: CatalogTable): Unit = {
- if (table.name.database != Some(db)) {
+ if (table.identifier.database != Some(db)) {
throw new AnalysisException(
s"Provided database $db does not much the one specified in the " +
- s"table definition (${table.name.database.getOrElse("n/a")})")
+ s"table definition (${table.identifier.database.getOrElse("n/a")})")
}
}
@@ -160,7 +160,8 @@ private[spark] class HiveCatalog(client: HiveClient) extends ExternalCatalog wit
}
override def renameTable(db: String, oldName: String, newName: String): Unit = withClient {
- val newTable = client.getTable(db, oldName).copy(name = TableIdentifier(newName, Some(db)))
+ val newTable = client.getTable(db, oldName)
+ .copy(identifier = TableIdentifier(newName, Some(db)))
client.alterTable(oldName, newTable)
}
@@ -173,7 +174,7 @@ private[spark] class HiveCatalog(client: HiveClient) extends ExternalCatalog wit
*/
override def alterTable(db: String, tableDefinition: CatalogTable): Unit = withClient {
requireDbMatches(db, tableDefinition)
- requireTableExists(db, tableDefinition.name.table)
+ requireTableExists(db, tableDefinition.identifier.table)
client.alterTable(tableDefinition)
}
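The class doc above says every public method of this catalog must be synchronized and that Hive client exceptions are wrapped, and the hunks route calls through `withClient`. The sketch below is a rough illustration of that pattern only; the client trait, exception type, and method names are placeholders, not Spark's actual `HiveClient` API.

```scala
// Rough, illustrative sketch of a withClient-style guard: synchronize each
// public method and re-wrap client failures. Placeholder types throughout.
trait FakeHiveClient {
  def getTable(db: String, table: String): String
}

class ExternalCatalogSketch(client: FakeHiveClient) {
  private def withClient[T](body: => T): T = synchronized {
    try body
    catch {
      case e: RuntimeException =>
        // Re-surface client-side failures as a single, catalog-level error.
        throw new IllegalArgumentException(s"Hive client call failed: ${e.getMessage}", e)
    }
  }

  def getTable(db: String, table: String): String = withClient {
    client.getTable(db, table)
  }
}
```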
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index c7066d7363..eedd12d76a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -102,7 +102,7 @@ private[hive] object HiveSerDe {
* Legacy catalog for interacting with the Hive metastore.
*
* This is still used for things like creating data source tables, but in the future will be
- * cleaned up to integrate more nicely with [[HiveCatalog]].
+ * cleaned up to integrate more nicely with [[HiveExternalCatalog]].
*/
private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveContext)
extends Logging {
@@ -124,8 +124,8 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
private def getQualifiedTableName(t: CatalogTable): QualifiedTableName = {
QualifiedTableName(
- t.name.database.getOrElse(getCurrentDatabase).toLowerCase,
- t.name.table.toLowerCase)
+ t.identifier.database.getOrElse(getCurrentDatabase).toLowerCase,
+ t.identifier.table.toLowerCase)
}
/** A cache of Spark SQL data source tables that have been accessed. */
@@ -299,7 +299,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
def newSparkSQLSpecificMetastoreTable(): CatalogTable = {
CatalogTable(
- name = TableIdentifier(tblName, Option(dbName)),
+ identifier = TableIdentifier(tblName, Option(dbName)),
tableType = tableType,
schema = Nil,
storage = CatalogStorageFormat(
@@ -319,7 +319,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
assert(relation.partitionSchema.isEmpty)
CatalogTable(
- name = TableIdentifier(tblName, Option(dbName)),
+ identifier = TableIdentifier(tblName, Option(dbName)),
tableType = tableType,
storage = CatalogStorageFormat(
locationUri = Some(relation.location.paths.map(_.toUri.toString).head),
@@ -431,7 +431,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
alias match {
// because hive use things like `_c0` to build the expanded text
// currently we cannot support view from "create view v1(c1) as ..."
- case None => SubqueryAlias(table.name.table, hive.parseSql(viewText))
+ case None => SubqueryAlias(table.identifier.table, hive.parseSql(viewText))
case Some(aliasText) => SubqueryAlias(aliasText, hive.parseSql(viewText))
}
} else {
@@ -611,7 +611,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
val QualifiedTableName(dbName, tblName) = getQualifiedTableName(table)
execution.CreateViewAsSelect(
- table.copy(name = TableIdentifier(tblName, Some(dbName))),
+ table.copy(identifier = TableIdentifier(tblName, Some(dbName))),
child,
allowExisting,
replace)
@@ -633,7 +633,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
if (hive.convertCTAS && table.storage.serde.isEmpty) {
// Do the conversion when spark.sql.hive.convertCTAS is true and the query
// does not specify any storage format (file format and storage handler).
- if (table.name.database.isDefined) {
+ if (table.identifier.database.isDefined) {
throw new AnalysisException(
"Cannot specify database name in a CTAS statement " +
"when spark.sql.hive.convertCTAS is set to true.")
@@ -641,7 +641,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
val mode = if (allowExisting) SaveMode.Ignore else SaveMode.ErrorIfExists
CreateTableUsingAsSelect(
- TableIdentifier(desc.name.table),
+ TableIdentifier(desc.identifier.table),
conf.defaultDataSourceName,
temporary = false,
Array.empty[String],
@@ -662,7 +662,7 @@ private[hive] class HiveMetastoreCatalog(val client: HiveClient, hive: HiveConte
val QualifiedTableName(dbName, tblName) = getQualifiedTableName(table)
execution.CreateTableAsSelect(
- desc.copy(name = TableIdentifier(tblName, Some(dbName))),
+ desc.copy(identifier = TableIdentifier(tblName, Some(dbName))),
child,
allowExisting)
}
@@ -792,7 +792,7 @@ private[hive] case class MetastoreRelation(
// We start by constructing an API table as Hive performs several important transformations
// internally when converting an API table to a QL table.
val tTable = new org.apache.hadoop.hive.metastore.api.Table()
- tTable.setTableName(table.name.table)
+ tTable.setTableName(table.identifier.table)
tTable.setDbName(table.database)
val tableParameters = new java.util.HashMap[String, String]()
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index e5bcb9b1db..b3ec95fc73 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -60,7 +60,7 @@ private[hive] case class CreateTableAsSelect(
override def output: Seq[Attribute] = Seq.empty[Attribute]
override lazy val resolved: Boolean =
- tableDesc.name.database.isDefined &&
+ tableDesc.identifier.database.isDefined &&
tableDesc.schema.nonEmpty &&
tableDesc.storage.serde.isDefined &&
tableDesc.storage.inputFormat.isDefined &&
@@ -183,7 +183,7 @@ private[hive] class HiveQl(conf: ParserConf) extends SparkQl(conf) with Logging
val tableIdentifier = extractTableIdent(viewNameParts)
val originalText = query.source
val tableDesc = CatalogTable(
- name = tableIdentifier,
+ identifier = tableIdentifier,
tableType = CatalogTableType.VIRTUAL_VIEW,
schema = schema,
storage = CatalogStorageFormat(
@@ -352,7 +352,7 @@ private[hive] class HiveQl(conf: ParserConf) extends SparkQl(conf) with Logging
// TODO add bucket support
var tableDesc: CatalogTable = CatalogTable(
- name = tableIdentifier,
+ identifier = tableIdentifier,
tableType =
if (externalTable.isDefined) {
CatalogTableType.EXTERNAL_TABLE
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index aa44cba4b5..ec7bf61be1 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql.types.StructType
class HiveSessionCatalog(
- externalCatalog: HiveCatalog,
+ externalCatalog: HiveExternalCatalog,
client: HiveClient,
context: HiveContext,
conf: SQLConf)
@@ -41,11 +41,11 @@ class HiveSessionCatalog(
override def lookupRelation(name: TableIdentifier, alias: Option[String]): LogicalPlan = {
val table = formatTableName(name.table)
- if (name.database.isDefined || !tempTables.containsKey(table)) {
+ if (name.database.isDefined || !tempTables.contains(table)) {
val newName = name.copy(table = table)
metastoreCatalog.lookupRelation(newName, alias)
} else {
- val relation = tempTables.get(table)
+ val relation = tempTables(table)
val tableWithQualifiers = SubqueryAlias(table, relation)
// If an alias was specified by the lookup, wrap the plan in a subquery so that
// attributes are properly qualified with this alias.
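The hunk above switches `tempTables` access from Java-map calls (`containsKey`, `get`) to Scala-map calls (`contains`, `apply`), which suggests the registry is now a Scala collection. Below is a minimal sketch of that lookup pattern with stubbed types; it is not Spark's `HiveSessionCatalog`.

```scala
import scala.collection.mutable

// Minimal sketch of the temp-table lookup pattern in the hunk above.
// LogicalPlan is stubbed as String; the real type lives in Catalyst.
object TempTableLookupSketch {
  type LogicalPlan = String
  private val tempTables = mutable.HashMap[String, LogicalPlan]()

  def lookup(table: String, database: Option[String]): LogicalPlan = {
    if (database.isDefined || !tempTables.contains(table)) {
      // The real code falls through to the persistent metastore catalog here.
      s"metastore:$table"
    } else {
      // Scala maps use contains/apply rather than containsKey/get.
      tempTables(table)
    }
  }
}
```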
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala
index f4d30358ca..ee56f9d75d 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala
@@ -88,7 +88,7 @@ private[hive] trait HiveClient {
def dropTable(dbName: String, tableName: String, ignoreIfNotExists: Boolean): Unit
/** Alter a table whose name matches the one specified in `table`, assuming it exists. */
- final def alterTable(table: CatalogTable): Unit = alterTable(table.name.table, table)
+ final def alterTable(table: CatalogTable): Unit = alterTable(table.identifier.table, table)
/** Updates the given table with new metadata, optionally renaming the table. */
def alterTable(tableName: String, table: CatalogTable): Unit
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index e4e15d13df..a31178e347 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -298,7 +298,7 @@ private[hive] class HiveClientImpl(
logDebug(s"Looking up $dbName.$tableName")
Option(client.getTable(dbName, tableName, false)).map { h =>
CatalogTable(
- name = TableIdentifier(h.getTableName, Option(h.getDbName)),
+ identifier = TableIdentifier(h.getTableName, Option(h.getDbName)),
tableType = h.getTableType match {
case HiveTableType.EXTERNAL_TABLE => CatalogTableType.EXTERNAL_TABLE
case HiveTableType.MANAGED_TABLE => CatalogTableType.MANAGED_TABLE
@@ -544,13 +544,14 @@ private[hive] class HiveClientImpl(
}
override def renameFunction(db: String, oldName: String, newName: String): Unit = withHiveState {
- val catalogFunc = getFunction(db, oldName).copy(name = FunctionIdentifier(newName, Some(db)))
+ val catalogFunc = getFunction(db, oldName)
+ .copy(identifier = FunctionIdentifier(newName, Some(db)))
val hiveFunc = toHiveFunction(catalogFunc, db)
client.alterFunction(db, oldName, hiveFunc)
}
override def alterFunction(db: String, func: CatalogFunction): Unit = withHiveState {
- client.alterFunction(db, func.name.funcName, toHiveFunction(func, db))
+ client.alterFunction(db, func.identifier.funcName, toHiveFunction(func, db))
}
override def getFunctionOption(
@@ -611,7 +612,7 @@ private[hive] class HiveClientImpl(
private def toHiveFunction(f: CatalogFunction, db: String): HiveFunction = {
new HiveFunction(
- f.name.funcName,
+ f.identifier.funcName,
db,
f.className,
null,
@@ -639,7 +640,7 @@ private[hive] class HiveClientImpl(
}
private def toHiveTable(table: CatalogTable): HiveTable = {
- val hiveTable = new HiveTable(table.database, table.name.table)
+ val hiveTable = new HiveTable(table.database, table.identifier.table)
hiveTable.setTableType(table.tableType match {
case CatalogTableType.EXTERNAL_TABLE => HiveTableType.EXTERNAL_TABLE
case CatalogTableType.MANAGED_TABLE => HiveTableType.MANAGED_TABLE
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala
index 5a61eef0f2..29f7dc2997 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala
@@ -38,7 +38,7 @@ case class CreateTableAsSelect(
allowExisting: Boolean)
extends RunnableCommand {
- private val tableIdentifier = tableDesc.name
+ private val tableIdentifier = tableDesc.identifier
override def children: Seq[LogicalPlan] = Seq(query)
@@ -93,6 +93,8 @@ case class CreateTableAsSelect(
}
override def argString: String = {
- s"[Database:${tableDesc.database}}, TableName: ${tableDesc.name.table}, InsertIntoHiveTable]"
+ s"[Database:${tableDesc.database}}, " +
+ s"TableName: ${tableDesc.identifier.table}, " +
+ s"InsertIntoHiveTable]"
}
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateViewAsSelect.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateViewAsSelect.scala
index 9ff520da1d..33cd8b4480 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateViewAsSelect.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateViewAsSelect.scala
@@ -44,7 +44,7 @@ private[hive] case class CreateViewAsSelect(
assert(tableDesc.schema == Nil || tableDesc.schema.length == childSchema.length)
assert(tableDesc.viewText.isDefined)
- private val tableIdentifier = tableDesc.name
+ private val tableIdentifier = tableDesc.identifier
override def run(sqlContext: SQLContext): Seq[Row] = {
val hiveContext = sqlContext.asInstanceOf[HiveContext]
@@ -116,7 +116,7 @@ private[hive] case class CreateViewAsSelect(
}
val viewText = tableDesc.viewText.get
- val viewName = quote(tableDesc.name.table)
+ val viewName = quote(tableDesc.identifier.table)
s"SELECT $viewOutput FROM ($viewText) $viewName"
}
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
index a1785ca038..4afc8d18a6 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -78,7 +78,7 @@ class TestHiveContext private[hive](
executionHive: HiveClientImpl,
metadataHive: HiveClient,
isRootContext: Boolean,
- hiveCatalog: HiveCatalog,
+ hiveCatalog: HiveExternalCatalog,
val warehousePath: File,
val scratchDirPath: File,
metastoreTemporaryConf: Map[String, String])
@@ -110,7 +110,7 @@ class TestHiveContext private[hive](
executionHive,
metadataHive,
true,
- new HiveCatalog(metadataHive),
+ new HiveExternalCatalog(metadataHive),
warehousePath,
scratchDirPath,
metastoreTemporaryConf)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveCatalogSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogSuite.scala
index 427f5747a0..3334c16f0b 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveCatalogSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogSuite.scala
@@ -26,9 +26,9 @@ import org.apache.spark.sql.hive.client.{HiveClient, IsolatedClientLoader}
import org.apache.spark.util.Utils
/**
- * Test suite for the [[HiveCatalog]].
+ * Test suite for the [[HiveExternalCatalog]].
*/
-class HiveCatalogSuite extends CatalogTestCases {
+class HiveExternalCatalogSuite extends CatalogTestCases {
private val client: HiveClient = {
IsolatedClientLoader.forVersion(
@@ -41,7 +41,7 @@ class HiveCatalogSuite extends CatalogTestCases {
protected override val utils: CatalogTestUtils = new CatalogTestUtils {
override val tableInputFormat: String = "org.apache.hadoop.mapred.SequenceFileInputFormat"
override val tableOutputFormat: String = "org.apache.hadoop.mapred.SequenceFileOutputFormat"
- override def newEmptyCatalog(): ExternalCatalog = new HiveCatalog(client)
+ override def newEmptyCatalog(): ExternalCatalog = new HiveExternalCatalog(client)
}
protected override def resetState(): Unit = client.reset()
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala
index 1c775db9b6..0aaf57649c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveQlSuite.scala
@@ -54,8 +54,8 @@ class HiveQlSuite extends SparkFunSuite with BeforeAndAfterAll {
val (desc, exists) = extractTableDesc(s1)
assert(exists)
- assert(desc.name.database == Some("mydb"))
- assert(desc.name.table == "page_view")
+ assert(desc.identifier.database == Some("mydb"))
+ assert(desc.identifier.table == "page_view")
assert(desc.tableType == CatalogTableType.EXTERNAL_TABLE)
assert(desc.storage.locationUri == Some("/user/external/page_view"))
assert(desc.schema ==
@@ -100,8 +100,8 @@ class HiveQlSuite extends SparkFunSuite with BeforeAndAfterAll {
val (desc, exists) = extractTableDesc(s2)
assert(exists)
- assert(desc.name.database == Some("mydb"))
- assert(desc.name.table == "page_view")
+ assert(desc.identifier.database == Some("mydb"))
+ assert(desc.identifier.table == "page_view")
assert(desc.tableType == CatalogTableType.EXTERNAL_TABLE)
assert(desc.storage.locationUri == Some("/user/external/page_view"))
assert(desc.schema ==
@@ -127,8 +127,8 @@ class HiveQlSuite extends SparkFunSuite with BeforeAndAfterAll {
val s3 = """CREATE TABLE page_view AS SELECT * FROM src"""
val (desc, exists) = extractTableDesc(s3)
assert(exists == false)
- assert(desc.name.database == None)
- assert(desc.name.table == "page_view")
+ assert(desc.identifier.database == None)
+ assert(desc.identifier.table == "page_view")
assert(desc.tableType == CatalogTableType.MANAGED_TABLE)
assert(desc.storage.locationUri == None)
assert(desc.schema == Seq.empty[CatalogColumn])
@@ -162,8 +162,8 @@ class HiveQlSuite extends SparkFunSuite with BeforeAndAfterAll {
| ORDER BY key, value""".stripMargin
val (desc, exists) = extractTableDesc(s5)
assert(exists == false)
- assert(desc.name.database == None)
- assert(desc.name.table == "ctas2")
+ assert(desc.identifier.database == None)
+ assert(desc.identifier.table == "ctas2")
assert(desc.tableType == CatalogTableType.MANAGED_TABLE)
assert(desc.storage.locationUri == None)
assert(desc.schema == Seq.empty[CatalogColumn])
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala
index 5272f4192e..e8188e5f02 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala
@@ -34,7 +34,7 @@ class ListTablesSuite extends QueryTest with TestHiveSingleton with BeforeAndAft
super.beforeAll()
// The catalog in HiveContext is a case insensitive one.
sessionState.catalog.createTempTable(
- "ListTablesSuiteTable", df.logicalPlan, ignoreIfExists = true)
+ "ListTablesSuiteTable", df.logicalPlan, overrideIfExists = true)
sql("CREATE TABLE HiveListTablesSuiteTable (key int, value string)")
sql("CREATE DATABASE IF NOT EXISTS ListTablesSuiteDB")
sql("CREATE TABLE ListTablesSuiteDB.HiveInDBListTablesSuiteTable (key int, value string)")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index 71652897e6..3c299daa77 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -722,7 +722,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
withTable(tableName) {
val schema = StructType(StructField("int", IntegerType, true) :: Nil)
val hiveTable = CatalogTable(
- name = TableIdentifier(tableName, Some("default")),
+ identifier = TableIdentifier(tableName, Some("default")),
tableType = CatalogTableType.MANAGED_TABLE,
schema = Seq.empty,
storage = CatalogStorageFormat(
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index d59bca4c7e..8b0719209d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -148,7 +148,7 @@ class VersionsSuite extends SparkFunSuite with Logging {
test(s"$version: createTable") {
val table =
CatalogTable(
- name = TableIdentifier("src", Some("default")),
+ identifier = TableIdentifier("src", Some("default")),
tableType = CatalogTableType.MANAGED_TABLE,
schema = Seq(CatalogColumn("key", "int")),
storage = CatalogStorageFormat(