author    Andrew Or <andrew@databricks.com>  2016-03-28 16:25:15 -0700
committer Andrew Or <andrew@databricks.com>  2016-03-28 16:25:15 -0700
commit    eebc8c1c95fb7752d09a5846b7cac65f7702c8f2 (patch)
tree      5be4767998f619a02fae8035dc3f989dafbc6547 /sql/catalyst
parent    34c0638ee6f05aef81d90594dd9b8e06006c2c7f (diff)
[SPARK-13923][SPARK-14014][SQL] Session catalog follow-ups
## What changes were proposed in this pull request?

This patch addresses the remaining comments left in #11750 and #11918 after they were merged. For a full list of changes in this patch, just trace the commits.

## How was this patch tested?

`SessionCatalogSuite` and `CatalogTestCases`

Author: Andrew Or <andrew@databricks.com>

Closes #12006 from andrewor14/session-catalog-followup.
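At its core, the patch renames the `name` field on `CatalogTable` and `CatalogFunction` to `identifier`, since the field holds a `TableIdentifier`/`FunctionIdentifier` rather than a plain string. A minimal sketch of the new shape, using simplified re-declarations (the real case classes live in `org.apache.spark.sql.catalyst` and carry more fields):

```scala
// Simplified re-declarations for illustration only.
case class FunctionIdentifier(funcName: String, database: Option[String] = None)
case class CatalogFunction(identifier: FunctionIdentifier, className: String)

val func = CatalogFunction(FunctionIdentifier("func1", Some("db2")), "com.example.MyFunc")

// Renames now read naturally: copy the identifier, not an ambiguous `name`.
val renamed = func.copy(identifier = func.identifier.copy(funcName = "func2"))
```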
Diffstat (limited to 'sql/catalyst')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala        18
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala         74
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala              14
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala           2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala  2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala        6
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala    30
7 files changed, 74 insertions, 72 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
index e216fa5528..2bbb970ec9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
@@ -155,7 +155,7 @@ class InMemoryCatalog extends ExternalCatalog {
tableDefinition: CatalogTable,
ignoreIfExists: Boolean): Unit = synchronized {
requireDbExists(db)
- val table = tableDefinition.name.table
+ val table = tableDefinition.identifier.table
if (tableExists(db, table)) {
if (!ignoreIfExists) {
throw new AnalysisException(s"Table '$table' already exists in database '$db'")
@@ -182,14 +182,14 @@ class InMemoryCatalog extends ExternalCatalog {
override def renameTable(db: String, oldName: String, newName: String): Unit = synchronized {
requireTableExists(db, oldName)
val oldDesc = catalog(db).tables(oldName)
- oldDesc.table = oldDesc.table.copy(name = TableIdentifier(newName, Some(db)))
+ oldDesc.table = oldDesc.table.copy(identifier = TableIdentifier(newName, Some(db)))
catalog(db).tables.put(newName, oldDesc)
catalog(db).tables.remove(oldName)
}
override def alterTable(db: String, tableDefinition: CatalogTable): Unit = synchronized {
- requireTableExists(db, tableDefinition.name.table)
- catalog(db).tables(tableDefinition.name.table).table = tableDefinition
+ requireTableExists(db, tableDefinition.identifier.table)
+ catalog(db).tables(tableDefinition.identifier.table).table = tableDefinition
}
override def getTable(db: String, table: String): CatalogTable = synchronized {
@@ -296,10 +296,10 @@ class InMemoryCatalog extends ExternalCatalog {
override def createFunction(db: String, func: CatalogFunction): Unit = synchronized {
requireDbExists(db)
- if (functionExists(db, func.name.funcName)) {
+ if (functionExists(db, func.identifier.funcName)) {
throw new AnalysisException(s"Function '$func' already exists in '$db' database")
} else {
- catalog(db).functions.put(func.name.funcName, func)
+ catalog(db).functions.put(func.identifier.funcName, func)
}
}
@@ -310,14 +310,14 @@ class InMemoryCatalog extends ExternalCatalog {
override def renameFunction(db: String, oldName: String, newName: String): Unit = synchronized {
requireFunctionExists(db, oldName)
- val newFunc = getFunction(db, oldName).copy(name = FunctionIdentifier(newName, Some(db)))
+ val newFunc = getFunction(db, oldName).copy(identifier = FunctionIdentifier(newName, Some(db)))
catalog(db).functions.remove(oldName)
catalog(db).functions.put(newName, newFunc)
}
override def alterFunction(db: String, funcDefinition: CatalogFunction): Unit = synchronized {
- requireFunctionExists(db, funcDefinition.name.funcName)
- catalog(db).functions.put(funcDefinition.name.funcName, funcDefinition)
+ requireFunctionExists(db, funcDefinition.identifier.funcName)
+ catalog(db).functions.put(funcDefinition.identifier.funcName, funcDefinition)
}
override def getFunction(db: String, funcName: String): CatalogFunction = synchronized {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 34265faa74..a9cf80764d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -17,9 +17,7 @@
package org.apache.spark.sql.catalyst.catalog
-import java.util.concurrent.ConcurrentHashMap
-
-import scala.collection.JavaConverters._
+import scala.collection.mutable
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.{CatalystConf, SimpleCatalystConf}
@@ -31,6 +29,8 @@ import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
* An internal catalog that is used by a Spark Session. This internal catalog serves as a
* proxy to the underlying metastore (e.g. Hive Metastore) and it also manages temporary
* tables and functions of the Spark Session that it belongs to.
+ *
+ * This class is not thread-safe.
*/
class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
import ExternalCatalog._
@@ -39,8 +39,8 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
this(externalCatalog, new SimpleCatalystConf(true))
}
- protected[this] val tempTables = new ConcurrentHashMap[String, LogicalPlan]
- protected[this] val tempFunctions = new ConcurrentHashMap[String, CatalogFunction]
+ protected[this] val tempTables = new mutable.HashMap[String, LogicalPlan]
+ protected[this] val tempFunctions = new mutable.HashMap[String, CatalogFunction]
// Note: we track current database here because certain operations do not explicitly
// specify the database (e.g. DROP TABLE my_table). In these cases we must first
@@ -122,9 +122,9 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
* If no such database is specified, create it in the current database.
*/
def createTable(tableDefinition: CatalogTable, ignoreIfExists: Boolean): Unit = {
- val db = tableDefinition.name.database.getOrElse(currentDb)
- val table = formatTableName(tableDefinition.name.table)
- val newTableDefinition = tableDefinition.copy(name = TableIdentifier(table, Some(db)))
+ val db = tableDefinition.identifier.database.getOrElse(currentDb)
+ val table = formatTableName(tableDefinition.identifier.table)
+ val newTableDefinition = tableDefinition.copy(identifier = TableIdentifier(table, Some(db)))
externalCatalog.createTable(db, newTableDefinition, ignoreIfExists)
}
@@ -138,9 +138,9 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
* this becomes a no-op.
*/
def alterTable(tableDefinition: CatalogTable): Unit = {
- val db = tableDefinition.name.database.getOrElse(currentDb)
- val table = formatTableName(tableDefinition.name.table)
- val newTableDefinition = tableDefinition.copy(name = TableIdentifier(table, Some(db)))
+ val db = tableDefinition.identifier.database.getOrElse(currentDb)
+ val table = formatTableName(tableDefinition.identifier.table)
+ val newTableDefinition = tableDefinition.copy(identifier = TableIdentifier(table, Some(db)))
externalCatalog.alterTable(db, newTableDefinition)
}
@@ -164,9 +164,9 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
def createTempTable(
name: String,
tableDefinition: LogicalPlan,
- ignoreIfExists: Boolean): Unit = {
+ overrideIfExists: Boolean): Unit = {
val table = formatTableName(name)
- if (tempTables.containsKey(table) && !ignoreIfExists) {
+ if (tempTables.contains(table) && !overrideIfExists) {
throw new AnalysisException(s"Temporary table '$name' already exists.")
}
tempTables.put(table, tableDefinition)
@@ -188,10 +188,11 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
val db = oldName.database.getOrElse(currentDb)
val oldTableName = formatTableName(oldName.table)
val newTableName = formatTableName(newName.table)
- if (oldName.database.isDefined || !tempTables.containsKey(oldTableName)) {
+ if (oldName.database.isDefined || !tempTables.contains(oldTableName)) {
externalCatalog.renameTable(db, oldTableName, newTableName)
} else {
- val table = tempTables.remove(oldTableName)
+ val table = tempTables(oldTableName)
+ tempTables.remove(oldTableName)
tempTables.put(newTableName, table)
}
}
@@ -206,7 +207,7 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
def dropTable(name: TableIdentifier, ignoreIfNotExists: Boolean): Unit = {
val db = name.database.getOrElse(currentDb)
val table = formatTableName(name.table)
- if (name.database.isDefined || !tempTables.containsKey(table)) {
+ if (name.database.isDefined || !tempTables.contains(table)) {
externalCatalog.dropTable(db, table, ignoreIfNotExists)
} else {
tempTables.remove(table)
@@ -224,11 +225,11 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
val db = name.database.getOrElse(currentDb)
val table = formatTableName(name.table)
val relation =
- if (name.database.isDefined || !tempTables.containsKey(table)) {
+ if (name.database.isDefined || !tempTables.contains(table)) {
val metadata = externalCatalog.getTable(db, table)
CatalogRelation(db, metadata, alias)
} else {
- tempTables.get(table)
+ tempTables(table)
}
val qualifiedTable = SubqueryAlias(table, relation)
// If an alias was specified by the lookup, wrap the plan in a subquery so that
@@ -247,7 +248,7 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
def tableExists(name: TableIdentifier): Boolean = {
val db = name.database.getOrElse(currentDb)
val table = formatTableName(name.table)
- if (name.database.isDefined || !tempTables.containsKey(table)) {
+ if (name.database.isDefined || !tempTables.contains(table)) {
externalCatalog.tableExists(db, table)
} else {
true // it's a temporary table
@@ -266,7 +267,7 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
val dbTables =
externalCatalog.listTables(db, pattern).map { t => TableIdentifier(t, Some(db)) }
val regex = pattern.replaceAll("\\*", ".*").r
- val _tempTables = tempTables.keys().asScala
+ val _tempTables = tempTables.keys.toSeq
.filter { t => regex.pattern.matcher(t).matches() }
.map { t => TableIdentifier(t) }
dbTables ++ _tempTables
@@ -290,7 +291,7 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
* For testing only.
*/
private[catalog] def getTempTable(name: String): Option[LogicalPlan] = {
- Option(tempTables.get(name))
+ tempTables.get(name)
}
// ----------------------------------------------------------------------------
@@ -399,9 +400,9 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
* If no such database is specified, create it in the current database.
*/
def createFunction(funcDefinition: CatalogFunction): Unit = {
- val db = funcDefinition.name.database.getOrElse(currentDb)
+ val db = funcDefinition.identifier.database.getOrElse(currentDb)
val newFuncDefinition = funcDefinition.copy(
- name = FunctionIdentifier(funcDefinition.name.funcName, Some(db)))
+ identifier = FunctionIdentifier(funcDefinition.identifier.funcName, Some(db)))
externalCatalog.createFunction(db, newFuncDefinition)
}
@@ -424,9 +425,9 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
* this becomes a no-op.
*/
def alterFunction(funcDefinition: CatalogFunction): Unit = {
- val db = funcDefinition.name.database.getOrElse(currentDb)
+ val db = funcDefinition.identifier.database.getOrElse(currentDb)
val newFuncDefinition = funcDefinition.copy(
- name = FunctionIdentifier(funcDefinition.name.funcName, Some(db)))
+ identifier = FunctionIdentifier(funcDefinition.identifier.funcName, Some(db)))
externalCatalog.alterFunction(db, newFuncDefinition)
}
@@ -439,10 +440,10 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
* This assumes no database is specified in `funcDefinition`.
*/
def createTempFunction(funcDefinition: CatalogFunction, ignoreIfExists: Boolean): Unit = {
- require(funcDefinition.name.database.isEmpty,
+ require(funcDefinition.identifier.database.isEmpty,
"attempted to create a temporary function while specifying a database")
- val name = funcDefinition.name.funcName
- if (tempFunctions.containsKey(name) && !ignoreIfExists) {
+ val name = funcDefinition.identifier.funcName
+ if (tempFunctions.contains(name) && !ignoreIfExists) {
throw new AnalysisException(s"Temporary function '$name' already exists.")
}
tempFunctions.put(name, funcDefinition)
@@ -455,7 +456,7 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
// Hive has DROP FUNCTION and DROP TEMPORARY FUNCTION. We may want to consolidate
// dropFunction and dropTempFunction.
def dropTempFunction(name: String, ignoreIfNotExists: Boolean): Unit = {
- if (!tempFunctions.containsKey(name) && !ignoreIfNotExists) {
+ if (!tempFunctions.contains(name) && !ignoreIfNotExists) {
throw new AnalysisException(
s"Temporary function '$name' cannot be dropped because it does not exist!")
}
@@ -476,11 +477,12 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
throw new AnalysisException("rename does not support moving functions across databases")
}
val db = oldName.database.getOrElse(currentDb)
- if (oldName.database.isDefined || !tempFunctions.containsKey(oldName.funcName)) {
+ if (oldName.database.isDefined || !tempFunctions.contains(oldName.funcName)) {
externalCatalog.renameFunction(db, oldName.funcName, newName.funcName)
} else {
- val func = tempFunctions.remove(oldName.funcName)
- val newFunc = func.copy(name = func.name.copy(funcName = newName.funcName))
+ val func = tempFunctions(oldName.funcName)
+ val newFunc = func.copy(identifier = func.identifier.copy(funcName = newName.funcName))
+ tempFunctions.remove(oldName.funcName)
tempFunctions.put(newName.funcName, newFunc)
}
}
@@ -494,10 +496,10 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
*/
def getFunction(name: FunctionIdentifier): CatalogFunction = {
val db = name.database.getOrElse(currentDb)
- if (name.database.isDefined || !tempFunctions.containsKey(name.funcName)) {
+ if (name.database.isDefined || !tempFunctions.contains(name.funcName)) {
externalCatalog.getFunction(db, name.funcName)
} else {
- tempFunctions.get(name.funcName)
+ tempFunctions(name.funcName)
}
}
@@ -510,7 +512,7 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
val dbFunctions =
externalCatalog.listFunctions(db, pattern).map { f => FunctionIdentifier(f, Some(db)) }
val regex = pattern.replaceAll("\\*", ".*").r
- val _tempFunctions = tempFunctions.keys().asScala
+ val _tempFunctions = tempFunctions.keys.toSeq
.filter { f => regex.pattern.matcher(f).matches() }
.map { f => FunctionIdentifier(f) }
dbFunctions ++ _tempFunctions
@@ -520,7 +522,7 @@ class SessionCatalog(externalCatalog: ExternalCatalog, conf: CatalystConf) {
* Return a temporary function. For testing only.
*/
private[catalog] def getTempFunction(name: String): Option[CatalogFunction] = {
- Option(tempFunctions.get(name))
+ tempFunctions.get(name)
}
}
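With `SessionCatalog` now documented as not thread-safe, the `ConcurrentHashMap`s become plain `mutable.HashMap`s (whose `get` already returns an `Option`, removing the `Option(...)` wrapping around nullable `get` calls), and the temp-table flag is renamed from `ignoreIfExists` to `overrideIfExists` to match what the code actually does. A standalone toy sketch of the resulting semantics (`LogicalPlan` elided to `String`; the real method throws `AnalysisException`):

```scala
import scala.collection.mutable

// Toy registry mirroring the temp-table behavior above; not the real class.
class TempTableRegistry {
  private val tempTables = new mutable.HashMap[String, String]

  // When overrideIfExists is true an existing entry is silently replaced;
  // the old name `ignoreIfExists` wrongly suggested the call was a no-op.
  def createTempTable(name: String, plan: String, overrideIfExists: Boolean): Unit = {
    if (tempTables.contains(name) && !overrideIfExists) {
      throw new IllegalStateException(s"Temporary table '$name' already exists.")
    }
    tempTables.put(name, plan)
  }

  // mutable.HashMap.get returns Option directly, unlike ConcurrentHashMap's
  // nullable get, so no Option(...) wrapping is needed.
  def getTempTable(name: String): Option[String] = tempTables.get(name)
}
```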
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
index 34803133f6..8bb8e09a28 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
@@ -169,10 +169,10 @@ abstract class ExternalCatalog {
/**
* A function defined in the catalog.
*
- * @param name name of the function
+ * @param identifier name of the function
* @param className fully qualified class name, e.g. "org.apache.spark.util.MyFunc"
*/
-case class CatalogFunction(name: FunctionIdentifier, className: String)
+case class CatalogFunction(identifier: FunctionIdentifier, className: String)
/**
@@ -216,7 +216,7 @@ case class CatalogTablePartition(
* future once we have a better understanding of how we want to handle skewed columns.
*/
case class CatalogTable(
- name: TableIdentifier,
+ identifier: TableIdentifier,
tableType: CatalogTableType,
storage: CatalogStorageFormat,
schema: Seq[CatalogColumn],
@@ -230,12 +230,12 @@ case class CatalogTable(
viewText: Option[String] = None) {
/** Return the database this table was specified to belong to, assuming it exists. */
- def database: String = name.database.getOrElse {
- throw new AnalysisException(s"table $name did not specify database")
+ def database: String = identifier.database.getOrElse {
+ throw new AnalysisException(s"table $identifier did not specify database")
}
/** Return the fully qualified name of this table, assuming the database was specified. */
- def qualifiedName: String = name.unquotedString
+ def qualifiedName: String = identifier.unquotedString
/** Syntactic sugar to update a field in `storage`. */
def withNewStorage(
@@ -290,6 +290,6 @@ case class CatalogRelation(
// TODO: implement this
override def output: Seq[Attribute] = Seq.empty
- require(metadata.name.database == Some(db),
+ require(metadata.identifier.database == Some(db),
"provided database does not much the one specified in the table definition")
}
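The convenience accessors on `CatalogTable` now read off `identifier`. A hedged sketch of how they derive, with simplified stand-ins (the real `TableIdentifier` also provides a quoted form, and `CatalogTable` throws `AnalysisException`):

```scala
// Simplified stand-ins for illustration only.
case class TableIdentifier(table: String, database: Option[String] = None) {
  def unquotedString: String = database.map(db => s"$db.$table").getOrElse(table)
}

case class SimpleCatalogTable(identifier: TableIdentifier) {
  // Fails fast when the table was defined without an explicit database.
  def database: String = identifier.database.getOrElse {
    throw new IllegalArgumentException(s"table $identifier did not specify database")
  }
  def qualifiedName: String = identifier.unquotedString
}

// SimpleCatalogTable(TableIdentifier("tbl1", Some("db2"))).qualifiedName == "db2.tbl1"
```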
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
index 6fa4beed99..34cb97699d 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
@@ -31,7 +31,7 @@ trait AnalysisTest extends PlanTest {
private def makeAnalyzer(caseSensitive: Boolean): Analyzer = {
val conf = new SimpleCatalystConf(caseSensitive)
val catalog = new SessionCatalog(new InMemoryCatalog, conf)
- catalog.createTempTable("TaBlE", TestRelations.testRelation, ignoreIfExists = true)
+ catalog.createTempTable("TaBlE", TestRelations.testRelation, overrideIfExists = true)
new Analyzer(catalog, EmptyFunctionRegistry, conf) {
override val extendedResolutionRules = EliminateSubqueryAliases :: Nil
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
index 31501864a8..6c08ccc34c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
@@ -52,7 +52,7 @@ class DecimalPrecisionSuite extends PlanTest with BeforeAndAfter {
private val b: Expression = UnresolvedAttribute("b")
before {
- catalog.createTempTable("table", relation, ignoreIfExists = true)
+ catalog.createTempTable("table", relation, overrideIfExists = true)
}
private def checkType(expression: Expression, expectedType: DataType): Unit = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
index 277c2d717e..959bd564d9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
@@ -210,7 +210,7 @@ abstract class CatalogTestCases extends SparkFunSuite with BeforeAndAfterEach {
}
test("get table") {
- assert(newBasicCatalog().getTable("db2", "tbl1").name.table == "tbl1")
+ assert(newBasicCatalog().getTable("db2", "tbl1").identifier.table == "tbl1")
}
test("get table when database/table does not exist") {
@@ -452,7 +452,7 @@ abstract class CatalogTestCases extends SparkFunSuite with BeforeAndAfterEach {
assert(catalog.getFunction("db2", "func1").className == funcClass)
catalog.renameFunction("db2", "func1", newName)
intercept[AnalysisException] { catalog.getFunction("db2", "func1") }
- assert(catalog.getFunction("db2", newName).name.funcName == newName)
+ assert(catalog.getFunction("db2", newName).identifier.funcName == newName)
assert(catalog.getFunction("db2", newName).className == funcClass)
intercept[AnalysisException] { catalog.renameFunction("db2", "does_not_exist", "me") }
}
@@ -549,7 +549,7 @@ abstract class CatalogTestUtils {
def newTable(name: String, database: Option[String] = None): CatalogTable = {
CatalogTable(
- name = TableIdentifier(name, database),
+ identifier = TableIdentifier(name, database),
tableType = CatalogTableType.EXTERNAL_TABLE,
storage = storageFormat,
schema = Seq(CatalogColumn("col1", "int"), CatalogColumn("col2", "string")),
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index 74e995cc5b..2948c5f8bd 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -197,17 +197,17 @@ class SessionCatalogSuite extends SparkFunSuite {
val catalog = new SessionCatalog(newBasicCatalog())
val tempTable1 = Range(1, 10, 1, 10, Seq())
val tempTable2 = Range(1, 20, 2, 10, Seq())
- catalog.createTempTable("tbl1", tempTable1, ignoreIfExists = false)
- catalog.createTempTable("tbl2", tempTable2, ignoreIfExists = false)
+ catalog.createTempTable("tbl1", tempTable1, overrideIfExists = false)
+ catalog.createTempTable("tbl2", tempTable2, overrideIfExists = false)
assert(catalog.getTempTable("tbl1") == Some(tempTable1))
assert(catalog.getTempTable("tbl2") == Some(tempTable2))
assert(catalog.getTempTable("tbl3") == None)
// Temporary table already exists
intercept[AnalysisException] {
- catalog.createTempTable("tbl1", tempTable1, ignoreIfExists = false)
+ catalog.createTempTable("tbl1", tempTable1, overrideIfExists = false)
}
// Temporary table already exists but we override it
- catalog.createTempTable("tbl1", tempTable2, ignoreIfExists = true)
+ catalog.createTempTable("tbl1", tempTable2, overrideIfExists = true)
assert(catalog.getTempTable("tbl1") == Some(tempTable2))
}
@@ -243,7 +243,7 @@ class SessionCatalogSuite extends SparkFunSuite {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
val tempTable = Range(1, 10, 2, 10, Seq())
- sessionCatalog.createTempTable("tbl1", tempTable, ignoreIfExists = false)
+ sessionCatalog.createTempTable("tbl1", tempTable, overrideIfExists = false)
sessionCatalog.setCurrentDatabase("db2")
assert(sessionCatalog.getTempTable("tbl1") == Some(tempTable))
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
@@ -255,7 +255,7 @@ class SessionCatalogSuite extends SparkFunSuite {
sessionCatalog.dropTable(TableIdentifier("tbl1"), ignoreIfNotExists = false)
assert(externalCatalog.listTables("db2").toSet == Set("tbl2"))
// If database is specified, temp tables are never dropped
- sessionCatalog.createTempTable("tbl1", tempTable, ignoreIfExists = false)
+ sessionCatalog.createTempTable("tbl1", tempTable, overrideIfExists = false)
sessionCatalog.createTable(newTable("tbl1", "db2"), ignoreIfExists = false)
sessionCatalog.dropTable(TableIdentifier("tbl1", Some("db2")), ignoreIfNotExists = false)
assert(sessionCatalog.getTempTable("tbl1") == Some(tempTable))
@@ -299,7 +299,7 @@ class SessionCatalogSuite extends SparkFunSuite {
val externalCatalog = newBasicCatalog()
val sessionCatalog = new SessionCatalog(externalCatalog)
val tempTable = Range(1, 10, 2, 10, Seq())
- sessionCatalog.createTempTable("tbl1", tempTable, ignoreIfExists = false)
+ sessionCatalog.createTempTable("tbl1", tempTable, overrideIfExists = false)
sessionCatalog.setCurrentDatabase("db2")
assert(sessionCatalog.getTempTable("tbl1") == Some(tempTable))
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
@@ -327,7 +327,7 @@ class SessionCatalogSuite extends SparkFunSuite {
assert(newTbl1.properties.get("toh") == Some("frem"))
// Alter table without explicitly specifying database
sessionCatalog.setCurrentDatabase("db2")
- sessionCatalog.alterTable(tbl1.copy(name = TableIdentifier("tbl1")))
+ sessionCatalog.alterTable(tbl1.copy(identifier = TableIdentifier("tbl1")))
val newestTbl1 = externalCatalog.getTable("db2", "tbl1")
assert(newestTbl1 == tbl1)
}
@@ -368,7 +368,7 @@ class SessionCatalogSuite extends SparkFunSuite {
val sessionCatalog = new SessionCatalog(externalCatalog)
val tempTable1 = Range(1, 10, 1, 10, Seq())
val metastoreTable1 = externalCatalog.getTable("db2", "tbl1")
- sessionCatalog.createTempTable("tbl1", tempTable1, ignoreIfExists = false)
+ sessionCatalog.createTempTable("tbl1", tempTable1, overrideIfExists = false)
sessionCatalog.setCurrentDatabase("db2")
// If we explicitly specify the database, we'll look up the relation in that database
assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1", Some("db2")))
@@ -406,7 +406,7 @@ class SessionCatalogSuite extends SparkFunSuite {
assert(!catalog.tableExists(TableIdentifier("tbl2", Some("db1"))))
// If database is explicitly specified, do not check temporary tables
val tempTable = Range(1, 10, 1, 10, Seq())
- catalog.createTempTable("tbl3", tempTable, ignoreIfExists = false)
+ catalog.createTempTable("tbl3", tempTable, overrideIfExists = false)
assert(!catalog.tableExists(TableIdentifier("tbl3", Some("db2"))))
// If database is not explicitly specified, check the current database
catalog.setCurrentDatabase("db2")
@@ -418,8 +418,8 @@ class SessionCatalogSuite extends SparkFunSuite {
test("list tables without pattern") {
val catalog = new SessionCatalog(newBasicCatalog())
val tempTable = Range(1, 10, 2, 10, Seq())
- catalog.createTempTable("tbl1", tempTable, ignoreIfExists = false)
- catalog.createTempTable("tbl4", tempTable, ignoreIfExists = false)
+ catalog.createTempTable("tbl1", tempTable, overrideIfExists = false)
+ catalog.createTempTable("tbl4", tempTable, overrideIfExists = false)
assert(catalog.listTables("db1").toSet ==
Set(TableIdentifier("tbl1"), TableIdentifier("tbl4")))
assert(catalog.listTables("db2").toSet ==
@@ -435,8 +435,8 @@ class SessionCatalogSuite extends SparkFunSuite {
test("list tables with pattern") {
val catalog = new SessionCatalog(newBasicCatalog())
val tempTable = Range(1, 10, 2, 10, Seq())
- catalog.createTempTable("tbl1", tempTable, ignoreIfExists = false)
- catalog.createTempTable("tbl4", tempTable, ignoreIfExists = false)
+ catalog.createTempTable("tbl1", tempTable, overrideIfExists = false)
+ catalog.createTempTable("tbl4", tempTable, overrideIfExists = false)
assert(catalog.listTables("db1", "*").toSet == catalog.listTables("db1").toSet)
assert(catalog.listTables("db2", "*").toSet == catalog.listTables("db2").toSet)
assert(catalog.listTables("db2", "tbl*").toSet ==
@@ -826,7 +826,7 @@ class SessionCatalogSuite extends SparkFunSuite {
sessionCatalog.createFunction(newFunc("func1", Some("db2")))
sessionCatalog.renameFunction(FunctionIdentifier("func1"), FunctionIdentifier("func4"))
assert(sessionCatalog.getTempFunction("func4") ==
- Some(tempFunc.copy(name = FunctionIdentifier("func4"))))
+ Some(tempFunc.copy(identifier = FunctionIdentifier("func4"))))
assert(sessionCatalog.getTempFunction("func1") == None)
assert(externalCatalog.listFunctions("db2", "*").toSet == Set("func1", "func3"))
// Then, if no such temporary function exists, rename the function in the current database
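The excerpt above exercises the dispatch rule from the `renameFunction` hunk: an unqualified name that matches a temp function is renamed in the session-local map, while a database-qualified name always goes to the metastore. A standalone toy of that rule (the metastore is stubbed as a plain map; all names here are illustrative):

```scala
import scala.collection.mutable

case class FunctionIdentifier(funcName: String, database: Option[String] = None)

// Toy catalog stubbing out the external metastore as a plain map.
class ToyFunctionCatalog(currentDb: String) {
  val tempFunctions = mutable.HashMap.empty[String, String]        // name -> className
  val metastore = mutable.HashMap.empty[(String, String), String]  // (db, name) -> className

  def renameFunction(oldName: FunctionIdentifier, newName: FunctionIdentifier): Unit = {
    val db = oldName.database.getOrElse(currentDb)
    if (oldName.database.isDefined || !tempFunctions.contains(oldName.funcName)) {
      // Qualified name, or no matching temp function: rename in the metastore.
      val cls = metastore.remove((db, oldName.funcName)).get
      metastore((db, newName.funcName)) = cls
    } else {
      // Unqualified name shadowed by a temp function: rename session-locally.
      val cls = tempFunctions.remove(oldName.funcName).get
      tempFunctions(newName.funcName) = cls
    }
  }
}
```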