aboutsummaryrefslogtreecommitdiff
path: root/sql/catalyst
diff options
context:
space:
mode:
authorWenchen Fan <wenchen@databricks.com>2016-09-18 21:15:35 +0800
committerWenchen Fan <wenchen@databricks.com>2016-09-18 21:15:35 +0800
commit3fe630d314cf50d69868b7707ac8d8d2027080b8 (patch)
tree9108a526cf2d18ddec7c5e2278e38c3849a54773 /sql/catalyst
parent3a3c9ffbd282244407e9437c2b02ae7e062dd183 (diff)
downloadspark-3fe630d314cf50d69868b7707ac8d8d2027080b8.tar.gz
spark-3fe630d314cf50d69868b7707ac8d8d2027080b8.tar.bz2
spark-3fe630d314cf50d69868b7707ac8d8d2027080b8.zip
[SPARK-17541][SQL] fix some DDL bugs about table management when same-name temp view exists
## What changes were proposed in this pull request? In `SessionCatalog`, we have several operations (`tableExists`, `dropTable`, `lookupRelation`, etc.) that handle both temp views and metastore tables/views. This brings some bugs to DDL commands that want to handle temp views only or metastore tables/views only. These bugs are: 1. `CREATE TABLE USING` will fail if a same-name temp view exists 2. `Catalog.dropTempView` will un-cache and drop a metastore table if a same-name table exists 3. `saveAsTable` will fail or have unexpected behaviour if a same-name temp view exists. These bug fixes are pulled out from https://github.com/apache/spark/pull/14962 and target both the master and 2.0 branches ## How was this patch tested? new regression tests Author: Wenchen Fan <wenchen@databricks.com> Closes #15099 from cloud-fan/fix-view.
Diffstat (limited to 'sql/catalyst')
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala32
-rw-r--r--sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala24
2 files changed, 33 insertions, 23 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 9fb5db573b..574c3d7eee 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -325,9 +325,9 @@ class SessionCatalog(
new Path(new Path(dbLocation), formatTableName(tableIdent.table)).toString
}
- // -------------------------------------------------------------
- // | Methods that interact with temporary and metastore tables |
- // -------------------------------------------------------------
+ // ----------------------------------------------
+ // | Methods that interact with temp views only |
+ // ----------------------------------------------
/**
* Create a temporary table.
@@ -344,6 +344,24 @@ class SessionCatalog(
}
/**
+ * Return a temporary view exactly as it was stored.
+ */
+ def getTempView(name: String): Option[LogicalPlan] = synchronized {
+ tempTables.get(formatTableName(name))
+ }
+
+ /**
+ * Drop a temporary view.
+ */
+ def dropTempView(name: String): Unit = synchronized {
+ tempTables.remove(formatTableName(name))
+ }
+
+ // -------------------------------------------------------------
+ // | Methods that interact with temporary and metastore tables |
+ // -------------------------------------------------------------
+
+ /**
* Rename a table.
*
* If a database is specified in `oldName`, this will rename the table in that database.
@@ -492,14 +510,6 @@ class SessionCatalog(
tempTables.clear()
}
- /**
- * Return a temporary table exactly as it was stored.
- * For testing only.
- */
- private[catalog] def getTempTable(name: String): Option[LogicalPlan] = synchronized {
- tempTables.get(formatTableName(name))
- }
-
// ----------------------------------------------------------------------------
// Partitions
// ----------------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index 012df629bb..84b77ad250 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -201,16 +201,16 @@ class SessionCatalogSuite extends SparkFunSuite {
val tempTable2 = Range(1, 20, 2, 10)
catalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
catalog.createTempView("tbl2", tempTable2, overrideIfExists = false)
- assert(catalog.getTempTable("tbl1") == Option(tempTable1))
- assert(catalog.getTempTable("tbl2") == Option(tempTable2))
- assert(catalog.getTempTable("tbl3").isEmpty)
+ assert(catalog.getTempView("tbl1") == Option(tempTable1))
+ assert(catalog.getTempView("tbl2") == Option(tempTable2))
+ assert(catalog.getTempView("tbl3").isEmpty)
// Temporary table already exists
intercept[TempTableAlreadyExistsException] {
catalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
}
// Temporary table already exists but we override it
catalog.createTempView("tbl1", tempTable2, overrideIfExists = true)
- assert(catalog.getTempTable("tbl1") == Option(tempTable2))
+ assert(catalog.getTempView("tbl1") == Option(tempTable2))
}
test("drop table") {
@@ -251,11 +251,11 @@ class SessionCatalogSuite extends SparkFunSuite {
val tempTable = Range(1, 10, 2, 10)
sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false)
sessionCatalog.setCurrentDatabase("db2")
- assert(sessionCatalog.getTempTable("tbl1") == Some(tempTable))
+ assert(sessionCatalog.getTempView("tbl1") == Some(tempTable))
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
// If database is not specified, temp table should be dropped first
sessionCatalog.dropTable(TableIdentifier("tbl1"), ignoreIfNotExists = false, purge = false)
- assert(sessionCatalog.getTempTable("tbl1") == None)
+ assert(sessionCatalog.getTempView("tbl1") == None)
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
// If temp table does not exist, the table in the current database should be dropped
sessionCatalog.dropTable(TableIdentifier("tbl1"), ignoreIfNotExists = false, purge = false)
@@ -265,7 +265,7 @@ class SessionCatalogSuite extends SparkFunSuite {
sessionCatalog.createTable(newTable("tbl1", "db2"), ignoreIfExists = false)
sessionCatalog.dropTable(TableIdentifier("tbl1", Some("db2")), ignoreIfNotExists = false,
purge = false)
- assert(sessionCatalog.getTempTable("tbl1") == Some(tempTable))
+ assert(sessionCatalog.getTempView("tbl1") == Some(tempTable))
assert(externalCatalog.listTables("db2").toSet == Set("tbl2"))
}
@@ -303,17 +303,17 @@ class SessionCatalogSuite extends SparkFunSuite {
val tempTable = Range(1, 10, 2, 10)
sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false)
sessionCatalog.setCurrentDatabase("db2")
- assert(sessionCatalog.getTempTable("tbl1") == Option(tempTable))
+ assert(sessionCatalog.getTempView("tbl1") == Option(tempTable))
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
// If database is not specified, temp table should be renamed first
sessionCatalog.renameTable(TableIdentifier("tbl1"), "tbl3")
- assert(sessionCatalog.getTempTable("tbl1").isEmpty)
- assert(sessionCatalog.getTempTable("tbl3") == Option(tempTable))
+ assert(sessionCatalog.getTempView("tbl1").isEmpty)
+ assert(sessionCatalog.getTempView("tbl3") == Option(tempTable))
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
// If database is specified, temp tables are never renamed
sessionCatalog.renameTable(TableIdentifier("tbl2", Some("db2")), "tbl4")
- assert(sessionCatalog.getTempTable("tbl3") == Option(tempTable))
- assert(sessionCatalog.getTempTable("tbl4").isEmpty)
+ assert(sessionCatalog.getTempView("tbl3") == Option(tempTable))
+ assert(sessionCatalog.getTempView("tbl4").isEmpty)
assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl4"))
}