aboutsummaryrefslogtreecommitdiff
path: root/sql/core/src/test
diff options
context:
space:
mode:
authorWenchen Fan <wenchen@databricks.com>2016-10-10 15:48:57 +0800
committerWenchen Fan <wenchen@databricks.com>2016-10-10 15:48:57 +0800
commit23ddff4b2b2744c3dc84d928e144c541ad5df376 (patch)
treef61b64ea46adbd1eb424a0bbb8e8e383d1ee4e3b /sql/core/src/test
parent16590030c15b32e83b584283697b6f783cffe043 (diff)
downloadspark-23ddff4b2b2744c3dc84d928e144c541ad5df376.tar.gz
spark-23ddff4b2b2744c3dc84d928e144c541ad5df376.tar.bz2
spark-23ddff4b2b2744c3dc84d928e144c541ad5df376.zip
[SPARK-17338][SQL] add global temp view
## What changes were proposed in this pull request? Global temporary view is a cross-session temporary view, which means it's shared among all sessions. Its lifetime is the lifetime of the Spark application, i.e. it will be automatically dropped when the application terminates. It's tied to a system preserved database `global_temp` (configurable via SparkConf), and we must use the qualified name to refer to a global temp view, e.g. SELECT * FROM global_temp.view1. changes for `SessionCatalog`: 1. add a new field `globalTempViews: GlobalTempViewManager`, to access the shared global temp views, and the global temp db name. 2. `createDatabase` will fail if users want to create `global_temp`, which is system preserved. 3. `setCurrentDatabase` will fail if users want to set `global_temp`, which is system preserved. 4. add `createGlobalTempView`, which is used in `CreateViewCommand` to create global temp views. 5. add `dropGlobalTempView`, which is used in `CatalogImpl` to drop global temp view. 6. add `alterTempViewDefinition`, which is used in `AlterViewAsCommand` to update the view definition for local/global temp views. 7. `renameTable`/`dropTable`/`isTemporaryTable`/`lookupRelation`/`getTempViewOrPermanentTableMetadata`/`refreshTable` will handle global temp views. changes for SQL commands: 1. `CreateViewCommand`/`AlterViewAsCommand` are updated to support global temp views 2. `ShowTablesCommand` outputs a new column `database`, which is used to distinguish global and local temp views. 3. other commands can also handle global temp views if they call `SessionCatalog` APIs which accept global temp views, e.g. `DropTableCommand`, `AlterTableRenameCommand`, `ShowColumnsCommand`, etc. changes for other public API 1. add a new method `dropGlobalTempView` in `Catalog` 2. `Catalog.findTable` can find global temp view 3. add a new method `createGlobalTempView` in `Dataset` ## How was this patch tested? 
new tests in `SQLViewSuite` Author: Wenchen Fan <wenchen@databricks.com> Closes #14897 from cloud-fan/global-temp-view.
Diffstat (limited to 'sql/core/src/test')
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala11
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala168
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala10
3 files changed, 179 insertions, 10 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala
index 001c1a1d85..2b35db411e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala
@@ -88,11 +88,11 @@ class SQLContextSuite extends SparkFunSuite with SharedSparkContext {
df.createOrReplaceTempView("listtablessuitetable")
assert(
sqlContext.tables().filter("tableName = 'listtablessuitetable'").collect().toSeq ==
- Row("listtablessuitetable", true) :: Nil)
+ Row("", "listtablessuitetable", true) :: Nil)
assert(
sqlContext.sql("SHOW tables").filter("tableName = 'listtablessuitetable'").collect().toSeq ==
- Row("listtablessuitetable", true) :: Nil)
+ Row("", "listtablessuitetable", true) :: Nil)
sqlContext.sessionState.catalog.dropTable(
TableIdentifier("listtablessuitetable"), ignoreIfNotExists = true, purge = false)
@@ -105,11 +105,11 @@ class SQLContextSuite extends SparkFunSuite with SharedSparkContext {
df.createOrReplaceTempView("listtablessuitetable")
assert(
sqlContext.tables("default").filter("tableName = 'listtablessuitetable'").collect().toSeq ==
- Row("listtablessuitetable", true) :: Nil)
+ Row("", "listtablessuitetable", true) :: Nil)
assert(
sqlContext.sql("show TABLES in default").filter("tableName = 'listtablessuitetable'")
- .collect().toSeq == Row("listtablessuitetable", true) :: Nil)
+ .collect().toSeq == Row("", "listtablessuitetable", true) :: Nil)
sqlContext.sessionState.catalog.dropTable(
TableIdentifier("listtablessuitetable"), ignoreIfNotExists = true, purge = false)
@@ -122,7 +122,8 @@ class SQLContextSuite extends SparkFunSuite with SharedSparkContext {
df.createOrReplaceTempView("listtablessuitetable")
val expectedSchema = StructType(
- StructField("tableName", StringType, false) ::
+ StructField("database", StringType, false) ::
+ StructField("tableName", StringType, false) ::
StructField("isTemporary", BooleanType, false) :: Nil)
Seq(sqlContext.tables(), sqlContext.sql("SHOW TABLes")).foreach {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala
new file mode 100644
index 0000000000..391bcb8b35
--- /dev/null
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/GlobalTempViewSuite.scala
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution
+
+import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
+import org.apache.spark.sql.catalog.Table
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
+import org.apache.spark.sql.test.SharedSQLContext
+import org.apache.spark.sql.types.StructType
+
+class GlobalTempViewSuite extends QueryTest with SharedSQLContext {
+ import testImplicits._
+
+ override protected def beforeAll(): Unit = {
+ super.beforeAll()
+ globalTempDB = spark.sharedState.globalTempViewManager.database
+ }
+
+ private var globalTempDB: String = _
+
+ test("basic semantic") {
+ sql("CREATE GLOBAL TEMP VIEW src AS SELECT 1, 'a'")
+
+ // If there is no database in table name, we should try local temp view first, if not found,
+ // try table/view in current database, which is "default" in this case. So we expect
+ // NoSuchTableException here.
+ intercept[NoSuchTableException](spark.table("src"))
+
+ // Use qualified name to refer to the global temp view explicitly.
+ checkAnswer(spark.table(s"$globalTempDB.src"), Row(1, "a"))
+
+ // Table name without database will never refer to a global temp view.
+ intercept[NoSuchTableException](sql("DROP VIEW src"))
+
+ sql(s"DROP VIEW $globalTempDB.src")
+ // The global temp view should be dropped successfully.
+ intercept[NoSuchTableException](spark.table(s"$globalTempDB.src"))
+
+ // We can also use Dataset API to create global temp view
+ Seq(1 -> "a").toDF("i", "j").createGlobalTempView("src")
+ checkAnswer(spark.table(s"$globalTempDB.src"), Row(1, "a"))
+
+ // Use qualified name to rename a global temp view.
+ sql(s"ALTER VIEW $globalTempDB.src RENAME TO src2")
+ intercept[NoSuchTableException](spark.table(s"$globalTempDB.src"))
+ checkAnswer(spark.table(s"$globalTempDB.src2"), Row(1, "a"))
+
+ // Use qualified name to alter a global temp view.
+ sql(s"ALTER VIEW $globalTempDB.src2 AS SELECT 2, 'b'")
+ checkAnswer(spark.table(s"$globalTempDB.src2"), Row(2, "b"))
+
+ // We can also use Catalog API to drop global temp view
+ spark.catalog.dropGlobalTempView("src2")
+ intercept[NoSuchTableException](spark.table(s"$globalTempDB.src2"))
+ }
+
+ test("global temp view is shared among all sessions") {
+ try {
+ sql("CREATE GLOBAL TEMP VIEW src AS SELECT 1, 2")
+ checkAnswer(spark.table(s"$globalTempDB.src"), Row(1, 2))
+ val newSession = spark.newSession()
+ checkAnswer(newSession.table(s"$globalTempDB.src"), Row(1, 2))
+ } finally {
+ spark.catalog.dropGlobalTempView("src")
+ }
+ }
+
+ test("global temp view database should be preserved") {
+ val e = intercept[AnalysisException](sql(s"CREATE DATABASE $globalTempDB"))
+ assert(e.message.contains("system preserved database"))
+
+ val e2 = intercept[AnalysisException](sql(s"USE $globalTempDB"))
+ assert(e2.message.contains("system preserved database"))
+ }
+
+ test("CREATE GLOBAL TEMP VIEW USING") {
+ withTempPath { path =>
+ try {
+ Seq(1 -> "a").toDF("i", "j").write.parquet(path.getAbsolutePath)
+ sql(s"CREATE GLOBAL TEMP VIEW src USING parquet OPTIONS (PATH '${path.getAbsolutePath}')")
+ checkAnswer(spark.table(s"$globalTempDB.src"), Row(1, "a"))
+ sql(s"INSERT INTO $globalTempDB.src SELECT 2, 'b'")
+ checkAnswer(spark.table(s"$globalTempDB.src"), Row(1, "a") :: Row(2, "b") :: Nil)
+ } finally {
+ spark.catalog.dropGlobalTempView("src")
+ }
+ }
+ }
+
+ test("CREATE TABLE LIKE should work for global temp view") {
+ try {
+ sql("CREATE GLOBAL TEMP VIEW src AS SELECT 1 AS a, '2' AS b")
+ sql(s"CREATE TABLE cloned LIKE ${globalTempDB}.src")
+ val tableMeta = spark.sessionState.catalog.getTableMetadata(TableIdentifier("cloned"))
+ assert(tableMeta.schema == new StructType().add("a", "int", false).add("b", "string", false))
+ } finally {
+ spark.catalog.dropGlobalTempView("src")
+ sql("DROP TABLE default.cloned")
+ }
+ }
+
+ test("list global temp views") {
+ try {
+ sql("CREATE GLOBAL TEMP VIEW v1 AS SELECT 3, 4")
+ sql("CREATE TEMP VIEW v2 AS SELECT 1, 2")
+
+ checkAnswer(sql(s"SHOW TABLES IN $globalTempDB"),
+ Row(globalTempDB, "v1", true) ::
+ Row("", "v2", true) :: Nil)
+
+ assert(spark.catalog.listTables(globalTempDB).collect().toSeq.map(_.name) == Seq("v1", "v2"))
+ } finally {
+ spark.catalog.dropGlobalTempView("v1")
+ spark.catalog.dropTempView("v2")
+ }
+ }
+
+ test("should lookup global temp view if and only if global temp db is specified") {
+ try {
+ sql("CREATE GLOBAL TEMP VIEW same_name AS SELECT 3, 4")
+ sql("CREATE TEMP VIEW same_name AS SELECT 1, 2")
+
+ checkAnswer(sql("SELECT * FROM same_name"), Row(1, 2))
+
+ // we never lookup global temp views if database is not specified in table name
+ spark.catalog.dropTempView("same_name")
+ intercept[AnalysisException](sql("SELECT * FROM same_name"))
+
+ // Use qualified name to lookup a global temp view.
+ checkAnswer(sql(s"SELECT * FROM $globalTempDB.same_name"), Row(3, 4))
+ } finally {
+ spark.catalog.dropTempView("same_name")
+ spark.catalog.dropGlobalTempView("same_name")
+ }
+ }
+
+ test("public Catalog should recognize global temp view") {
+ try {
+ sql("CREATE GLOBAL TEMP VIEW src AS SELECT 1, 2")
+
+ assert(spark.catalog.tableExists(globalTempDB, "src"))
+ assert(spark.catalog.getTable(globalTempDB, "src").toString == new Table(
+ name = "src",
+ database = globalTempDB,
+ description = null,
+ tableType = "TEMPORARY",
+ isTemporary = true).toString)
+ } finally {
+ spark.catalog.dropGlobalTempView("src")
+ }
+ }
+}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 1bcb810a15..19885156cc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -969,17 +969,17 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
""".stripMargin)
checkAnswer(
sql("SHOW TABLES IN default 'show1*'"),
- Row("show1a", true) :: Nil)
+ Row("", "show1a", true) :: Nil)
checkAnswer(
sql("SHOW TABLES IN default 'show1*|show2*'"),
- Row("show1a", true) ::
- Row("show2b", true) :: Nil)
+ Row("", "show1a", true) ::
+ Row("", "show2b", true) :: Nil)
checkAnswer(
sql("SHOW TABLES 'show1*|show2*'"),
- Row("show1a", true) ::
- Row("show2b", true) :: Nil)
+ Row("", "show1a", true) ::
+ Row("", "show2b", true) :: Nil)
assert(
sql("SHOW TABLES").count() >= 2)