author    Xin Wu <xinwu@us.ibm.com>    2017-02-13 19:45:58 -0800
committer Xiao Li <gatorsmile@gmail.com>    2017-02-13 19:45:58 -0800
commit    1ab97310e83ee138a1b210c0dfa89a341f1d530a (patch)
tree      d73150111baf59f4c16af65cf4ea8a3c1607ee4e /sql
parent    6e45b547ceadbbe8394bf149945b7942df82660a (diff)
[SPARK-19539][SQL] Block duplicate temp table during creation
## What changes were proposed in this pull request?

`CREATE TEMPORARY TABLE ...` is currently deprecated, and users are recommended to use `CREATE TEMPORARY VIEW ...` instead; it also does not support an `IF NOT EXISTS` clause. However, if a temporary view with the same name already exists, issuing `CREATE TEMPORARY TABLE ...` silently replaces that view, so an existing view can be destroyed unintentionally.

This PR disallows `CREATE TEMPORARY TABLE ...` when a temporary view with the same name already exists. Under the cover, `CREATE TEMPORARY TABLE ...` still creates a temporary view, but it now passes the flag `replace = false` instead of the current `replace = true`, so the creation is blocked when a view with the same name is already defined.

## How was this patch tested?

A new unit test case is added, and some existing test cases are updated to adapt to the new behavior.

Author: Xin Wu <xinwu@us.ibm.com>

Closes #16878 from xwu0226/block_duplicate_temp_table.
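For illustration, here is a minimal sketch of the behavior change in a Spark session. The view name and queries are illustrative, not taken from the patch; the exception and message match the new `DDLSuite` test case below:

```scala
// A temporary view already exists in the session.
spark.sql("CREATE TEMPORARY VIEW t_temp AS SELECT 1, 2")

// Before this patch, the deprecated syntax was parsed with replace = true and
// silently replaced the existing view. It is now parsed with replace = false,
// so the duplicate name is rejected:
spark.sql("CREATE TEMPORARY TABLE t_temp (c3 INT, c4 STRING) USING json")
// throws TempTableAlreadyExistsException:
//   Temporary table 't_temp' already exists

// Replacing a temp view still works, but only via the explicit VIEW syntax:
spark.sql("CREATE OR REPLACE TEMPORARY VIEW t_temp AS SELECT 3, 4")
```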
Diffstat (limited to 'sql')
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala | 4
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala | 18
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala | 58
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/RowDataSourceStrategySuite.scala | 4
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala | 105
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala | 64
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala | 14
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala | 4
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala | 8
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala | 18
10 files changed, 160 insertions(+), 137 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index ca76a10f79..d508002352 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -425,7 +425,9 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
logWarning(s"CREATE TEMPORARY TABLE ... USING ... is deprecated, please use " +
"CREATE TEMPORARY VIEW ... USING ... instead")
- CreateTempViewUsing(table, schema, replace = true, global = false, provider, options)
+ // Unlike CREATE TEMPORARY VIEW USING, CREATE TEMPORARY TABLE USING does not support
+ // IF NOT EXISTS. Users are not allowed to replace the existing temp table.
+ CreateTempViewUsing(table, schema, replace = false, global = false, provider, options)
} else {
CreateTable(tableDesc, mode, None)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 9c95b12795..40d0ce0992 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -1571,7 +1571,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
}
}
- test("specifying database name for a temporary table is not allowed") {
+ test("specifying database name for a temporary view is not allowed") {
withTempPath { dir =>
val path = dir.toURI.toString
val df =
@@ -1585,23 +1585,23 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
intercept[AnalysisException] {
spark.sql(
s"""
- |CREATE TEMPORARY TABLE db.t
- |USING parquet
- |OPTIONS (
- | path '$path'
- |)
- """.stripMargin)
+ |CREATE TEMPORARY VIEW db.t
+ |USING parquet
+ |OPTIONS (
+ | path '$path'
+ |)
+ """.stripMargin)
}.getMessage
// If you use backticks to quote the name then it's OK.
spark.sql(
s"""
- |CREATE TEMPORARY TABLE `db.t`
+ |CREATE TEMPORARY VIEW `db.t`
|USING parquet
|OPTIONS (
| path '$path'
|)
- """.stripMargin)
+ """.stripMargin)
checkAnswer(spark.table("`db.t`"), df)
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index bcb707c8fd..278d247250 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -903,24 +903,24 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
withTempView("show1a", "show2b") {
sql(
"""
- |CREATE TEMPORARY TABLE show1a
- |USING org.apache.spark.sql.sources.DDLScanSource
- |OPTIONS (
- | From '1',
- | To '10',
- | Table 'test1'
- |
- |)
+ |CREATE TEMPORARY VIEW show1a
+ |USING org.apache.spark.sql.sources.DDLScanSource
+ |OPTIONS (
+ | From '1',
+ | To '10',
+ | Table 'test1'
+ |
+ |)
""".stripMargin)
sql(
"""
- |CREATE TEMPORARY TABLE show2b
- |USING org.apache.spark.sql.sources.DDLScanSource
- |OPTIONS (
- | From '1',
- | To '10',
- | Table 'test1'
- |)
+ |CREATE TEMPORARY VIEW show2b
+ |USING org.apache.spark.sql.sources.DDLScanSource
+ |OPTIONS (
+ | From '1',
+ | To '10',
+ | Table 'test1'
+ |)
""".stripMargin)
assert(
sql("SHOW TABLE EXTENDED LIKE 'show*'").count() >= 2)
@@ -958,20 +958,20 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
Nil)
}
- test("drop table - temporary table") {
+ test("drop view - temporary view") {
val catalog = spark.sessionState.catalog
sql(
"""
- |CREATE TEMPORARY TABLE tab1
- |USING org.apache.spark.sql.sources.DDLScanSource
- |OPTIONS (
- | From '1',
- | To '10',
- | Table 'test1'
- |)
+ |CREATE TEMPORARY VIEW tab1
+ |USING org.apache.spark.sql.sources.DDLScanSource
+ |OPTIONS (
+ | From '1',
+ | To '10',
+ | Table 'test1'
+ |)
""".stripMargin)
assert(catalog.listTables("default") == Seq(TableIdentifier("tab1")))
- sql("DROP TABLE tab1")
+ sql("DROP VIEW tab1")
assert(catalog.listTables("default") == Nil)
}
@@ -1690,6 +1690,16 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
}
}
+ test("block creating duplicate temp table") {
+ withView("t_temp") {
+ sql("CREATE TEMPORARY VIEW t_temp AS SELECT 1, 2")
+ val e = intercept[TempTableAlreadyExistsException] {
+ sql("CREATE TEMPORARY TABLE t_temp (c3 int, c4 string) USING JSON")
+ }.getMessage
+ assert(e.contains("Temporary table 't_temp' already exists"))
+ }
+ }
+
test("truncate table - external table, temporary table, view (not allowed)") {
import testImplicits._
withTempPath { tempDir =>
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/RowDataSourceStrategySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/RowDataSourceStrategySuite.scala
index d9afa46353..e8bf21a2a9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/RowDataSourceStrategySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/RowDataSourceStrategySuite.scala
@@ -52,10 +52,10 @@ class RowDataSourceStrategySuite extends SparkFunSuite with BeforeAndAfter with
conn.commit()
sql(
s"""
- |CREATE TEMPORARY TABLE inttypes
+ |CREATE OR REPLACE TEMPORARY VIEW inttypes
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable 'TEST.INTTYPES', user 'testUser', password 'testPass')
- """.stripMargin.replaceAll("\n", " "))
+ """.stripMargin.replaceAll("\n", " "))
}
after {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
index 491ff72337..df9cebbe58 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
@@ -186,16 +186,17 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils {
}
test("test different encoding") {
- // scalastyle:off
- spark.sql(
- s"""
- |CREATE TEMPORARY TABLE carsTable USING csv
- |OPTIONS (path "${testFile(carsFile8859)}", header "true",
- |charset "iso-8859-1", delimiter "þ")
- """.stripMargin.replaceAll("\n", " "))
- // scalastyle:on
-
- verifyCars(spark.table("carsTable"), withHeader = true)
+ withView("carsTable") {
+ // scalastyle:off
+ spark.sql(
+ s"""
+ |CREATE TEMPORARY VIEW carsTable USING csv
+ |OPTIONS (path "${testFile(carsFile8859)}", header "true",
+ |charset "iso-8859-1", delimiter "þ")
+ """.stripMargin.replaceAll("\n", " "))
+ // scalastyle:on
+ verifyCars(spark.table("carsTable"), withHeader = true)
+ }
}
test("test aliases sep and encoding for delimiter and charset") {
@@ -213,27 +214,31 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils {
}
test("DDL test with tab separated file") {
- spark.sql(
- s"""
- |CREATE TEMPORARY TABLE carsTable USING csv
- |OPTIONS (path "${testFile(carsTsvFile)}", header "true", delimiter "\t")
- """.stripMargin.replaceAll("\n", " "))
-
- verifyCars(spark.table("carsTable"), numFields = 6, withHeader = true, checkHeader = false)
+ withView("carsTable") {
+ spark.sql(
+ s"""
+ |CREATE TEMPORARY VIEW carsTable USING csv
+ |OPTIONS (path "${testFile(carsTsvFile)}", header "true", delimiter "\t")
+ """.stripMargin.replaceAll("\n", " "))
+
+ verifyCars(spark.table("carsTable"), numFields = 6, withHeader = true, checkHeader = false)
+ }
}
test("DDL test parsing decimal type") {
- spark.sql(
- s"""
- |CREATE TEMPORARY TABLE carsTable
- |(yearMade double, makeName string, modelName string, priceTag decimal,
- | comments string, grp string)
- |USING csv
- |OPTIONS (path "${testFile(carsTsvFile)}", header "true", delimiter "\t")
- """.stripMargin.replaceAll("\n", " "))
-
- assert(
- spark.sql("SELECT makeName FROM carsTable where priceTag > 60000").collect().size === 1)
+ withView("carsTable") {
+ spark.sql(
+ s"""
+ |CREATE TEMPORARY VIEW carsTable
+ |(yearMade double, makeName string, modelName string, priceTag decimal,
+ | comments string, grp string)
+ |USING csv
+ |OPTIONS (path "${testFile(carsTsvFile)}", header "true", delimiter "\t")
+ """.stripMargin.replaceAll("\n", " "))
+
+ assert(
+ spark.sql("SELECT makeName FROM carsTable where priceTag > 60000").collect().size === 1)
+ }
}
test("test for DROPMALFORMED parsing mode") {
@@ -300,28 +305,34 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils {
}
test("DDL test with empty file") {
- spark.sql(s"""
- |CREATE TEMPORARY TABLE carsTable
- |(yearMade double, makeName string, modelName string, comments string, grp string)
- |USING csv
- |OPTIONS (path "${testFile(emptyFile)}", header "false")
- """.stripMargin.replaceAll("\n", " "))
-
- assert(spark.sql("SELECT count(*) FROM carsTable").collect().head(0) === 0)
+ withView("carsTable") {
+ spark.sql(
+ s"""
+ |CREATE TEMPORARY VIEW carsTable
+ |(yearMade double, makeName string, modelName string, comments string, grp string)
+ |USING csv
+ |OPTIONS (path "${testFile(emptyFile)}", header "false")
+ """.stripMargin.replaceAll("\n", " "))
+
+ assert(spark.sql("SELECT count(*) FROM carsTable").collect().head(0) === 0)
+ }
}
test("DDL test with schema") {
- spark.sql(s"""
- |CREATE TEMPORARY TABLE carsTable
- |(yearMade double, makeName string, modelName string, comments string, blank string)
- |USING csv
- |OPTIONS (path "${testFile(carsFile)}", header "true")
- """.stripMargin.replaceAll("\n", " "))
-
- val cars = spark.table("carsTable")
- verifyCars(cars, withHeader = true, checkHeader = false, checkValues = false)
- assert(
- cars.schema.fieldNames === Array("yearMade", "makeName", "modelName", "comments", "blank"))
+ withView("carsTable") {
+ spark.sql(
+ s"""
+ |CREATE TEMPORARY VIEW carsTable
+ |(yearMade double, makeName string, modelName string, comments string, blank string)
+ |USING csv
+ |OPTIONS (path "${testFile(carsFile)}", header "true")
+ """.stripMargin.replaceAll("\n", " "))
+
+ val cars = spark.table("carsTable")
+ verifyCars(cars, withHeader = true, checkHeader = false, checkValues = false)
+ assert(
+ cars.schema.fieldNames === Array("yearMade", "makeName", "modelName", "comments", "blank"))
+ }
}
test("save csv") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 14fbe9f443..1cca15542d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -75,26 +75,26 @@ class JDBCSuite extends SparkFunSuite
sql(
s"""
- |CREATE TEMPORARY TABLE foobar
+ |CREATE OR REPLACE TEMPORARY VIEW foobar
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass')
- """.stripMargin.replaceAll("\n", " "))
+ """.stripMargin.replaceAll("\n", " "))
sql(
s"""
- |CREATE TEMPORARY TABLE fetchtwo
+ |CREATE OR REPLACE TEMPORARY VIEW fetchtwo
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass',
| ${JDBCOptions.JDBC_BATCH_FETCH_SIZE} '2')
- """.stripMargin.replaceAll("\n", " "))
+ """.stripMargin.replaceAll("\n", " "))
sql(
s"""
- |CREATE TEMPORARY TABLE parts
+ |CREATE OR REPLACE TEMPORARY VIEW parts
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass',
| partitionColumn 'THEID', lowerBound '1', upperBound '4', numPartitions '3')
- """.stripMargin.replaceAll("\n", " "))
+ """.stripMargin.replaceAll("\n", " "))
conn.prepareStatement("create table test.inttypes (a INT, b BOOLEAN, c TINYINT, "
+ "d SMALLINT, e BIGINT)").executeUpdate()
@@ -105,10 +105,10 @@ class JDBCSuite extends SparkFunSuite
conn.commit()
sql(
s"""
- |CREATE TEMPORARY TABLE inttypes
+ |CREATE OR REPLACE TEMPORARY VIEW inttypes
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable 'TEST.INTTYPES', user 'testUser', password 'testPass')
- """.stripMargin.replaceAll("\n", " "))
+ """.stripMargin.replaceAll("\n", " "))
conn.prepareStatement("create table test.strtypes (a BINARY(20), b VARCHAR(20), "
+ "c VARCHAR_IGNORECASE(20), d CHAR(20), e BLOB, f CLOB)").executeUpdate()
@@ -122,10 +122,10 @@ class JDBCSuite extends SparkFunSuite
stmt.executeUpdate()
sql(
s"""
- |CREATE TEMPORARY TABLE strtypes
+ |CREATE OR REPLACE TEMPORARY VIEW strtypes
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable 'TEST.STRTYPES', user 'testUser', password 'testPass')
- """.stripMargin.replaceAll("\n", " "))
+ """.stripMargin.replaceAll("\n", " "))
conn.prepareStatement("create table test.timetypes (a TIME, b DATE, c TIMESTAMP)"
).executeUpdate()
@@ -136,10 +136,10 @@ class JDBCSuite extends SparkFunSuite
conn.commit()
sql(
s"""
- |CREATE TEMPORARY TABLE timetypes
+ |CREATE OR REPLACE TEMPORARY VIEW timetypes
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable 'TEST.TIMETYPES', user 'testUser', password 'testPass')
- """.stripMargin.replaceAll("\n", " "))
+ """.stripMargin.replaceAll("\n", " "))
conn.prepareStatement("create table test.flttypes (a DOUBLE, b REAL, c DECIMAL(38, 18))"
@@ -151,27 +151,27 @@ class JDBCSuite extends SparkFunSuite
conn.commit()
sql(
s"""
- |CREATE TEMPORARY TABLE flttypes
+ |CREATE OR REPLACE TEMPORARY VIEW flttypes
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable 'TEST.FLTTYPES', user 'testUser', password 'testPass')
- """.stripMargin.replaceAll("\n", " "))
+ """.stripMargin.replaceAll("\n", " "))
conn.prepareStatement(
s"""
|create table test.nulltypes (a INT, b BOOLEAN, c TINYINT, d BINARY(20), e VARCHAR(20),
|f VARCHAR_IGNORECASE(20), g CHAR(20), h BLOB, i CLOB, j TIME, k DATE, l TIMESTAMP,
|m DOUBLE, n REAL, o DECIMAL(38, 18))
- """.stripMargin.replaceAll("\n", " ")).executeUpdate()
+ """.stripMargin.replaceAll("\n", " ")).executeUpdate()
conn.prepareStatement("insert into test.nulltypes values ("
+ "null, null, null, null, null, null, null, null, null, "
+ "null, null, null, null, null, null)").executeUpdate()
conn.commit()
sql(
s"""
- |CREATE TEMPORARY TABLE nulltypes
+ |CREATE OR REPLACE TEMPORARY VIEW nulltypes
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable 'TEST.NULLTYPES', user 'testUser', password 'testPass')
- """.stripMargin.replaceAll("\n", " "))
+ """.stripMargin.replaceAll("\n", " "))
conn.prepareStatement(
"create table test.emp(name TEXT(32) NOT NULL," +
@@ -198,11 +198,11 @@ class JDBCSuite extends SparkFunSuite
sql(
s"""
- |CREATE TEMPORARY TABLE nullparts
- |USING org.apache.spark.sql.jdbc
- |OPTIONS (url '$url', dbtable 'TEST.EMP', user 'testUser', password 'testPass',
- |partitionColumn '"Dept"', lowerBound '1', upperBound '4', numPartitions '3')
- """.stripMargin.replaceAll("\n", " "))
+ |CREATE OR REPLACE TEMPORARY VIEW nullparts
+ |USING org.apache.spark.sql.jdbc
+ |OPTIONS (url '$url', dbtable 'TEST.EMP', user 'testUser', password 'testPass',
+ |partitionColumn '"Dept"', lowerBound '1', upperBound '4', numPartitions '3')
+ """.stripMargin.replaceAll("\n", " "))
conn.prepareStatement(
"""create table test."mixedCaseCols" ("Name" TEXT(32), "Id" INTEGER NOT NULL)""")
@@ -214,10 +214,10 @@ class JDBCSuite extends SparkFunSuite
sql(
s"""
- |CREATE TEMPORARY TABLE mixedCaseCols
- |USING org.apache.spark.sql.jdbc
- |OPTIONS (url '$url', dbtable 'TEST."mixedCaseCols"', user 'testUser', password 'testPass')
- """.stripMargin.replaceAll("\n", " "))
+ |CREATE OR REPLACE TEMPORARY VIEW mixedCaseCols
+ |USING org.apache.spark.sql.jdbc
+ |OPTIONS (url '$url', dbtable 'TEST."mixedCaseCols"', user 'testUser', password 'testPass')
+ """.stripMargin.replaceAll("\n", " "))
// Untested: IDENTITY, OTHER, UUID, ARRAY, and GEOMETRY types.
}
@@ -371,11 +371,11 @@ class JDBCSuite extends SparkFunSuite
// Regression test for bug SPARK-7345
sql(
s"""
- |CREATE TEMPORARY TABLE renamed
+ |CREATE OR REPLACE TEMPORARY VIEW renamed
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable '(select NAME as NAME1, NAME as NAME2 from TEST.PEOPLE)',
|user 'testUser', password 'testPass')
- """.stripMargin.replaceAll("\n", " "))
+ """.stripMargin.replaceAll("\n", " "))
val df = sql("SELECT * FROM renamed")
assert(df.schema.fields.size == 2)
@@ -589,11 +589,11 @@ class JDBCSuite extends SparkFunSuite
test("SQL query as table name") {
sql(
s"""
- |CREATE TEMPORARY TABLE hack
+ |CREATE OR REPLACE TEMPORARY VIEW hack
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable '(SELECT B, B*B FROM TEST.FLTTYPES)',
| user 'testUser', password 'testPass')
- """.stripMargin.replaceAll("\n", " "))
+ """.stripMargin.replaceAll("\n", " "))
val rows = sql("SELECT * FROM hack").collect()
assert(rows(0).getDouble(0) === 1.00000011920928955) // Yes, I meant ==.
// For some reason, H2 computes this square incorrectly...
@@ -606,11 +606,11 @@ class JDBCSuite extends SparkFunSuite
intercept[JdbcSQLException] {
sql(
s"""
- |CREATE TEMPORARY TABLE abc
+ |CREATE OR REPLACE TEMPORARY VIEW abc
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable '(SELECT _ROWID_ FROM test.people)',
| user 'testUser', password 'testPass')
- """.stripMargin.replaceAll("\n", " "))
+ """.stripMargin.replaceAll("\n", " "))
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala
index e535d4dc88..674463feca 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLTestSuite.scala
@@ -83,13 +83,13 @@ class DDLTestSuite extends DataSourceTest with SharedSQLContext {
super.beforeAll()
sql(
"""
- |CREATE TEMPORARY TABLE ddlPeople
- |USING org.apache.spark.sql.sources.DDLScanSource
- |OPTIONS (
- | From '1',
- | To '10',
- | Table 'test1'
- |)
+ |CREATE OR REPLACE TEMPORARY VIEW ddlPeople
+ |USING org.apache.spark.sql.sources.DDLScanSource
+ |OPTIONS (
+ | From '1',
+ | To '10',
+ | Table 'test1'
+ |)
""".stripMargin)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
index 5b215ca07f..4fc2f81f54 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
@@ -34,7 +34,7 @@ class InsertSuite extends DataSourceTest with SharedSQLContext {
spark.read.json(rdd).createOrReplaceTempView("jt")
sql(
s"""
- |CREATE TEMPORARY TABLE jsonTable (a int, b string)
+ |CREATE TEMPORARY VIEW jsonTable (a int, b string)
|USING org.apache.spark.sql.json.DefaultSource
|OPTIONS (
| path '${path.toURI.toString}'
@@ -293,7 +293,7 @@ class InsertSuite extends DataSourceTest with SharedSQLContext {
test("it's not allowed to insert into a relation that is not an InsertableRelation") {
sql(
"""
- |CREATE TEMPORARY TABLE oneToTen
+ |CREATE TEMPORARY VIEW oneToTen
|USING org.apache.spark.sql.sources.SimpleScanSource
|OPTIONS (
| From '1',
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index 8fda1c5875..6937e97a47 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -148,8 +148,8 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
withTempView("parquet_temp") {
sql(
"""
- |CREATE TEMPORARY TABLE parquet_temp (c1 INT, c2 STRING)
- |USING org.apache.spark.sql.parquet.DefaultSource
+ |CREATE TEMPORARY VIEW parquet_temp (c1 INT, c2 STRING)
+ |USING org.apache.spark.sql.parquet.DefaultSource
""".stripMargin)
// An empty sequence of row is returned for session temporary table.
@@ -401,8 +401,8 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
withTempView("parquet_temp") {
sql(
"""
- |CREATE TEMPORARY TABLE parquet_temp (c1 INT, c2 STRING)
- |USING org.apache.spark.sql.parquet.DefaultSource
+ |CREATE TEMPORARY VIEW parquet_temp (c1 INT, c2 STRING)
+ |USING org.apache.spark.sql.parquet.DefaultSource
""".stripMargin)
// An empty sequence of row is returned for session temporary table.
intercept[NoSuchTableException] {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 20f30e48ab..faed8b5046 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -1290,7 +1290,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
"interval 4 minutes 59 seconds 889 milliseconds 987 microseconds")))
}
- test("specifying database name for a temporary table is not allowed") {
+ test("specifying database name for a temporary view is not allowed") {
withTempPath { dir =>
val path = dir.toURI.toString
val df = sparkContext.parallelize(1 to 10).map(i => (i, i.toString)).toDF("num", "str")
@@ -1303,23 +1303,23 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
intercept[AnalysisException] {
spark.sql(
s"""
- |CREATE TEMPORARY TABLE db.t
- |USING parquet
- |OPTIONS (
- | path '$path'
- |)
- """.stripMargin)
+ |CREATE TEMPORARY VIEW db.t
+ |USING parquet
+ |OPTIONS (
+ | path '$path'
+ |)
+ """.stripMargin)
}
// If you use backticks to quote the name then it's OK.
spark.sql(
s"""
- |CREATE TEMPORARY TABLE `db.t`
+ |CREATE TEMPORARY VIEW `db.t`
|USING parquet
|OPTIONS (
| path '$path'
|)
- """.stripMargin)
+ """.stripMargin)
checkAnswer(spark.table("`db.t`"), df)
}
}