Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala |  2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala       | 25
2 files changed, 26 insertions(+), 1 deletion(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index c3e3b215bb..2a452f4379 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -1212,7 +1212,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
  *
  * For example:
  * {{{
- *   CREATE [TEMPORARY] VIEW [IF NOT EXISTS] [db_name.]view_name
+ *   CREATE [OR REPLACE] [TEMPORARY] VIEW [IF NOT EXISTS] [db_name.]view_name
  *   [(column_name [COMMENT column_comment], ...) ]
  *   [COMMENT view_comment]
  *   [TBLPROPERTIES (property_name = property_value, ...)]
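
The documentation fix above tracks the parser's support for an OR REPLACE clause on view creation. A minimal sketch of the difference between the two forms, assuming a live SparkSession named `spark` and illustrative view/table names:

    // Illustrative only; assumes a SparkSession `spark` is in scope.
    spark.range(10).createOrReplaceTempView("src")

    // Without OR REPLACE (and without IF NOT EXISTS), re-creating an
    // existing temporary view is an error.
    spark.sql("CREATE TEMPORARY VIEW v AS SELECT id FROM src")

    // With OR REPLACE, the new definition replaces the old one instead
    // of raising an error.
    spark.sql("CREATE OR REPLACE TEMPORARY VIEW v AS SELECT id * 2 AS id FROM src")
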
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala
index eacf254cd1..98aa447fc0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql
 import java.io.File

 import org.apache.spark.SparkException
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSQLContext

 /**
@@ -85,4 +86,28 @@ class MetadataCacheSuite extends QueryTest with SharedSQLContext {
       assert(newCount > 0 && newCount < 100)
     }}
   }
+
+  test("case sensitivity support in temporary view refresh") {
+    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
+      withTempView("view_refresh") {
+        withTempPath { (location: File) =>
+          // Create a Parquet directory
+          spark.range(start = 0, end = 100, step = 1, numPartitions = 3)
+            .write.parquet(location.getAbsolutePath)
+
+          // Read the directory in
+          spark.read.parquet(location.getAbsolutePath).createOrReplaceTempView("view_refresh")
+
+          // Delete a file
+          deleteOneFileInDirectory(location)
+          intercept[SparkException](sql("select count(*) from view_refresh").first())
+
+          // Refresh and we should be able to read it again.
+          spark.catalog.refreshTable("vIeW_reFrEsH")
+          val newCount = sql("select count(*) from view_refresh").first().getLong(0)
+          assert(newCount > 0 && newCount < 100)
+        }
+      }
+    }
+  }
 }
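
The new test exercises name resolution during refresh: with spark.sql.caseSensitive set to false, spark.catalog.refreshTable finds the temporary view even when the name is given in a different case. A minimal standalone sketch of the same behavior, assuming a spark-shell style SparkSession `spark` (names are illustrative):

    // Illustrative sketch; assumes a SparkSession `spark` (e.g. spark-shell).
    spark.conf.set("spark.sql.caseSensitive", "false")
    spark.range(100).createOrReplaceTempView("view_refresh")

    // With case-insensitive resolution, a differently cased name still
    // refers to the same temporary view.
    spark.catalog.refreshTable("vIeW_reFrEsH")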