author     Dongjoon Hyun <dongjoon@apache.org>            2016-12-29 21:22:13 +0100
committer  Herman van Hovell <hvanhovell@databricks.com>  2016-12-29 21:22:13 +0100
commit     752d9eeb9bff7934a0645ffd8059cde42da8eeef (patch)
tree       110742cc845026c3c281dd3e90d3e50c66f87255 /sql/core
parent     7d19b6ab7d75b95d9eb1c7e1f228d23fd482306e (diff)
[SPARK-19012][SQL] Fix `createTempViewCommand` to throw AnalysisException instead of ParseException
## What changes were proposed in this pull request?

Currently, `createTempView`, `createOrReplaceTempView`, and `createGlobalTempView` throw a `ParseException` for invalid table names. We should show a clearer error message instead. This PR also adds and updates the missing descriptions in the API docs.

**BEFORE**
```
scala> spark.range(10).createOrReplaceTempView("11111")
org.apache.spark.sql.catalyst.parser.ParseException:
mismatched input '11111' expecting {'SELECT', 'FROM', 'ADD', ...}(line 1, pos 0)

== SQL ==
11111
...
```

**AFTER**
```
scala> spark.range(10).createOrReplaceTempView("11111")
org.apache.spark.sql.AnalysisException: Invalid view name: 11111;
...
```

## How was this patch tested?

Pass the Jenkins with an updated test case.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #16427 from dongjoon-hyun/SPARK-19012.
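With this change, callers can catch the `AnalysisException` directly. As a minimal sketch (the `createTempViewSafely` helper is hypothetical, not part of the API), a caller might fall back to a backtick-quoted name, which the updated `DataFrameSuite` test below shows is accepted:

```
import org.apache.spark.sql.{AnalysisException, DataFrame}

// Hypothetical helper: register a temp view, retrying with a backtick-quoted name
// if the raw name is rejected as an invalid view name.
def createTempViewSafely(df: DataFrame, name: String): Unit = {
  try {
    df.createOrReplaceTempView(name)
  } catch {
    case e: AnalysisException if e.getMessage.contains("Invalid view name") =>
      // Backtick-quoting turns names such as "11111" or "t~" into valid identifiers,
      // as the updated DataFrameSuite test exercises.
      df.createOrReplaceTempView(s"`$name`")
  }
}
```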
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala         13
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala  18
2 files changed, 20 insertions, 11 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index 29397b1340..c1cedd8541 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -39,6 +39,7 @@ import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.json.JacksonGenerator
import org.apache.spark.sql.catalyst.optimizer.CombineUnions
+import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.physical.{Partitioning, PartitioningCollection}
@@ -2569,7 +2570,7 @@ class Dataset[T] private[sql](
* created it, i.e. it will be automatically dropped when the session terminates. It's not
* tied to any databases, i.e. we can't use `db1.view1` to reference a local temporary view.
*
- * @throws AnalysisException if the view name already exists
+ * @throws AnalysisException if the view name is invalid or already exists
*
* @group basic
* @since 2.0.0
@@ -2601,7 +2602,7 @@ class Dataset[T] private[sql](
* preserved database `_global_temp`, and we must use the qualified name to refer a global temp
* view, e.g. `SELECT * FROM _global_temp.view1`.
*
- * @throws AnalysisException if the view name already exists
+ * @throws AnalysisException if the view name is invalid or already exists
*
* @group basic
* @since 2.1.0
@@ -2616,8 +2617,14 @@ class Dataset[T] private[sql](
replace: Boolean,
global: Boolean): CreateViewCommand = {
val viewType = if (global) GlobalTempView else LocalTempView
+
+ val tableIdentifier = try {
+ sparkSession.sessionState.sqlParser.parseTableIdentifier(viewName)
+ } catch {
+ case _: ParseException => throw new AnalysisException(s"Invalid view name: $viewName")
+ }
CreateViewCommand(
- name = sparkSession.sessionState.sqlParser.parseTableIdentifier(viewName),
+ name = tableIdentifier,
userSpecifiedColumns = Nil,
comment = None,
properties = Map.empty,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index 312cd17c26..f4df80fd9c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -1518,14 +1518,16 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
test("SPARK-12982: Add table name validation in temp table registration") {
val df = Seq("foo", "bar").map(Tuple1.apply).toDF("col")
- // invalid table name test as below
- intercept[AnalysisException](df.createOrReplaceTempView("t~"))
- // valid table name test as below
- df.createOrReplaceTempView("table1")
- // another invalid table name test as below
- intercept[AnalysisException](df.createOrReplaceTempView("#$@sum"))
- // another invalid table name test as below
- intercept[AnalysisException](df.createOrReplaceTempView("table!#"))
+ // invalid table names
+ Seq("11111", "t~", "#$@sum", "table!#").foreach { name =>
+ val m = intercept[AnalysisException](df.createOrReplaceTempView(name)).getMessage
+ assert(m.contains(s"Invalid view name: $name"))
+ }
+
+ // valid table names
+ Seq("table1", "`11111`", "`t~`", "`#$@sum`", "`table!#`").foreach { name =>
+ df.createOrReplaceTempView(name)
+ }
}
test("assertAnalyzed shouldn't replace original stack trace") {