author     gatorsmile <gatorsmile@gmail.com>      2016-07-06 21:43:55 +0800
committer  Wenchen Fan <wenchen@databricks.com>   2016-07-06 21:43:55 +0800
commit     21eadd1d8cbf029197e73ffca1cba54d5a890c01 (patch)
tree       c45f9be1e0255bf868d97fd0bddc487e436c5867 /sql/hive/src
parent     909c6d812f6ca3a3305e4611a700c8c17905b953 (diff)
[SPARK-16229][SQL] Drop Empty Table After CREATE TABLE AS SELECT fails
#### What changes were proposed in this pull request?

In `CREATE TABLE AS SELECT`, if the `SELECT` query fails, the table should not exist afterwards. For example:

```SQL
CREATE TABLE tab
STORED AS TEXTFILE
SELECT 1 AS a, (SELECT a FROM (SELECT 1 AS a UNION ALL SELECT 2 AS a) t) AS b
```

The query above fails as expected, but an empty table `tab` is still left behind. This PR drops the created table when the insertion hits any non-fatal exception.

#### How was this patch tested?

Added a test case to verify the behavior.

Author: gatorsmile <gatorsmile@gmail.com>

Closes #13926 from gatorsmile/dropTableAfterException.
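The fix is a compensating-cleanup pattern: create the table, try to populate it, and on any non-fatal failure drop it again before re-throwing. Below is a minimal standalone sketch of that pattern; the `createTable`/`populateTable`/`dropTable` helpers are hypothetical stand-ins, not Spark's actual catalog API.

```scala
import scala.util.control.NonFatal

object CtasCleanupSketch {
  // Hypothetical stand-ins for the catalog/DDL operations used by the real command.
  def createTable(name: String): Unit = println(s"created table $name")
  def populateTable(name: String): Unit = throw new RuntimeException("SELECT query failed")
  def dropTable(name: String, ignoreIfNotExists: Boolean): Unit = println(s"dropped table $name")

  /** Create the table, then populate it; if population fails, drop the table and re-throw. */
  def createTableAsSelect(name: String): Unit = {
    createTable(name)
    try {
      populateTable(name)
    } catch {
      case NonFatal(e) =>
        // Undo the DDL so a failed CTAS does not leave an empty table behind.
        dropTable(name, ignoreIfNotExists = true)
        throw e
    }
  }

  def main(args: Array[String]): Unit = {
    try createTableAsSelect("tab")
    catch { case NonFatal(e) => println(s"CTAS failed as expected: ${e.getMessage}") }
  }
}
```

Re-throwing after the drop keeps the original failure visible to the caller, while the cleanup guarantees no empty table survives the failed CTAS.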
Diffstat (limited to 'sql/hive/src')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateHiveTableAsSelectCommand.scala | 13
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala                    | 15
2 files changed, 26 insertions(+), 2 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateHiveTableAsSelectCommand.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateHiveTableAsSelectCommand.scala
index b8099385a4..15a5d79dcb 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateHiveTableAsSelectCommand.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateHiveTableAsSelectCommand.scala
@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql.hive.execution
 
+import scala.util.control.NonFatal
+
 import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
 import org.apache.spark.sql.catalyst.catalog.{CatalogColumn, CatalogTable}
 import org.apache.spark.sql.catalyst.plans.logical.{InsertIntoTable, LogicalPlan}
@@ -87,8 +89,15 @@ case class CreateHiveTableAsSelectCommand(
         throw new AnalysisException(s"$tableIdentifier already exists.")
       }
     } else {
-      sparkSession.sessionState.executePlan(InsertIntoTable(
-        metastoreRelation, Map(), query, overwrite = true, ifNotExists = false)).toRdd
+      try {
+        sparkSession.sessionState.executePlan(InsertIntoTable(
+          metastoreRelation, Map(), query, overwrite = true, ifNotExists = false)).toRdd
+      } catch {
+        case NonFatal(e) =>
+          // drop the created table.
+          sparkSession.sessionState.catalog.dropTable(tableIdentifier, ignoreIfNotExists = true)
+          throw e
+      }
     }
 
     Seq.empty[Row]
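Note that the cleanup above only fires for exceptions matched by `scala.util.control.NonFatal`, which deliberately excludes fatal conditions such as `VirtualMachineError` (e.g. `OutOfMemoryError`), `ThreadDeath`, `InterruptedException`, `LinkageError`, and `ControlThrowable`, letting them propagate untouched. A small illustrative sketch of that distinction (the `NonFatalDemo` object is hypothetical, not part of the patch):

```scala
import scala.util.control.NonFatal

object NonFatalDemo {
  // Classify a Throwable the same way the catch clause in the patch does.
  def classify(t: Throwable): String = t match {
    case NonFatal(_) => "non-fatal: safe to catch and clean up"
    case _           => "fatal: let it propagate"
  }

  def main(args: Array[String]): Unit = {
    println(classify(new RuntimeException("query failed")))  // non-fatal
    println(classify(new OutOfMemoryError("simulated")))     // fatal (VirtualMachineError)
  }
}
```

Catching `NonFatal(e)` rather than `Throwable` avoids swallowing errors the JVM cannot recover from while still cleaning up after ordinary query failures.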
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 89f69c8e4d..9d3c4cd3d5 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -554,6 +554,21 @@ class HiveDDLSuite
     }
   }
 
+  test("Create Cataloged Table As Select - Drop Table After Runtime Exception") {
+    withTable("tab") {
+      intercept[RuntimeException] {
+        sql(
+          """
+            |CREATE TABLE tab
+            |STORED AS TEXTFILE
+            |SELECT 1 AS a, (SELECT a FROM (SELECT 1 AS a UNION ALL SELECT 2 AS a) t) AS b
+          """.stripMargin)
+      }
+      // After hitting runtime exception, we should drop the created table.
+      assert(!spark.sessionState.catalog.tableExists(TableIdentifier("tab")))
+    }
+  }
+
   test("desc table for data source table") {
     withTable("tab1") {
       val tabName = "tab1"