about summary refs log tree commit diff
diff options
context:
space:
mode:
authorAndrew Or <andrew@databricks.com>2016-05-26 12:04:18 -0700
committerAndrew Or <andrew@databricks.com>2016-05-26 12:04:18 -0700
commit2b1ac6cea882246ef0e655bb2c134ef1656a5068 (patch)
treeb165497efd21dde4969c62ebaa5b606c601623a3
parent01b350a4f7c17d6516b27b6cd27ba8390834d40c (diff)
downloadspark-2b1ac6cea882246ef0e655bb2c134ef1656a5068.tar.gz
spark-2b1ac6cea882246ef0e655bb2c134ef1656a5068.tar.bz2
spark-2b1ac6cea882246ef0e655bb2c134ef1656a5068.zip
[SPARK-15539][SQL] DROP TABLE throw exception if table doesn't exist
## What changes were proposed in this pull request?

Same as #13302, but for DROP TABLE.

## How was this patch tested?

`DDLSuite`

Author: Andrew Or <andrew@databricks.com>

Closes #13307 from andrewor14/drop-table.
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala4
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala10
-rw-r--r--sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala52
-rw-r--r--sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala10
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala2
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala4
6 files changed, 42 insertions, 40 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index dd3f17d525..ffea628552 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -185,7 +185,7 @@ case class DropTableCommand(
if (!catalog.tableExists(tableName)) {
if (!ifExists) {
val objectName = if (isView) "View" else "Table"
- logError(s"$objectName '${tableName.quotedString}' does not exist")
+ throw new AnalysisException(s"$objectName to drop '$tableName' does not exist")
}
} else {
// If the command DROP VIEW is to drop a table or DROP TABLE is to drop a view
@@ -202,7 +202,7 @@ case class DropTableCommand(
try {
sparkSession.cacheManager.tryUncacheQuery(sparkSession.table(tableName.quotedString))
} catch {
- case NonFatal(e) => log.warn(s"${e.getMessage}", e)
+ case NonFatal(e) => log.warn(e.toString, e)
}
catalog.invalidateTable(tableName)
catalog.dropTable(tableName, ifExists)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 64f5a4ac47..bddd3f2119 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -741,14 +741,12 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
sql("DROP TABLE dbx.tab1")
assert(catalog.listTables("dbx") == Nil)
sql("DROP TABLE IF EXISTS dbx.tab1")
- // no exception will be thrown
- sql("DROP TABLE dbx.tab1")
+ intercept[AnalysisException] {
+ sql("DROP TABLE dbx.tab1")
+ }
}
- test("drop view in SQLContext") {
- // SQLContext does not support create view. Log an error message, if tab1 does not exists
- sql("DROP VIEW tab1")
-
+ test("drop view") {
val catalog = spark.sessionState.catalog
val tableIdent = TableIdentifier("tab1", Some("dbx"))
createDatabase(catalog, "dbx")
diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
index a8645f7cd3..2d5a970c12 100644
--- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
+++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
@@ -515,7 +515,33 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"plan_json",
// This test uses CREATE EXTERNAL TABLE without specifying LOCATION
- "alter2"
+ "alter2",
+
+ // These tests DROP TABLE that don't exist (but do not specify IF EXISTS)
+ "alter_rename_partition1",
+ "date_1",
+ "date_4",
+ "date_join1",
+ "date_serde",
+ "insert_compressed",
+ "lateral_view_cp",
+ "leftsemijoin",
+ "mapjoin_subquery2",
+ "nomore_ambiguous_table_col",
+ "partition_date",
+ "partition_varchar1",
+ "ppd_repeated_alias",
+ "push_or",
+ "reducesink_dedup",
+ "subquery_in",
+ "subquery_notin_having",
+ "timestamp_3",
+ "timestamp_lazy",
+ "udaf_covar_pop",
+ "union31",
+ "union_date",
+ "varchar_2",
+ "varchar_join1"
)
/**
@@ -529,7 +555,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"add_partition_with_whitelist",
"alias_casted_column",
"alter_partition_with_whitelist",
- "alter_rename_partition",
"ambiguous_col",
"annotate_stats_join",
"annotate_stats_limit",
@@ -606,12 +631,8 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"database_drop",
"database_location",
"database_properties",
- "date_1",
"date_2",
- "date_4",
"date_comparison",
- "date_join1",
- "date_serde",
"decimal_1",
"decimal_4",
"decimal_join",
@@ -737,7 +758,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"insert1",
"insert1_overwrite_partitions",
"insert2_overwrite_partitions",
- "insert_compressed",
"join1",
"join10",
"join11",
@@ -793,10 +813,8 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"join_reorder4",
"join_star",
"lateral_view",
- "lateral_view_cp",
"lateral_view_noalias",
"lateral_view_ppd",
- "leftsemijoin",
"leftsemijoin_mr",
"limit_pushdown_negative",
"lineage1",
@@ -824,7 +842,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"mapjoin_filter_on_outerjoin",
"mapjoin_mapjoin",
"mapjoin_subquery",
- "mapjoin_subquery2",
"mapjoin_test_outer",
"mapreduce1",
"mapreduce2",
@@ -846,7 +863,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"multi_join_union",
"multigroupby_singlemr",
"noalias_subq1",
- "nomore_ambiguous_table_col",
"nonblock_op_deduplicate",
"notable_alias1",
"notable_alias2",
@@ -870,10 +886,8 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"part_inherit_tbl_props",
"part_inherit_tbl_props_with_star",
"partcols1",
- "partition_date",
"partition_serde_format",
"partition_type_check",
- "partition_varchar1",
"partition_wise_fileformat9",
"ppd1",
"ppd2",
@@ -893,7 +907,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"ppd_outer_join4",
"ppd_outer_join5",
"ppd_random",
- "ppd_repeated_alias",
"ppd_udf_col",
"ppd_union",
"ppr_allchildsarenull",
@@ -901,7 +914,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"ppr_pushdown2",
"ppr_pushdown3",
"progress_1",
- "push_or",
"query_with_semi",
"quote1",
"quote2",
@@ -913,7 +925,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"reduce_deduplicate_exclude_gby",
"reduce_deduplicate_exclude_join",
"reduce_deduplicate_extended",
- "reducesink_dedup",
"router_join_ppr",
"select_as_omitted",
"select_unquote_and",
@@ -936,20 +947,15 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"subquery_exists_having",
"subquery_notexists",
"subquery_notexists_having",
- "subquery_in",
"subquery_in_having",
- "subquery_notin_having",
"tablename_with_select",
- "timestamp_3",
"timestamp_comparison",
- "timestamp_lazy",
"timestamp_null",
"transform_ppr1",
"transform_ppr2",
"type_cast_1",
"type_widening",
"udaf_collect_set",
- "udaf_covar_pop",
"udaf_histogram_numeric",
"udf2",
"udf5",
@@ -1113,7 +1119,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"union29",
"union3",
"union30",
- "union31",
"union33",
"union34",
"union4",
@@ -1122,13 +1127,10 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"union7",
"union8",
"union9",
- "union_date",
"union_lateralview",
"union_ppr",
"union_remove_6",
"union_script",
- "varchar_2",
- "varchar_join1",
"varchar_union1",
"view",
"view_cast",
diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala
index de592f8d93..6c3978154d 100644
--- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala
+++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala
@@ -826,15 +826,17 @@ class HiveWindowFunctionQueryFileSuite
"windowing_ntile",
"windowing_udaf",
"windowing_windowspec",
- "windowing_rank"
- )
+ "windowing_rank",
- override def whiteList: Seq[String] = Seq(
- "windowing_udaf2",
+ // These tests DROP TABLE that don't exist (but do not specify IF EXISTS)
"windowing_columnPruning",
"windowing_adjust_rowcontainer_sz"
)
+ override def whiteList: Seq[String] = Seq(
+ "windowing_udaf2"
+ )
+
// Only run those query tests in the realWhileList (do not try other ignored query files).
override def testCases: Seq[(String, File)] = super.testCases.filter {
case (name, _) => realWhiteList.contains(name)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index 686c63065d..153b0c3c72 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -548,7 +548,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
}.getMessage.contains("Unable to infer schema"),
"We should complain that path is not specified.")
- sql("DROP TABLE createdJsonTable")
+ sql("DROP TABLE IF EXISTS createdJsonTable")
}
test("scan a parquet table created through a CTAS statement") {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala
index cc05e56d66..266fdd6c1f 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/QueryPartitionSuite.scala
@@ -61,8 +61,8 @@ class QueryPartitionSuite extends QueryTest with SQLTestUtils with TestHiveSingl
checkAnswer(sql("select key,value from table_with_partition"),
testData.toDF.collect ++ testData.toDF.collect ++ testData.toDF.collect)
- sql("DROP TABLE table_with_partition")
- sql("DROP TABLE createAndInsertTest")
+ sql("DROP TABLE IF EXISTS table_with_partition")
+ sql("DROP TABLE IF EXISTS createAndInsertTest")
}
}
}