author     gatorsmile <gatorsmile@gmail.com>      2016-06-14 11:44:37 -0700
committer  Wenchen Fan <wenchen@databricks.com>   2016-06-14 11:44:37 -0700
commit     df4ea6614d709ee66f1ceb966df6216b125b8ea1 (patch)
tree       41dd629c3ff4d166513d0f01887946152a91a17c /sql/hive
parent     c5b735581922c52a1b1cc6cd8c7b5878d3cf8f20 (diff)
[SPARK-15864][SQL] Fix Inconsistent Behaviors when Uncaching Non-cached Tables
#### What changes were proposed in this pull request?

To uncache a table, we have three different ways:
- _SQL interface_: `UNCACHE TABLE`
- _Dataset API_: `sparkSession.catalog.uncacheTable`
- _Dataset API_: `sparkSession.table(tableName).unpersist()`

When the table is not cached:
- _SQL interface_: `UNCACHE TABLE non-cachedTable` -> **no error message**
- _Dataset API_: `sparkSession.catalog.uncacheTable("non-cachedTable")` -> **reports a confusing error message:** `requirement failed: Table [a: int] is not cached`
- _Dataset API_: `sparkSession.table("non-cachedTable").unpersist()` -> **no error message**

This PR makes the three behaviors consistent: uncaching is a no-op if the table has not been cached. In addition, this PR removes `uncacheQuery`, renames `tryUncacheQuery` to `uncacheQuery`, and documents that it is a no-op if the table has already been uncached. A sketch of the three call paths follows below.

#### How was this patch tested?

Improved the existing test case to verify the behavior when the table has not been cached, and added test cases to verify the behavior when the table does not exist.

Author: gatorsmile <gatorsmile@gmail.com>
Author: xiaoli <lixiao1983@gmail.com>
Author: Xiao Li <xiaoli@Xiaos-MacBook-Pro.local>

Closes #13593 from gatorsmile/uncacheNonCachedTable.
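For reference, here is a minimal sketch of the three uncache paths after this change, assuming a local `SparkSession`; the table name `demo_table`, the `main` wrapper, and the `USING parquet` clause are illustrative and not part of the patch:

```scala
import org.apache.spark.sql.SparkSession

object UncacheNonCachedTableDemo {
  def main(args: Array[String]): Unit = {
    // Illustrative local session; any existing SparkSession behaves the same way.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("uncache-non-cached-table")
      .getOrCreate()

    // A table that exists but was never cached (hypothetical name; created as a
    // data source table so the example does not require Hive support).
    spark.sql("CREATE TABLE IF NOT EXISTS demo_table(a INT) USING parquet")

    // After this change, all three ways of uncaching a non-cached table are
    // no-ops and none of them throws.
    spark.sql("UNCACHE TABLE demo_table")        // SQL interface
    spark.catalog.uncacheTable("demo_table")     // Catalog API
    spark.table("demo_table").unpersist()        // Dataset API

    // Uncaching a table that does not exist still raises NoSuchTableException,
    // as exercised by the new test case in CachedTableSuite.

    spark.sql("DROP TABLE IF EXISTS demo_table")
    spark.stop()
  }
}
```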
Diffstat (limited to 'sql/hive')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala | 24
1 file changed, 20 insertions(+), 4 deletions(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
index 5121440f06..e35a71917f 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
@@ -20,12 +20,14 @@ package org.apache.spark.sql.hive
import java.io.File
import org.apache.spark.sql.{AnalysisException, QueryTest, SaveMode}
+import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.execution.columnar.InMemoryTableScanExec
import org.apache.spark.sql.hive.test.TestHiveSingleton
+import org.apache.spark.sql.test.SQLTestUtils
import org.apache.spark.storage.RDDBlockId
import org.apache.spark.util.Utils
-class CachedTableSuite extends QueryTest with TestHiveSingleton {
+class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
import hiveContext._
def rddIdOf(tableName: String): Int = {
@@ -95,9 +97,23 @@ class CachedTableSuite extends QueryTest with TestHiveSingleton {
sql("DROP TABLE IF EXISTS nonexistantTable")
}
- test("correct error on uncache of non-cached table") {
- intercept[IllegalArgumentException] {
- spark.catalog.uncacheTable("src")
+ test("correct error on uncache of nonexistant tables") {
+ intercept[NoSuchTableException] {
+ spark.catalog.uncacheTable("nonexistantTable")
+ }
+ intercept[NoSuchTableException] {
+ sql("UNCACHE TABLE nonexistantTable")
+ }
+ }
+
+ test("no error on uncache of non-cached table") {
+ val tableName = "newTable"
+ withTable(tableName) {
+ sql(s"CREATE TABLE $tableName(a INT)")
+ // no error will be reported in the following three ways to uncache a table.
+ spark.catalog.uncacheTable(tableName)
+ sql("UNCACHE TABLE newTable")
+ sparkSession.table(tableName).unpersist()
}
}