author    Wenchen Fan <wenchen@databricks.com>    2016-12-19 11:42:59 -0800
committer Yin Huai <yhuai@databricks.com>    2016-12-19 11:42:59 -0800
commit    7a75ee1c9224aa5c2e954fe2a71f9ad506f6782b (patch)
tree      d8d7c964c0729f7c01a4c9c7a5db001d3dbefe61 /sql/hive/src/test
parent    24482858e05bea84cacb41c62be0a9aaa33897ee (diff)
[SPARK-18921][SQL] check database existence with Hive.databaseExists instead of getDatabase
## What changes were proposed in this pull request?

It's odd that we use `Hive.getDatabase` to check whether a database exists, while Hive already offers a `databaseExists` interface. What's worse, `Hive.getDatabase` produces an error message when the database doesn't exist, which is noisy when we only want to check for existence. This PR fixes this by using `Hive.databaseExists` to check database existence.

## How was this patch tested?

N/A

Author: Wenchen Fan <wenchen@databricks.com>

Closes #16332 from cloud-fan/minor.
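To make the API shift concrete, here is a small before/after sketch from a caller's point of view, using only the methods exercised in the test changes below. `client` is a `HiveClient` as in `VersionsSuite`; the helper names are illustrative, and the two helpers target different revisions, since `getDatabaseOption` is removed by this patch.

```scala
// Sketch only, not code from this patch.

// Before: existence was derived from getDatabaseOption (removed here), or
// from getDatabase plus error handling, which made Hive log an error for a
// missing database.
def existedBefore(client: HiveClient, db: String): Boolean =
  client.getDatabaseOption(db).isDefined

// After: ask the question directly via the dedicated API.
def existsNow(client: HiveClient, db: String): Boolean =
  client.databaseExists(db)

// getDatabase remains for fetching metadata; per the updated test it now
// throws NoSuchDatabaseException when the database is missing.
```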
Diffstat (limited to 'sql/hive/src/test')
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala | 13
1 file changed, 7 insertions, 6 deletions
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index 8dd06998ba..5cb8519d2a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -28,7 +28,7 @@ import org.apache.spark.SparkFunSuite
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
-import org.apache.spark.sql.catalyst.analysis.NoSuchPermanentFunctionException
+import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPermanentFunctionException}
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, EqualTo, Literal}
import org.apache.spark.sql.catalyst.util.quietly
@@ -137,11 +137,12 @@ class VersionsSuite extends SparkFunSuite with SQLTestUtils with TestHiveSinglet
test(s"$version: getDatabase") {
// No exception should be thrown
client.getDatabase("default")
+ intercept[NoSuchDatabaseException](client.getDatabase("nonexist"))
}
- test(s"$version: getDatabaseOption") {
- assert(client.getDatabaseOption("default").isDefined)
- assert(client.getDatabaseOption("nonexist") == None)
+ test(s"$version: databaseExists") {
+ assert(client.databaseExists("default") == true)
+ assert(client.databaseExists("nonexist") == false)
}
test(s"$version: listDatabases") {
@@ -155,9 +156,9 @@ class VersionsSuite extends SparkFunSuite with SQLTestUtils with TestHiveSinglet
}
test(s"$version: dropDatabase") {
- assert(client.getDatabaseOption("temporary").isDefined)
+ assert(client.databaseExists("temporary") == true)
client.dropDatabase("temporary", ignoreIfNotExists = false, cascade = true)
- assert(client.getDatabaseOption("temporary").isEmpty)
+ assert(client.databaseExists("temporary") == false)
}
///////////////////////////////////////////////////////////////////////////
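The client-side implementation that this commit changes is outside the diffstat above (it lives in the Hive client code rather than this test suite). As a rough illustration only, a delegation along the following lines would match what the description and the updated tests imply; `hive` is a hypothetical name for the wrapped org.apache.hadoop.hive.ql.metadata.Hive handle, and this is not the actual HiveClientImpl source.

```scala
// Illustrative sketch, not the real HiveClientImpl code.
def databaseExists(dbName: String): Boolean = {
  // Delegate to Hive's own existence check instead of calling getDatabase
  // and interpreting a failure, so nothing is logged for a missing database.
  hive.databaseExists(dbName)
}
```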