aboutsummaryrefslogtreecommitdiff
path: root/sql/hive/src
diff options
context:
space:
mode:
authorWenchen Fan <wenchen@databricks.com>2016-12-19 11:42:59 -0800
committerYin Huai <yhuai@databricks.com>2016-12-19 11:42:59 -0800
commit7a75ee1c9224aa5c2e954fe2a71f9ad506f6782b (patch)
treed8d7c964c0729f7c01a4c9c7a5db001d3dbefe61 /sql/hive/src
parent24482858e05bea84cacb41c62be0a9aaa33897ee (diff)
downloadspark-7a75ee1c9224aa5c2e954fe2a71f9ad506f6782b.tar.gz
spark-7a75ee1c9224aa5c2e954fe2a71f9ad506f6782b.tar.bz2
spark-7a75ee1c9224aa5c2e954fe2a71f9ad506f6782b.zip
[SPARK-18921][SQL] check database existence with Hive.databaseExists instead of getDatabase
## What changes were proposed in this pull request? It's weird that we use `Hive.getDatabase` to check the existence of a database, while Hive has a `databaseExists` interface. What's worse, `Hive.getDatabase` will produce an error message if the database doesn't exist, which is annoying when we only want to check the database existence. This PR fixes this and uses `Hive.databaseExists` to check database existence. ## How was this patch tested? N/A Author: Wenchen Fan <wenchen@databricks.com> Closes #16332 from cloud-fan/minor.
Diffstat (limited to 'sql/hive/src')
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala2
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala8
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala12
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala13
4 files changed, 19 insertions, 16 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index 544f277cdf..9c19a0e364 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -167,7 +167,7 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
}
override def databaseExists(db: String): Boolean = withClient {
- client.getDatabaseOption(db).isDefined
+ client.databaseExists(db)
}
override def listDatabases(): Seq[String] = withClient {
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala
index 837b6c57fc..8bdcf3111d 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClient.scala
@@ -58,12 +58,10 @@ private[hive] trait HiveClient {
def setCurrentDatabase(databaseName: String): Unit
/** Returns the metadata for specified database, throwing an exception if it doesn't exist */
- final def getDatabase(name: String): CatalogDatabase = {
- getDatabaseOption(name).getOrElse(throw new NoSuchDatabaseException(name))
- }
+ def getDatabase(name: String): CatalogDatabase
- /** Returns the metadata for a given database, or None if it doesn't exist. */
- def getDatabaseOption(name: String): Option[CatalogDatabase]
+ /** Return whether a database with the specified name exists. */
+ def databaseExists(dbName: String): Boolean
/** List the names of all the databases that match the specified pattern. */
def listDatabases(pattern: String): Seq[String]
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index b75f6e98d5..bacae8a9a5 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -300,7 +300,7 @@ private[hive] class HiveClientImpl(
}
override def setCurrentDatabase(databaseName: String): Unit = withHiveState {
- if (getDatabaseOption(databaseName).isDefined) {
+ if (databaseExists(databaseName)) {
state.setCurrentDatabase(databaseName)
} else {
throw new NoSuchDatabaseException(databaseName)
@@ -336,14 +336,18 @@ private[hive] class HiveClientImpl(
Option(database.properties).map(_.asJava).orNull))
}
- override def getDatabaseOption(name: String): Option[CatalogDatabase] = withHiveState {
- Option(client.getDatabase(name)).map { d =>
+ override def getDatabase(dbName: String): CatalogDatabase = withHiveState {
+ Option(client.getDatabase(dbName)).map { d =>
CatalogDatabase(
name = d.getName,
description = d.getDescription,
locationUri = d.getLocationUri,
properties = Option(d.getParameters).map(_.asScala.toMap).orNull)
- }
+ }.getOrElse(throw new NoSuchDatabaseException(dbName))
+ }
+
+ override def databaseExists(dbName: String): Boolean = withHiveState {
+ client.databaseExists(dbName)
}
override def listDatabases(pattern: String): Seq[String] = withHiveState {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index 8dd06998ba..5cb8519d2a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -28,7 +28,7 @@ import org.apache.spark.SparkFunSuite
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
-import org.apache.spark.sql.catalyst.analysis.NoSuchPermanentFunctionException
+import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPermanentFunctionException}
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, EqualTo, Literal}
import org.apache.spark.sql.catalyst.util.quietly
@@ -137,11 +137,12 @@ class VersionsSuite extends SparkFunSuite with SQLTestUtils with TestHiveSinglet
test(s"$version: getDatabase") {
// No exception should be thrown
client.getDatabase("default")
+ intercept[NoSuchDatabaseException](client.getDatabase("nonexist"))
}
- test(s"$version: getDatabaseOption") {
- assert(client.getDatabaseOption("default").isDefined)
- assert(client.getDatabaseOption("nonexist") == None)
+ test(s"$version: databaseExists") {
+ assert(client.databaseExists("default") == true)
+ assert(client.databaseExists("nonexist") == false)
}
test(s"$version: listDatabases") {
@@ -155,9 +156,9 @@ class VersionsSuite extends SparkFunSuite with SQLTestUtils with TestHiveSinglet
}
test(s"$version: dropDatabase") {
- assert(client.getDatabaseOption("temporary").isDefined)
+ assert(client.databaseExists("temporary") == true)
client.dropDatabase("temporary", ignoreIfNotExists = false, cascade = true)
- assert(client.getDatabaseOption("temporary").isEmpty)
+ assert(client.databaseExists("temporary") == false)
}
///////////////////////////////////////////////////////////////////////////