aboutsummaryrefslogtreecommitdiff
path: root/sql/core/src/test/scala
diff options
context:
space:
mode:
authorHerman van Hovell <hvanhovell@databricks.com>2016-09-29 17:56:32 -0700
committerReynold Xin <rxin@databricks.com>2016-09-29 17:56:32 -0700
commit74ac1c43817c0b8da70342e540ec7638dd7d01bd (patch)
tree2b804a8fb24074efeaa8537292ce98136eaa8b95 /sql/core/src/test/scala
parent2f739567080d804a942cfcca0e22f91ab7cbea36 (diff)
downloadspark-74ac1c43817c0b8da70342e540ec7638dd7d01bd.tar.gz
spark-74ac1c43817c0b8da70342e540ec7638dd7d01bd.tar.bz2
spark-74ac1c43817c0b8da70342e540ec7638dd7d01bd.zip
[SPARK-17717][SQL] Add exist/find methods to Catalog.
## What changes were proposed in this pull request? The current user-facing catalog does not implement methods for checking object existence or finding objects. You could theoretically do this using the `list*` commands, but this is rather cumbersome and can actually be costly when there are many objects. This PR adds `exists*` and `find*` methods for Databases, Tables and Functions. ## How was this patch tested? Added tests to `org.apache.spark.sql.internal.CatalogSuite` Author: Herman van Hovell <hvanhovell@databricks.com> Closes #15301 from hvanhovell/SPARK-17717.
Diffstat (limited to 'sql/core/src/test/scala')
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala118
1 file changed, 118 insertions, 0 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
index 3dc67ffafb..783bf77f86 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
@@ -340,6 +340,124 @@ class CatalogSuite
}
}
+ test("find database") {
+ intercept[AnalysisException](spark.catalog.findDatabase("db10"))
+ withTempDatabase { db =>
+ assert(spark.catalog.findDatabase(db).name === db)
+ }
+ }
+
+ test("find table") {
+ withTempDatabase { db =>
+ withTable(s"tbl_x", s"$db.tbl_y") {
+ // Try to find non existing tables.
+ intercept[AnalysisException](spark.catalog.findTable("tbl_x"))
+ intercept[AnalysisException](spark.catalog.findTable("tbl_y"))
+ intercept[AnalysisException](spark.catalog.findTable(db, "tbl_y"))
+
+ // Create objects.
+ createTempTable("tbl_x")
+ createTable("tbl_y", Some(db))
+
+ // Find a temporary table
+ assert(spark.catalog.findTable("tbl_x").name === "tbl_x")
+
+ // Find a qualified table
+ assert(spark.catalog.findTable(db, "tbl_y").name === "tbl_y")
+
+ // Find an unqualified table using the current database
+ intercept[AnalysisException](spark.catalog.findTable("tbl_y"))
+ spark.catalog.setCurrentDatabase(db)
+ assert(spark.catalog.findTable("tbl_y").name === "tbl_y")
+ }
+ }
+ }
+
+ test("find function") {
+ withTempDatabase { db =>
+ withUserDefinedFunction("fn1" -> true, s"$db.fn2" -> false) {
+ // Try to find non existing functions.
+ intercept[AnalysisException](spark.catalog.findFunction("fn1"))
+ intercept[AnalysisException](spark.catalog.findFunction("fn2"))
+ intercept[AnalysisException](spark.catalog.findFunction(db, "fn2"))
+
+ // Create objects.
+ createTempFunction("fn1")
+ createFunction("fn2", Some(db))
+
+ // Find a temporary function
+ assert(spark.catalog.findFunction("fn1").name === "fn1")
+
+ // Find a qualified function
+ assert(spark.catalog.findFunction(db, "fn2").name === "fn2")
+
+ // Find an unqualified function using the current database
+ intercept[AnalysisException](spark.catalog.findFunction("fn2"))
+ spark.catalog.setCurrentDatabase(db)
+ assert(spark.catalog.findFunction("fn2").name === "fn2")
+ }
+ }
+ }
+
+ test("database exists") {
+ assert(!spark.catalog.databaseExists("db10"))
+ createDatabase("db10")
+ assert(spark.catalog.databaseExists("db10"))
+ dropDatabase("db10")
+ }
+
+ test("table exists") {
+ withTempDatabase { db =>
+ withTable(s"tbl_x", s"$db.tbl_y") {
+ // Try to find non existing tables.
+ assert(!spark.catalog.tableExists("tbl_x"))
+ assert(!spark.catalog.tableExists("tbl_y"))
+ assert(!spark.catalog.tableExists(db, "tbl_y"))
+
+ // Create objects.
+ createTempTable("tbl_x")
+ createTable("tbl_y", Some(db))
+
+ // Find a temporary table
+ assert(spark.catalog.tableExists("tbl_x"))
+
+ // Find a qualified table
+ assert(spark.catalog.tableExists(db, "tbl_y"))
+
+ // Find an unqualified table using the current database
+ assert(!spark.catalog.tableExists("tbl_y"))
+ spark.catalog.setCurrentDatabase(db)
+ assert(spark.catalog.tableExists("tbl_y"))
+ }
+ }
+ }
+
+ test("function exists") {
+ withTempDatabase { db =>
+ withUserDefinedFunction("fn1" -> true, s"$db.fn2" -> false) {
+ // Try to find non existing functions.
+ assert(!spark.catalog.functionExists("fn1"))
+ assert(!spark.catalog.functionExists("fn2"))
+ assert(!spark.catalog.functionExists(db, "fn2"))
+
+ // Create objects.
+ createTempFunction("fn1")
+ createFunction("fn2", Some(db))
+
+ // Find a temporary function
+ assert(spark.catalog.functionExists("fn1"))
+
+ // Find a qualified function
+ assert(spark.catalog.functionExists(db, "fn2"))
+
+ // Find an unqualified function using the current database
+ assert(!spark.catalog.functionExists("fn2"))
+ spark.catalog.setCurrentDatabase(db)
+ assert(spark.catalog.functionExists("fn2"))
+ }
+ }
+ }
+
// TODO: add tests for the rest of them
}