aboutsummaryrefslogtreecommitdiff
path: root/sql/core/src/test
diff options
context:
space:
mode:
authorHerman van Hovell <hvanhovell@databricks.com>2016-10-01 00:50:16 -0700
committerReynold Xin <rxin@databricks.com>2016-10-01 00:50:16 -0700
commitaf6ece33d39cf305bd4a211d08a2f8e910c69bc1 (patch)
tree56ca7515a393966478b738836e00b4f3c2d4df0d /sql/core/src/test
parent4bcd9b728b8df74756d16b27725c2db7c523d4b2 (diff)
downloadspark-af6ece33d39cf305bd4a211d08a2f8e910c69bc1.tar.gz
spark-af6ece33d39cf305bd4a211d08a2f8e910c69bc1.tar.bz2
spark-af6ece33d39cf305bd4a211d08a2f8e910c69bc1.zip
[SPARK-17717][SQL] Add Exist/find methods to Catalog [FOLLOW-UP]
## What changes were proposed in this pull request? We added find and exists methods for Databases, Tables and Functions to the user facing Catalog in PR https://github.com/apache/spark/pull/15301. However, it was brought up that the semantics of the `find` methods are more in line with those of a `get` method (get an object or else fail). So we rename these in this PR. ## How was this patch tested? Existing tests. Author: Herman van Hovell <hvanhovell@databricks.com> Closes #15308 from hvanhovell/SPARK-17717-2.
Diffstat (limited to 'sql/core/src/test')
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala38
1 file changed, 19 insertions, 19 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
index 783bf77f86..214bc736bd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
@@ -340,61 +340,61 @@ class CatalogSuite
}
}
- test("find database") {
- intercept[AnalysisException](spark.catalog.findDatabase("db10"))
+ test("get database") {
+ intercept[AnalysisException](spark.catalog.getDatabase("db10"))
withTempDatabase { db =>
- assert(spark.catalog.findDatabase(db).name === db)
+ assert(spark.catalog.getDatabase(db).name === db)
}
}
- test("find table") {
+ test("get table") {
withTempDatabase { db =>
withTable(s"tbl_x", s"$db.tbl_y") {
// Try to find non existing tables.
- intercept[AnalysisException](spark.catalog.findTable("tbl_x"))
- intercept[AnalysisException](spark.catalog.findTable("tbl_y"))
- intercept[AnalysisException](spark.catalog.findTable(db, "tbl_y"))
+ intercept[AnalysisException](spark.catalog.getTable("tbl_x"))
+ intercept[AnalysisException](spark.catalog.getTable("tbl_y"))
+ intercept[AnalysisException](spark.catalog.getTable(db, "tbl_y"))
// Create objects.
createTempTable("tbl_x")
createTable("tbl_y", Some(db))
// Find a temporary table
- assert(spark.catalog.findTable("tbl_x").name === "tbl_x")
+ assert(spark.catalog.getTable("tbl_x").name === "tbl_x")
// Find a qualified table
- assert(spark.catalog.findTable(db, "tbl_y").name === "tbl_y")
+ assert(spark.catalog.getTable(db, "tbl_y").name === "tbl_y")
// Find an unqualified table using the current database
- intercept[AnalysisException](spark.catalog.findTable("tbl_y"))
+ intercept[AnalysisException](spark.catalog.getTable("tbl_y"))
spark.catalog.setCurrentDatabase(db)
- assert(spark.catalog.findTable("tbl_y").name === "tbl_y")
+ assert(spark.catalog.getTable("tbl_y").name === "tbl_y")
}
}
}
- test("find function") {
+ test("get function") {
withTempDatabase { db =>
withUserDefinedFunction("fn1" -> true, s"$db.fn2" -> false) {
// Try to find non existing functions.
- intercept[AnalysisException](spark.catalog.findFunction("fn1"))
- intercept[AnalysisException](spark.catalog.findFunction("fn2"))
- intercept[AnalysisException](spark.catalog.findFunction(db, "fn2"))
+ intercept[AnalysisException](spark.catalog.getFunction("fn1"))
+ intercept[AnalysisException](spark.catalog.getFunction("fn2"))
+ intercept[AnalysisException](spark.catalog.getFunction(db, "fn2"))
// Create objects.
createTempFunction("fn1")
createFunction("fn2", Some(db))
// Find a temporary function
- assert(spark.catalog.findFunction("fn1").name === "fn1")
+ assert(spark.catalog.getFunction("fn1").name === "fn1")
// Find a qualified function
- assert(spark.catalog.findFunction(db, "fn2").name === "fn2")
+ assert(spark.catalog.getFunction(db, "fn2").name === "fn2")
// Find an unqualified function using the current database
- intercept[AnalysisException](spark.catalog.findFunction("fn2"))
+ intercept[AnalysisException](spark.catalog.getFunction("fn2"))
spark.catalog.setCurrentDatabase(db)
- assert(spark.catalog.findFunction("fn2").name === "fn2")
+ assert(spark.catalog.getFunction("fn2").name === "fn2")
}
}
}