author    Xiao Li <gatorsmile@gmail.com>  2017-04-04 18:57:46 +0800
committer Wenchen Fan <wenchen@databricks.com>  2017-04-04 18:57:46 +0800
commit    26e7bca2295faeef22b2d9554f316c97bc240fd7 (patch)
tree      0d23926689539b4a47b9333d0dbf2018d942172b /sql/core/src/test
parent    c95fbea68e9dfb2c96a1d13dde17d80a37066ae6 (diff)
[SPARK-20198][SQL] Remove the inconsistency in table/function name conventions in SparkSession.Catalog APIs
### What changes were proposed in this pull request?

As observed by felixcheung, the `SparkSession.Catalog` APIs follow different conventions for table/function identifiers/names. Most APIs accept a qualified name (i.e., `databaseName.tableName` or `databaseName.functionName`), but the following five APIs do not:

- def listColumns(tableName: String): Dataset[Column]
- def getTable(tableName: String): Table
- def getFunction(functionName: String): Function
- def tableExists(tableName: String): Boolean
- def functionExists(functionName: String): Boolean

To make them consistent with the other Catalog APIs, this PR lets these five APIs accept qualified names, updates the function/API comments, and adds the `param` descriptions to clarify the inputs we allow.

### How was this patch tested?

Added test cases.

Author: Xiao Li <gatorsmile@gmail.com>

Closes #17518 from gatorsmile/tableIdentifier.
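To illustrate the convention this patch makes uniform, here is a minimal sketch (not part of the patch; the `spark` session, the `mydb` database, and the `tbl` table are assumed for illustration) exercising the qualified `databaseName.name` form on the table-side APIs:

// Sketch only: assumes a running SparkSession `spark` and a database
// `mydb` containing a table `tbl`; all names are illustrative.
spark.sql("CREATE DATABASE IF NOT EXISTS mydb")
spark.sql("CREATE TABLE IF NOT EXISTS mydb.tbl (id INT) USING parquet")

// After SPARK-20198, these APIs also resolve the `databaseName.name` form:
assert(spark.catalog.tableExists("mydb.tbl"))
assert(spark.catalog.getTable("mydb.tbl").name == "tbl")
spark.catalog.listColumns("mydb.tbl").show()

// The function-side APIs behave the same way once a function `mydb.fn` exists:
//   spark.catalog.functionExists("mydb.fn")
//   spark.catalog.getFunction("mydb.fn")

Unqualified names continue to resolve against the current database, exactly as before.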
Diffstat (limited to 'sql/core/src/test')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala  21
1 file changed, 21 insertions, 0 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
index 9742b3b2d5..6469e501c1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
@@ -102,6 +102,11 @@ class CatalogSuite
assert(col.isPartition == tableMetadata.partitionColumnNames.contains(col.name))
assert(col.isBucket == bucketColumnNames.contains(col.name))
}
+
+ dbName.foreach { db =>
+ val expected = columns.collect().map(_.name).toSet
+ assert(spark.catalog.listColumns(s"$db.$tableName").collect().map(_.name).toSet == expected)
+ }
}
override def afterEach(): Unit = {
@@ -345,6 +350,7 @@ class CatalogSuite
// Find a qualified table
assert(spark.catalog.getTable(db, "tbl_y").name === "tbl_y")
+ assert(spark.catalog.getTable(s"$db.tbl_y").name === "tbl_y")
// Find an unqualified table using the current database
intercept[AnalysisException](spark.catalog.getTable("tbl_y"))
@@ -378,6 +384,11 @@ class CatalogSuite
assert(fn2.database === db)
assert(!fn2.isTemporary)
+ val fn2WithQualifiedName = spark.catalog.getFunction(s"$db.fn2")
+ assert(fn2WithQualifiedName.name === "fn2")
+ assert(fn2WithQualifiedName.database === db)
+ assert(!fn2WithQualifiedName.isTemporary)
+
// Find an unqualified function using the current database
intercept[AnalysisException](spark.catalog.getFunction("fn2"))
spark.catalog.setCurrentDatabase(db)
@@ -403,6 +414,7 @@ class CatalogSuite
assert(!spark.catalog.tableExists("tbl_x"))
assert(!spark.catalog.tableExists("tbl_y"))
assert(!spark.catalog.tableExists(db, "tbl_y"))
+ assert(!spark.catalog.tableExists(s"$db.tbl_y"))
// Create objects.
createTempTable("tbl_x")
@@ -413,11 +425,15 @@ class CatalogSuite
// Find a qualified table
assert(spark.catalog.tableExists(db, "tbl_y"))
+ assert(spark.catalog.tableExists(s"$db.tbl_y"))
// Find an unqualified table using the current database
assert(!spark.catalog.tableExists("tbl_y"))
spark.catalog.setCurrentDatabase(db)
assert(spark.catalog.tableExists("tbl_y"))
+
+ // Unable to find the table, although the temp view with the given name exists
+ assert(!spark.catalog.tableExists(db, "tbl_x"))
}
}
}
@@ -429,6 +445,7 @@ class CatalogSuite
assert(!spark.catalog.functionExists("fn1"))
assert(!spark.catalog.functionExists("fn2"))
assert(!spark.catalog.functionExists(db, "fn2"))
+ assert(!spark.catalog.functionExists(s"$db.fn2"))
// Create objects.
createTempFunction("fn1")
@@ -439,11 +456,15 @@ class CatalogSuite
// Find a qualified function
assert(spark.catalog.functionExists(db, "fn2"))
+ assert(spark.catalog.functionExists(s"$db.fn2"))
// Find an unqualified function using the current database
assert(!spark.catalog.functionExists("fn2"))
spark.catalog.setCurrentDatabase(db)
assert(spark.catalog.functionExists("fn2"))
+
+ // Unable to find the function, although the temp function with the given name exists
+ assert(!spark.catalog.functionExists(db, "fn1"))
}
}
}
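The final assertion in each `Exists` test pins down a related subtlety: temporary views and functions do not belong to any database, so a database-qualified lookup never resolves them. A minimal sketch of that behavior (assuming, as in the sketch above, a SparkSession `spark` and a database `mydb`):

// Sketch only: a temp view is visible unqualified, but invisible
// under any database qualifier.
spark.range(1).createOrReplaceTempView("tbl_x")

assert(spark.catalog.tableExists("tbl_x"))          // found as a temp view
assert(!spark.catalog.tableExists("mydb", "tbl_x")) // not found in the database
assert(!spark.catalog.tableExists("mydb.tbl_x"))    // qualified form also misses it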