From 247a4faf06c1dd47a6543c56929cd0182a03e106 Mon Sep 17 00:00:00 2001
From: gatorsmile
Date: Fri, 2 Sep 2016 22:31:01 +0800
Subject: [SPARK-16935][SQL] Verification of Function-related ExternalCatalog
 APIs

### What changes were proposed in this pull request?
Function-related `HiveExternalCatalog` APIs do not have enough verification logic. After this PR, `HiveExternalCatalog` and `InMemoryCatalog` become consistent in their error handling. For example, below is the exception we got when calling `renameFunction`:
```
15:13:40.369 WARN org.apache.hadoop.hive.metastore.ObjectStore: Failed to get database db1, returning NoSuchObjectException
15:13:40.377 WARN org.apache.hadoop.hive.metastore.ObjectStore: Failed to get database db2, returning NoSuchObjectException
15:13:40.739 ERROR DataNucleus.Datastore.Persist: Update of object "org.apache.hadoop.hive.metastore.model.MFunction@205629e9" using statement "UPDATE FUNCS SET FUNC_NAME=? WHERE FUNC_ID=?" failed : org.apache.derby.shared.common.error.DerbySQLIntegrityConstraintViolationException: The statement was aborted because it would have caused a duplicate key value in a unique or primary key constraint or unique index identified by 'UNIQUEFUNCTION' defined on 'FUNCS'.
	at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source)
	at org.apache.derby.impl.jdbc.Util.generateCsSQLException(Unknown Source)
	at org.apache.derby.impl.jdbc.TransactionResourceImpl.wrapInSQLException(Unknown Source)
	at org.apache.derby.impl.jdbc.TransactionResourceImpl.handleException(Unknown Source)
```

### How was this patch tested?
Improved the existing test cases to check whether the error messages are right.
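For context, the consistent behavior comes from validating existence up front and throwing the typed exceptions that the updated tests now intercept. Below is a minimal sketch of that pattern; the trait and helper names (`requireDbExists`, `requireFunctionExists`, `requireFunctionNotExists`) are illustrative assumptions, not necessarily the exact members of the catalog classes:
```
// Minimal sketch of the verification pattern (names are illustrative).
import org.apache.spark.sql.catalyst.analysis.{FunctionAlreadyExistsException, NoSuchDatabaseException, NoSuchFunctionException}

trait FunctionVerification {
  // Existence checks against the underlying metastore (implementation-specific).
  protected def databaseExists(db: String): Boolean
  protected def functionExists(db: String, funcName: String): Boolean

  protected def requireDbExists(db: String): Unit = {
    if (!databaseExists(db)) throw new NoSuchDatabaseException(db)
  }

  protected def requireFunctionExists(db: String, funcName: String): Unit = {
    if (!functionExists(db, funcName)) throw new NoSuchFunctionException(db, funcName)
  }

  protected def requireFunctionNotExists(db: String, funcName: String): Unit = {
    if (functionExists(db, funcName)) throw new FunctionAlreadyExistsException(db, funcName)
  }
}
```
With checks like these in both catalog implementations, the same precondition failure raises the same exception type regardless of the backend.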
Author: gatorsmile

Closes #14521 from gatorsmile/functionChecking.
---
 .../sql/catalyst/catalog/ExternalCatalogSuite.scala | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
index 19f8665383..f283f4287c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
@@ -25,6 +25,7 @@ import org.scalatest.BeforeAndAfterEach
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
+import org.apache.spark.sql.catalyst.analysis.{FunctionAlreadyExistsException, NoSuchDatabaseException, NoSuchFunctionException}
 import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
 import org.apache.spark.sql.types.StructType
 import org.apache.spark.util.Utils
@@ -450,14 +451,14 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
 
   test("create function when database does not exist") {
     val catalog = newBasicCatalog()
-    intercept[AnalysisException] {
+    intercept[NoSuchDatabaseException] {
       catalog.createFunction("does_not_exist", newFunc())
     }
   }
 
   test("create function that already exists") {
     val catalog = newBasicCatalog()
-    intercept[AnalysisException] {
+    intercept[FunctionAlreadyExistsException] {
       catalog.createFunction("db2", newFunc("func1"))
     }
   }
@@ -471,14 +472,14 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
 
   test("drop function when database does not exist") {
     val catalog = newBasicCatalog()
-    intercept[AnalysisException] {
+    intercept[NoSuchDatabaseException] {
       catalog.dropFunction("does_not_exist", "something")
     }
   }
 
   test("drop function that does not exist") {
     val catalog = newBasicCatalog()
-    intercept[AnalysisException] {
+    intercept[NoSuchFunctionException] {
       catalog.dropFunction("db2", "does_not_exist")
     }
   }
@@ -488,14 +489,14 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
     assert(catalog.getFunction("db2", "func1") ==
       CatalogFunction(FunctionIdentifier("func1", Some("db2")),
         funcClass, Seq.empty[FunctionResource]))
-    intercept[AnalysisException] {
+    intercept[NoSuchFunctionException] {
       catalog.getFunction("db2", "does_not_exist")
     }
   }
 
   test("get function when database does not exist") {
     val catalog = newBasicCatalog()
-    intercept[AnalysisException] {
+    intercept[NoSuchDatabaseException] {
       catalog.getFunction("does_not_exist", "func1")
     }
   }
@@ -505,15 +506,15 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
     val newName = "funcky"
     assert(catalog.getFunction("db2", "func1").className == funcClass)
     catalog.renameFunction("db2", "func1", newName)
-    intercept[AnalysisException] { catalog.getFunction("db2", "func1") }
+    intercept[NoSuchFunctionException] { catalog.getFunction("db2", "func1") }
     assert(catalog.getFunction("db2", newName).identifier.funcName == newName)
     assert(catalog.getFunction("db2", newName).className == funcClass)
-    intercept[AnalysisException] { catalog.renameFunction("db2", "does_not_exist", "me") }
+    intercept[NoSuchFunctionException] { catalog.renameFunction("db2", "does_not_exist", "me") }
   }
 
   test("rename function when database does not exist") {
     val catalog = newBasicCatalog()
-    intercept[AnalysisException] {
+    intercept[NoSuchDatabaseException] {
       catalog.renameFunction("does_not_exist", "func1", "func5")
     }
   }
@@ -521,7 +522,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
   test("rename function when new function already exists") {
     val catalog = newBasicCatalog()
     catalog.createFunction("db2", newFunc("func2", Some("db2")))
-    intercept[AnalysisException] {
+    intercept[FunctionAlreadyExistsException] {
       catalog.renameFunction("db2", "func1", "func2")
     }
   }
-- 
cgit v1.2.3
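As a hypothetical usage of the sketch above, a `renameFunction` that validates before delegating to the metastore fails fast with a typed exception instead of surfacing the Derby constraint violation quoted in the description. The three-argument shape follows the `renameFunction` calls in the tests; `VerifyingCatalog` and `doRenameFunction` are invented for illustration:
```
// Hypothetical skeleton: validate first, then delegate to the metastore.
abstract class VerifyingCatalog extends FunctionVerification {
  // The actual metastore mutation, left abstract here.
  protected def doRenameFunction(db: String, oldName: String, newName: String): Unit

  def renameFunction(db: String, oldName: String, newName: String): Unit = {
    requireDbExists(db)                    // NoSuchDatabaseException if db is missing
    requireFunctionExists(db, oldName)     // NoSuchFunctionException if oldName is missing
    requireFunctionNotExists(db, newName)  // FunctionAlreadyExistsException if newName is taken
    doRenameFunction(db, oldName, newName)
  }
}
```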