diff options
author | Xiao Li <gatorsmile@gmail.com> | 2017-03-20 22:52:45 -0700 |
---|---|---|
committer | Xiao Li <gatorsmile@gmail.com> | 2017-03-20 22:52:45 -0700 |
commit | d2dcd6792f4cea39e12945ad8c4cda5d8d034de4 (patch) | |
tree | b44d2d7d79e075775a837413bee0e1b5494602e4 /sql/catalyst | |
parent | 68d65fae71e475ad811a9716098aca03a2af9532 (diff) | |
download | spark-d2dcd6792f4cea39e12945ad8c4cda5d8d034de4.tar.gz spark-d2dcd6792f4cea39e12945ad8c4cda5d8d034de4.tar.bz2 spark-d2dcd6792f4cea39e12945ad8c4cda5d8d034de4.zip |
[SPARK-20024][SQL][TEST-MAVEN] SessionCatalog reset need to set the current database of ExternalCatalog
### What changes were proposed in this pull request?
SessionCatalog API setCurrentDatabase does not set the current database of the underlying ExternalCatalog. Thus, weird errors could come in the test suites after we call reset. We need to fix it.
So far, we have not found a direct impact on the other code paths, because all the SessionCatalog APIs are expected to always use the current database value we manage, unless some of the code paths skip it. Thus, we fix it in the test-only function reset().
### How was this patch tested?
Multiple test case failures were observed in Maven builds; this patch also adds a test case to SessionCatalogSuite.
Author: Xiao Li <gatorsmile@gmail.com>
Closes #17354 from gatorsmile/useDB.
Diffstat (limited to 'sql/catalyst')
2 files changed, 1 insertion, 2 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala index 25aa8d3ba9..b134fd44a3 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala @@ -1175,6 +1175,7 @@ class SessionCatalog( */ def reset(): Unit = synchronized { setCurrentDatabase(DEFAULT_DATABASE) + externalCatalog.setCurrentDatabase(DEFAULT_DATABASE) listDatabases().filter(_ != DEFAULT_DATABASE).foreach { db => dropDatabase(db, ignoreIfNotExists = false, cascade = true) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala index bb87763e0b..fd9e5d6bb1 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala @@ -53,7 +53,6 @@ abstract class SessionCatalogSuite extends PlanTest { private def withBasicCatalog(f: SessionCatalog => Unit): Unit = { val catalog = new SessionCatalog(newBasicCatalog()) - catalog.createDatabase(newDb("default"), ignoreIfExists = true) try { f(catalog) } finally { @@ -76,7 +75,6 @@ abstract class SessionCatalogSuite extends PlanTest { test("basic create and list databases") { withEmptyCatalog { catalog => - catalog.createDatabase(newDb("default"), ignoreIfExists = true) assert(catalog.databaseExists("default")) assert(!catalog.databaseExists("testing")) assert(!catalog.databaseExists("testing2")) |