aboutsummaryrefslogtreecommitdiff
path: root/sql/hive
diff options
context:
space:
mode:
authorCheng Hao <hao.cheng@intel.com>2014-09-12 11:29:30 -0700
committerMichael Armbrust <michael@databricks.com>2014-09-12 11:29:44 -0700
commit6cbf83c05c7a073d4df81b59a1663fea38ce65f6 (patch)
tree0937c80152e75031ca5cb3d9d45442b1780c6924 /sql/hive
parentf17b7957a4283952021d9e4106c5bd9994148128 (diff)
downloadspark-6cbf83c05c7a073d4df81b59a1663fea38ce65f6.tar.gz
spark-6cbf83c05c7a073d4df81b59a1663fea38ce65f6.tar.bz2
spark-6cbf83c05c7a073d4df81b59a1663fea38ce65f6.zip
[SPARK-3481] [SQL] Eliminate the error log in local Hive comparison test
Logically, we should remove the Hive Table/Database first and then reset the Hive configuration, repointing to the new data warehouse directory, etc. Otherwise it raised exceptions like "Database does not exist: default" in the local testing. Author: Cheng Hao <hao.cheng@intel.com> Closes #2352 from chenghao-intel/test_hive and squashes the following commits: 74fd76b [Cheng Hao] eliminate the error log (cherry picked from commit 8194fc662c08eb445444c207264e22361def54ea) Signed-off-by: Michael Armbrust <michael@databricks.com>
Diffstat (limited to 'sql/hive')
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala17
1 file changed, 8 insertions, 9 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala
index a013f3f7a8..8bb2216b7b 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala
@@ -309,15 +309,6 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
log.asInstanceOf[org.apache.log4j.Logger].setLevel(org.apache.log4j.Level.WARN)
}
- // It is important that we RESET first as broken hooks that might have been set could break
- // other sql exec here.
- runSqlHive("RESET")
- // For some reason, RESET does not reset the following variables...
- runSqlHive("set datanucleus.cache.collections=true")
- runSqlHive("set datanucleus.cache.collections.lazy=true")
- // Lots of tests fail if we do not change the partition whitelist from the default.
- runSqlHive("set hive.metastore.partition.name.whitelist.pattern=.*")
-
loadedTables.clear()
catalog.client.getAllTables("default").foreach { t =>
logDebug(s"Deleting table $t")
@@ -343,6 +334,14 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
FunctionRegistry.unregisterTemporaryUDF(udfName)
}
+ // It is important that we RESET first as broken hooks that might have been set could break
+ // other sql exec here.
+ runSqlHive("RESET")
+ // For some reason, RESET does not reset the following variables...
+ runSqlHive("set datanucleus.cache.collections=true")
+ runSqlHive("set datanucleus.cache.collections.lazy=true")
+ // Lots of tests fail if we do not change the partition whitelist from the default.
+ runSqlHive("set hive.metastore.partition.name.whitelist.pattern=.*")
configure()
runSqlHive("USE default")