path: root/sql/hive/src/test/scala
author      GuoQiang Li <witgo@qq.com>              2014-08-02 13:55:28 -0700
committer   Patrick Wendell <pwendell@gmail.com>    2014-08-02 13:59:58 -0700
commit      4c477117bb1ffef463776c86f925d35036f96b7a (patch)
tree        577c6b24c177c6a5ab36929877000907c83ffee4 /sql/hive/src/test/scala
parent      91f9504e6086fac05b40545099f9818949c24bca (diff)
SPARK-2804: Remove scalalogging-slf4j dependency
This also Closes #1701.

Author: GuoQiang Li <witgo@qq.com>

Closes #1208 from witgo/SPARK-1470 and squashes the following commits:

422646b [GuoQiang Li] Remove scalalogging-slf4j dependency
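The diff below is a mechanical swap: the scalalogging-slf4j-backed logger field (reached through the removed org.apache.spark.sql.Logging import) is replaced by the logDebug/logInfo/logWarning/logError methods mixed in by org.apache.spark.Logging. A minimal sketch of the resulting pattern, using an illustrative class name that is not part of this commit:

import org.apache.spark.Logging

// Illustrative suite name; only the Logging usage mirrors this commit.
class ExampleHiveSuite extends Logging {
  def runCase(testCaseName: String): Unit = {
    // Formerly logger.debug(...) on a scalalogging logger; the trait methods
    // take a by-name message, so the string is only built if the level is enabled.
    logDebug(s"=== HIVE TEST: $testCaseName ===")
    try {
      // ... run the query against Hive and Catalyst ...
    } catch {
      case e: Exception =>
        logError(s"Test '$testCaseName' failed with $e")
    }
  }
}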
Diffstat (limited to 'sql/hive/src/test/scala')
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala   22
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala     2
2 files changed, 12 insertions, 12 deletions
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala
index 6c8fe4b196..83cfbc6b4a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala
@@ -21,7 +21,7 @@ import java.io._
import org.scalatest.{BeforeAndAfterAll, FunSuite, GivenWhenThen}
-import org.apache.spark.sql.Logging
+import org.apache.spark.Logging
import org.apache.spark.sql.catalyst.planning.PhysicalOperation
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.logical.{NativeCommand => LogicalNativeCommand}
@@ -197,7 +197,7 @@ abstract class HiveComparisonTest
// If test sharding is enable, skip tests that are not in the correct shard.
shardInfo.foreach {
case (shardId, numShards) if testCaseName.hashCode % numShards != shardId => return
- case (shardId, _) => logger.debug(s"Shard $shardId includes test '$testCaseName'")
+ case (shardId, _) => logDebug(s"Shard $shardId includes test '$testCaseName'")
}
// Skip tests found in directories specified by user.
@@ -213,13 +213,13 @@ abstract class HiveComparisonTest
.map(new File(_, testCaseName))
.filter(_.exists)
if (runOnlyDirectories.nonEmpty && runIndicators.isEmpty) {
- logger.debug(
+ logDebug(
s"Skipping test '$testCaseName' not found in ${runOnlyDirectories.map(_.getCanonicalPath)}")
return
}
test(testCaseName) {
- logger.debug(s"=== HIVE TEST: $testCaseName ===")
+ logDebug(s"=== HIVE TEST: $testCaseName ===")
// Clear old output for this testcase.
outputDirectories.map(new File(_, testCaseName)).filter(_.exists()).foreach(_.delete())
@@ -235,7 +235,7 @@ abstract class HiveComparisonTest
.filterNot(_ contains "hive.outerjoin.supports.filters")
if (allQueries != queryList)
- logger.warn(s"Simplifications made on unsupported operations for test $testCaseName")
+ logWarning(s"Simplifications made on unsupported operations for test $testCaseName")
lazy val consoleTestCase = {
val quotes = "\"\"\""
@@ -257,11 +257,11 @@ abstract class HiveComparisonTest
}
val hiveCachedResults = hiveCacheFiles.flatMap { cachedAnswerFile =>
- logger.debug(s"Looking for cached answer file $cachedAnswerFile.")
+ logDebug(s"Looking for cached answer file $cachedAnswerFile.")
if (cachedAnswerFile.exists) {
Some(fileToString(cachedAnswerFile))
} else {
- logger.debug(s"File $cachedAnswerFile not found")
+ logDebug(s"File $cachedAnswerFile not found")
None
}
}.map {
@@ -272,7 +272,7 @@ abstract class HiveComparisonTest
val hiveResults: Seq[Seq[String]] =
if (hiveCachedResults.size == queryList.size) {
- logger.info(s"Using answer cache for test: $testCaseName")
+ logInfo(s"Using answer cache for test: $testCaseName")
hiveCachedResults
} else {
@@ -287,7 +287,7 @@ abstract class HiveComparisonTest
if (installHooksCommand.findAllMatchIn(queryString).nonEmpty)
sys.error("hive exec hooks not supported for tests.")
- logger.warn(s"Running query ${i+1}/${queryList.size} with hive.")
+ logWarning(s"Running query ${i+1}/${queryList.size} with hive.")
// Analyze the query with catalyst to ensure test tables are loaded.
val answer = hiveQuery.analyzed match {
case _: ExplainCommand => Nil // No need to execute EXPLAIN queries as we don't check the output.
@@ -351,7 +351,7 @@ abstract class HiveComparisonTest
val resultComparison = sideBySide(hivePrintOut, catalystPrintOut).mkString("\n")
if (recomputeCache) {
- logger.warn(s"Clearing cache files for failed test $testCaseName")
+ logWarning(s"Clearing cache files for failed test $testCaseName")
hiveCacheFiles.foreach(_.delete())
}
@@ -380,7 +380,7 @@ abstract class HiveComparisonTest
TestHive.runSqlHive("SELECT key FROM src")
} catch {
case e: Exception =>
- logger.error(s"FATAL ERROR: Canary query threw $e This implies that the testing environment has likely been corrupted.")
+ logError(s"FATAL ERROR: Canary query threw $e This implies that the testing environment has likely been corrupted.")
// The testing setup traps exits so wait here for a long time so the developer can see when things started
// to go wrong.
Thread.sleep(1000000)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala
index 50ab71a900..02518d5162 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala
@@ -53,7 +53,7 @@ abstract class HiveQueryFileTest extends HiveComparisonTest {
testCases.sorted.foreach {
case (testCaseName, testCaseFile) =>
if (blackList.map(_.r.pattern.matcher(testCaseName).matches()).reduceLeft(_||_)) {
- logger.debug(s"Blacklisted test skipped $testCaseName")
+ logDebug(s"Blacklisted test skipped $testCaseName")
} else if (realWhiteList.map(_.r.pattern.matcher(testCaseName).matches()).reduceLeft(_||_) || runAll) {
// Build a test case and submit it to scala test framework...
val queriesString = fileToString(testCaseFile)
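For reference, the hunk above sits inside logic that decides whether to run a test case by matching its name against regex blacklists and whitelists. A standalone sketch of that matching pattern, with made-up list contents that are not taken from the file:

// Illustrative sketch of the regex-based test filtering seen in
// HiveQueryFileTest; the list contents here are invented, not from the file.
object TestCaseFilterSketch {
  val blackList = Seq("hook_.*", "broken_case")
  val whiteList = Seq(".*") // ".*" means run everything not blacklisted

  def shouldRun(testCaseName: String): Boolean = {
    val blacklisted = blackList.exists(_.r.pattern.matcher(testCaseName).matches())
    val whitelisted = whiteList.exists(_.r.pattern.matcher(testCaseName).matches())
    !blacklisted && whitelisted
  }

  def main(args: Array[String]): Unit = {
    println(shouldRun("hook_context"))  // false: matches the blacklist
    println(shouldRun("simple_select")) // true: whitelisted and not blacklisted
  }
}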