diff options
author | Xin Wu <xinwu@us.ibm.com> | 2016-06-03 14:26:48 -0700 |
---|---|---|
committer | Marcelo Vanzin <vanzin@cloudera.com> | 2016-06-03 14:26:48 -0700 |
commit | 28ad0f7b0dc7bf24fac251c4f131aca74ba1c1d2 (patch) | |
tree | 5c5053c797f3c7d4184a53949970cf2229a97cb6 /core/src/main | |
parent | 61b80d552aafb262b5f817f7bc9c0acd0328715b (diff) | |
download | spark-28ad0f7b0dc7bf24fac251c4f131aca74ba1c1d2.tar.gz spark-28ad0f7b0dc7bf24fac251c4f131aca74ba1c1d2.tar.bz2 spark-28ad0f7b0dc7bf24fac251c4f131aca74ba1c1d2.zip |
[SPARK-15681][CORE] allow lowercase or mixed case log level string when calling sc.setLogLevel
## What changes were proposed in this pull request?
Currently the `SparkContext` API `setLogLevel(level: String)` cannot handle a lowercase or mixed-case input string, even though `org.apache.log4j.Level.toLevel` accepts lowercase and mixed case.
This PR is to allow case-insensitive user input for the log level.
## How was this patch tested?
A unit test case is added.
Author: Xin Wu <xinwu@us.ibm.com>
Closes #13422 from xwu0226/reset_loglevel.
Diffstat (limited to 'core/src/main')
-rw-r--r-- | core/src/main/scala/org/apache/spark/SparkContext.scala | 16 |
1 files changed, 9 insertions, 7 deletions
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala index 5aba2a8c94..33b11ed2e6 100644 --- a/core/src/main/scala/org/apache/spark/SparkContext.scala +++ b/core/src/main/scala/org/apache/spark/SparkContext.scala @@ -20,7 +20,7 @@ package org.apache.spark import java.io._ import java.lang.reflect.Constructor import java.net.URI -import java.util.{Arrays, Properties, ServiceLoader, UUID} +import java.util.{Arrays, Locale, Properties, ServiceLoader, UUID} import java.util.concurrent.ConcurrentMap import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger, AtomicReference} @@ -356,12 +356,12 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli * Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN */ def setLogLevel(logLevel: String) { - val validLevels = Seq("ALL", "DEBUG", "ERROR", "FATAL", "INFO", "OFF", "TRACE", "WARN") - if (!validLevels.contains(logLevel)) { - throw new IllegalArgumentException( - s"Supplied level $logLevel did not match one of: ${validLevels.mkString(",")}") - } - Utils.setLogLevel(org.apache.log4j.Level.toLevel(logLevel)) + // let's allow lowcase or mixed case too + val upperCased = logLevel.toUpperCase(Locale.ENGLISH) + require(SparkContext.VALID_LOG_LEVELS.contains(upperCased), + s"Supplied level $logLevel did not match one of:" + + s" ${SparkContext.VALID_LOG_LEVELS.mkString(",")}") + Utils.setLogLevel(org.apache.log4j.Level.toLevel(upperCased)) } try { @@ -2179,6 +2179,8 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli * various Spark features. */ object SparkContext extends Logging { + private val VALID_LOG_LEVELS = + Set("ALL", "DEBUG", "ERROR", "FATAL", "INFO", "OFF", "TRACE", "WARN") /** * Lock that guards access to global variables that track SparkContext construction. |