author     Marcelo Vanzin <vanzin@cloudera.com>    2016-03-14 14:27:33 -0700
committer  Marcelo Vanzin <vanzin@cloudera.com>    2016-03-14 14:27:33 -0700
commit     8301fadd8d269da11e72870b7a889596e3337839 (patch)
tree       74c42c6c99d0438f3669acdae8982f3632259254 /core
parent     38529d8f2350feb1f143aab0be336050c0f887f2 (diff)
[SPARK-13626][CORE] Avoid duplicate config deprecation warnings.
Three different things were needed to get rid of spurious warnings:

- silence deprecation warnings when cloning configuration
- change the way SparkHadoopUtil instantiates SparkConf to silence warnings
- avoid creating new SparkConf instances where it's not needed.

On top of that, I changed the way that Logging.scala detects the repl; now it uses a method that is overridden in the repl's Main class, and the hack in Utils.scala is not needed anymore. This makes the 2.11 repl behave like the 2.10 one and set the default log level to WARN, which is a lot better. Previously, this wasn't working because the 2.11 repl triggers log initialization earlier than the 2.10 one.

I also removed and simplified some other code in the 2.11 repl's Main to avoid replicating logic that already exists elsewhere in Spark.

Tested the 2.11 repl in local and yarn modes.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #11510 from vanzin/SPARK-13626.
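The repl-side counterpart lives outside core and is not shown in this diff; as a rough sketch of the idea (structure assumed, not taken from this patch), the repl's Main object mixes in Logging and simply calls the new hook before any logger is created, so core no longer has to probe for the repl via reflection:

    // Hypothetical sketch of the repl side; the real org.apache.spark.repl.Main
    // is not part of this core-only diff. Assumes the object sits under the
    // org.apache.spark package so it can see the private[spark] Logging trait.
    object Main extends Logging {

      // Initialize logging with isInterpreter = true so the shell gets the
      // WARN default unless the repl logger is configured otherwise.
      initializeLogIfNecessary(true)

      def main(args: Array[String]): Unit = {
        logInfo("Starting Spark shell")
        // ... build the interpreter and SparkContext as before ...
      }
    }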
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/org/apache/spark/Logging.scala                 12
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkConf.scala               23
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala   2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala               9
4 files changed, 26 insertions(+), 20 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala
index 9e0a840b72..efab61e132 100644
--- a/core/src/main/scala/org/apache/spark/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/Logging.scala
@@ -43,7 +43,7 @@ private[spark] trait Logging {
// Method to get or create the logger for this object
protected def log: Logger = {
if (log_ == null) {
- initializeIfNecessary()
+ initializeLogIfNecessary(false)
log_ = LoggerFactory.getLogger(logName)
}
log_
@@ -95,17 +95,17 @@ private[spark] trait Logging {
log.isTraceEnabled
}
- private def initializeIfNecessary() {
+ protected def initializeLogIfNecessary(isInterpreter: Boolean): Unit = {
if (!Logging.initialized) {
Logging.initLock.synchronized {
if (!Logging.initialized) {
- initializeLogging()
+ initializeLogging(isInterpreter)
}
}
}
}
- private def initializeLogging() {
+ private def initializeLogging(isInterpreter: Boolean): Unit = {
// Don't use a logger in here, as this is itself occurring during initialization of a logger
// If Log4j 1.2 is being used, but is not initialized, load a default properties file
val binderClass = StaticLoggerBinder.getSingleton.getLoggerFactoryClassStr
@@ -127,11 +127,11 @@ private[spark] trait Logging {
}
}
- if (Utils.isInInterpreter) {
+ if (isInterpreter) {
// Use the repl's main class to define the default log level when running the shell,
// overriding the root logger's config if they're different.
val rootLogger = LogManager.getRootLogger()
- val replLogger = LogManager.getLogger("org.apache.spark.repl.Main")
+ val replLogger = LogManager.getLogger(logName)
val replLevel = Option(replLogger.getLevel()).getOrElse(Level.WARN)
if (replLevel != rootLogger.getEffectiveLevel()) {
System.err.printf("Setting default log level to \"%s\".\n", replLevel)
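Because replLogger is now looked up via logName instead of the hard-coded "org.apache.spark.repl.Main", the default level comes from whichever class mixed in Logging and triggered initialization with isInterpreter = true, i.e. the repl's own Main. For reference, logName elsewhere in the trait is derived from the concrete class name; sketched here from memory, not from this hunk:

    // Not part of this diff: logName as defined earlier in the Logging trait,
    // shown only to explain what getLogger(logName) resolves to.
    protected def logName = {
      // Ignore trailing $'s in the class names for Scala objects
      this.getClass.getName.stripSuffix("$")
    }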
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index ff8c631585..0e2d51f9e7 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -47,7 +47,7 @@ import org.apache.spark.util.Utils
*
* @param loadDefaults whether to also load values from Java system properties
*/
-class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
+class SparkConf private[spark] (loadDefaults: Boolean) extends Cloneable with Logging {
import SparkConf._
@@ -57,21 +57,32 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
private val settings = new ConcurrentHashMap[String, String]()
if (loadDefaults) {
+ loadFromSystemProperties(false)
+ }
+
+ private[spark] def loadFromSystemProperties(silent: Boolean): SparkConf = {
// Load any spark.* system properties
for ((key, value) <- Utils.getSystemProperties if key.startsWith("spark.")) {
- set(key, value)
+ set(key, value, silent)
}
+ this
}
/** Set a configuration variable. */
def set(key: String, value: String): SparkConf = {
+ set(key, value, false)
+ }
+
+ private[spark] def set(key: String, value: String, silent: Boolean): SparkConf = {
if (key == null) {
throw new NullPointerException("null key")
}
if (value == null) {
throw new NullPointerException("null value for " + key)
}
- logDeprecationWarning(key)
+ if (!silent) {
+ logDeprecationWarning(key)
+ }
settings.put(key, value)
this
}
@@ -395,7 +406,11 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
/** Copy this object */
override def clone: SparkConf = {
- new SparkConf(false).setAll(getAll)
+ val cloned = new SparkConf(false)
+ settings.entrySet().asScala.foreach { e =>
+ cloned.set(e.getKey(), e.getValue(), true)
+ }
+ cloned
}
/**
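The net effect is that clone copies every entry through the silent set, so a deprecated key warns when it is first set but not again when the conf is cloned. A minimal sketch of the intended behaviour (only compiles inside the org.apache.spark package, since the Boolean constructor is now private[spark]; "spark.some.deprecated.key" is a hypothetical placeholder for any key in deprecatedConfigs):

    val conf = new SparkConf(false)
    conf.set("spark.some.deprecated.key", "value")  // logDeprecationWarning fires once here
    val copy = conf.clone                           // entries copied with silent = true, no repeat warning
    assert(copy.get("spark.some.deprecated.key") == "value")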
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 06b5101b1f..270ca84e24 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -45,7 +45,7 @@ import org.apache.spark.util.Utils
*/
@DeveloperApi
class SparkHadoopUtil extends Logging {
- private val sparkConf = new SparkConf()
+ private val sparkConf = new SparkConf(false).loadFromSystemProperties(true)
val conf: Configuration = newConfiguration(sparkConf)
UserGroupInformation.setConfiguration(conf)
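For context, this swaps the default SparkConf() path (which scans system properties and logs deprecation warnings a second time) for an explicit, silent load; a commented sketch of the same line, assuming the private[spark] API introduced above:

    // new SparkConf(false): start empty, skip the eager system-property scan.
    // loadFromSystemProperties(true): pull in spark.* JVM properties with
    //   silent = true, so warnings already logged at application startup are
    //   not duplicated when SparkHadoopUtil is initialized.
    private val sparkConf = new SparkConf(false).loadFromSystemProperties(true)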
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 37c6c9bf90..63b9d34b79 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1820,15 +1820,6 @@ private[spark] object Utils extends Logging {
}
}
- lazy val isInInterpreter: Boolean = {
- try {
- val interpClass = classForName("org.apache.spark.repl.Main")
- interpClass.getMethod("interp").invoke(null) != null
- } catch {
- case _: ClassNotFoundException => false
- }
- }
-
/**
* Return a well-formed URI for the file described by a user input string.
*