Diffstat (limited to 'core/src/main/scala')
-rw-r--r--  core/src/main/scala/org/apache/spark/Logging.scala                | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkConf.scala              | 23
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala |  2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala             |  9
4 files changed, 26 insertions(+), 20 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala
index 9e0a840b72..efab61e132 100644
--- a/core/src/main/scala/org/apache/spark/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/Logging.scala
@@ -43,7 +43,7 @@ private[spark] trait Logging {
   // Method to get or create the logger for this object
   protected def log: Logger = {
     if (log_ == null) {
-      initializeIfNecessary()
+      initializeLogIfNecessary(false)
       log_ = LoggerFactory.getLogger(logName)
     }
     log_
@@ -95,17 +95,17 @@ private[spark] trait Logging {
     log.isTraceEnabled
   }

-  private def initializeIfNecessary() {
+  protected def initializeLogIfNecessary(isInterpreter: Boolean): Unit = {
     if (!Logging.initialized) {
       Logging.initLock.synchronized {
         if (!Logging.initialized) {
-          initializeLogging()
+          initializeLogging(isInterpreter)
         }
       }
     }
   }

-  private def initializeLogging() {
+  private def initializeLogging(isInterpreter: Boolean): Unit = {
     // Don't use a logger in here, as this is itself occurring during initialization of a logger
     // If Log4j 1.2 is being used, but is not initialized, load a default properties file
     val binderClass = StaticLoggerBinder.getSingleton.getLoggerFactoryClassStr
@@ -127,11 +127,11 @@ private[spark] trait Logging {
       }
     }

-    if (Utils.isInInterpreter) {
+    if (isInterpreter) {
       // Use the repl's main class to define the default log level when running the shell,
       // overriding the root logger's config if they're different.
       val rootLogger = LogManager.getRootLogger()
-      val replLogger = LogManager.getLogger("org.apache.spark.repl.Main")
+      val replLogger = LogManager.getLogger(logName)
       val replLevel = Option(replLogger.getLevel()).getOrElse(Level.WARN)
       if (replLevel != rootLogger.getEffectiveLevel()) {
         System.err.printf("Setting default log level to \"%s\".\n", replLevel)
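
With the interpreter check now an explicit parameter, the shell has to opt in itself. A minimal sketch of such an entry point, assuming code living under org.apache.spark (Logging is private[spark]); the object name and body are illustrative, not the actual repl Main:

    package org.apache.spark.repl

    import org.apache.spark.Logging

    // Hypothetical shell entry point: initialize logging in interpreter
    // mode before anything else touches a logger. logName resolves to
    // this object's class, so the level configured for
    // "org.apache.spark.repl.ReplMain" becomes the shell's default.
    object ReplMain extends Logging {
      def main(args: Array[String]): Unit = {
        initializeLogIfNecessary(true)
        logInfo("Shell starting")
      }
    }
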
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index ff8c631585..0e2d51f9e7 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -47,7 +47,7 @@ import org.apache.spark.util.Utils
  *
  * @param loadDefaults whether to also load values from Java system properties
  */
-class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
+class SparkConf private[spark] (loadDefaults: Boolean) extends Cloneable with Logging {

   import SparkConf._

@@ -57,21 +57,32 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   private val settings = new ConcurrentHashMap[String, String]()

   if (loadDefaults) {
+    loadFromSystemProperties(false)
+  }
+
+  private[spark] def loadFromSystemProperties(silent: Boolean): SparkConf = {
     // Load any spark.* system properties
     for ((key, value) <- Utils.getSystemProperties if key.startsWith("spark.")) {
-      set(key, value)
+      set(key, value, silent)
     }
+    this
   }

   /** Set a configuration variable. */
   def set(key: String, value: String): SparkConf = {
+    set(key, value, false)
+  }
+
+  private[spark] def set(key: String, value: String, silent: Boolean): SparkConf = {
     if (key == null) {
       throw new NullPointerException("null key")
     }
     if (value == null) {
       throw new NullPointerException("null value for " + key)
     }
-    logDeprecationWarning(key)
+    if (!silent) {
+      logDeprecationWarning(key)
+    }
     settings.put(key, value)
     this
   }
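
Together, the private[spark] constructor and the silent set/load path let internal callers build a conf without emitting deprecation warnings, and thus without initializing the logging system as a side effect. A usage sketch, assuming spark-internal code and an illustrative property name:

    // Hypothetical internal caller: load spark.* system properties
    // silently; no deprecation warning fires, so log4j stays untouched.
    sys.props("spark.app.name") = "silent-demo"
    val conf = new SparkConf(false).loadFromSystemProperties(true)
    assert(conf.get("spark.app.name") == "silent-demo")
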
@@ -395,7 +406,11 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {

   /** Copy this object */
   override def clone: SparkConf = {
-    new SparkConf(false).setAll(getAll)
+    val cloned = new SparkConf(false)
+    settings.entrySet().asScala.foreach { e =>
+      cloned.set(e.getKey(), e.getValue(), true)
+    }
+    cloned
   }

   /**
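
The clone rewrite closes the same hole: the old setAll(getAll) went through the public set(), re-logging a deprecation warning for every deprecated key being copied. A sketch of the observable difference; treat the key name as merely illustrative of SparkConf's deprecation table:

    // Hypothetical: conf contains a deprecated key such as
    // "spark.yarn.applicationMaster.waitTries". Cloning used to warn
    // about it again; the entry-by-entry silent copy stays quiet while
    // reproducing every setting.
    val copy = conf.clone
    assert(copy.getAll.sorted.sameElements(conf.getAll.sorted))
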
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 06b5101b1f..270ca84e24 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -45,7 +45,7 @@ import org.apache.spark.util.Utils
  */
 @DeveloperApi
 class SparkHadoopUtil extends Logging {
-  private val sparkConf = new SparkConf()
+  private val sparkConf = new SparkConf(false).loadFromSystemProperties(true)
   val conf: Configuration = newConfiguration(sparkConf)
   UserGroupInformation.setConfiguration(conf)

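
This is the call site the change exists for: SparkHadoopUtil is reached through a static holder, often before any user code runs, so its embedded SparkConf must not log while being built. A hedged illustration of the ordering problem the silent load avoids:

    // Hypothetical: touching the Hadoop configuration early used to
    // construct `new SparkConf()`, which could log deprecation warnings
    // and freeze log4j's setup before the shell chose its default level.
    val hadoopConf = org.apache.spark.deploy.SparkHadoopUtil.get.conf
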
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 37c6c9bf90..63b9d34b79 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1820,15 +1820,6 @@ private[spark] object Utils extends Logging {
     }
   }

-  lazy val isInInterpreter: Boolean = {
-    try {
-      val interpClass = classForName("org.apache.spark.repl.Main")
-      interpClass.getMethod("interp").invoke(null) != null
-    } catch {
-      case _: ClassNotFoundException => false
-    }
-  }
-
   /**
    * Return a well-formed URI for the file described by a user input string.
    *
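
The deleted probe was also fragile on its own terms: it caught only ClassNotFoundException, so any other reflection failure (a repl class without an accessible interp method, say) would escape at first use. A sketch of a hardened equivalent, purely illustrative since this change removes the probe rather than fixing it:

    import scala.util.Try

    // Hypothetical hardened version of the removed check: any reflection
    // failure, not just a missing class, falls back to false.
    lazy val isInInterpreter: Boolean = Try {
      val interpClass = Class.forName("org.apache.spark.repl.Main")
      interpClass.getMethod("interp").invoke(null) != null
    }.getOrElse(false)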