author    Sandy Ryza <sandy@cloudera.com>      2015-01-30 11:31:54 -0600
committer Thomas Graves <tgraves@apache.org>   2015-01-30 11:31:54 -0600
commit    254eaa4d350dafe19f1715e80eb816856a126c21 (patch)
tree      812b82129636867995d8bbf197495fb9581a5967 /core
parent    6f21dce5f4619e1a5d07028e2a74dc36be0849b9 (diff)
SPARK-5393. Flood of util.RackResolver log messages after SPARK-1714
Previously I had tried to solve this by adding a line in Spark's log4j-defaults.properties. The issue with that line was that the log4j.properties packaged inside Hadoop was getting picked up instead. While it would be ideal to fix that as well, we still want to quiet this in situations where a user supplies their own custom log4j properties.

Author: Sandy Ryza <sandy@cloudera.com>

Closes #4192 from sryza/sandy-spark-5393 and squashes the following commits:

4d5dedc [Sandy Ryza] Only set log level if unset
46e07c5 [Sandy Ryza] SPARK-5393. Flood of util.RackResolver log messages after SPARK-1714
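The replacement fix (commit 4d5dedc, "Only set log level if unset") quiets the logger from code instead, guarded so that any explicitly configured level, including one from a user-supplied log4j properties file, takes precedence. The guarded call lives in the YARN scheduler code and so does not appear in this core-limited diff; a minimal sketch of the idea, assuming the log4j 1.x API Spark used at the time (rackLogger is an illustrative name):

import org.apache.log4j.{Level, Logger}

// Quiet RackResolver, which logs an INFO message for every rack lookup,
// but only if no configuration (Spark's defaults, Hadoop's bundled
// log4j.properties, or a user-supplied file) has already set a level:
// getLevel returns null when no level was explicitly configured.
val rackLogger = Logger.getLogger("org.apache.hadoop.yarn.util.RackResolver")
if (rackLogger.getLevel == null) {
  rackLogger.setLevel(Level.WARN)
}

Checking getLevel == null rather than calling setLevel unconditionally is what makes this safe against the problem described above: an explicit level from any log4j.properties, whoever ships it, is never overridden.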
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/resources/org/apache/spark/log4j-defaults.properties            | 1 -
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala                       | 2 +-
-rw-r--r--  core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala | 2 +-
3 files changed, 2 insertions(+), 3 deletions(-)
diff --git a/core/src/main/resources/org/apache/spark/log4j-defaults.properties b/core/src/main/resources/org/apache/spark/log4j-defaults.properties
index c99a61f63e..89eec7d4b7 100644
--- a/core/src/main/resources/org/apache/spark/log4j-defaults.properties
+++ b/core/src/main/resources/org/apache/spark/log4j-defaults.properties
@@ -10,4 +10,3 @@ log4j.logger.org.eclipse.jetty=WARN
 log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR
 log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
 log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
-log4j.logger.org.apache.hadoop.yarn.util.RackResolver=WARN
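With this line removed from Spark's bundled defaults, a user who wants RackResolver quieted unconditionally can still do so in their own log4j configuration; for example, a user-supplied log4j.properties could carry the equivalent line:

# Explicitly silence per-lookup INFO messages from YARN's RackResolver.
# An explicit level here counts as "set", so the in-code default leaves it alone.
log4j.logger.org.apache.hadoop.yarn.util.RackResolver=WARN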
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 4c4ee04cc5..3c61c10820 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1986,7 +1986,7 @@ object SparkContext extends Logging {
case "yarn-client" =>
val scheduler = try {
val clazz =
- Class.forName("org.apache.spark.scheduler.cluster.YarnClientClusterScheduler")
+ Class.forName("org.apache.spark.scheduler.cluster.YarnScheduler")
val cons = clazz.getConstructor(classOf[SparkContext])
cons.newInstance(sc).asInstanceOf[TaskSchedulerImpl]
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
index 8ae4f243ec..bbed8ddc6b 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
@@ -149,7 +149,7 @@ class SparkContextSchedulerCreationSuite
   }

   test("yarn-client") {
-    testYarn("yarn-client", "org.apache.spark.scheduler.cluster.YarnClientClusterScheduler")
+    testYarn("yarn-client", "org.apache.spark.scheduler.cluster.YarnScheduler")
   }

   def testMesos(master: String, expectedClass: Class[_], coarse: Boolean) {