author     Ryan Williams <ryan.blake.williams@gmail.com>    2015-02-17 10:57:16 -0800
committer  Patrick Wendell <patrick@databricks.com>         2015-02-17 10:57:16 -0800
commit     d8f69cf78862d13a48392a0b94388b8d403523da
tree       f95761f507fb6fdc16b73e4c5f07a52e8c82d17f
parent     d8adefefcc2a4af32295440ed1d4917a6968f017
[SPARK-5778] throw if nonexistent metrics config file provided
Previous behavior was to log an error. That is fine in the general case, where no `spark.metrics.conf` parameter is specified: a default `metrics.properties` is looked for, and the exception is logged and suppressed if it doesn't exist. If the user has purposefully specified a metrics config file, however, it makes more sense to show them an error when that file doesn't exist.

Author: Ryan Williams <ryan.blake.williams@gmail.com>

Closes #4571 from ryan-williams/metrics and squashes the following commits:

5bccb14 [Ryan Williams] private-ize some MetricsConfig members
08ff998 [Ryan Williams] rename METRICS_CONF: DEFAULT_METRICS_CONF_FILENAME
f4d7fab [Ryan Williams] fix tests
ad24b0e [Ryan Williams] add "metrics.properties" to .rat-excludes
94e810b [Ryan Williams] throw if nonexistent Sink class is specified
31d2c30 [Ryan Williams] metrics code review feedback
56287db [Ryan Williams] throw if nonexistent metrics config file provided
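A minimal sketch of the loading contract this change establishes (not the actual MetricsConfig code; the MetricsConfigSketch name and load method are hypothetical): an explicitly configured file that is missing fails fast with FileInputStream's FileNotFoundException, while the implicit classpath default may be silently absent.

import java.io.{FileInputStream, InputStream}
import java.util.Properties

object MetricsConfigSketch {
  private val DefaultMetricsConfFilename = "metrics.properties"

  def load(configFile: Option[String]): Properties = {
    val props = new Properties()
    // Explicitly configured file: a missing path throws FileNotFoundException from
    // the FileInputStream constructor rather than being logged and swallowed.
    // No explicit file: fall back to the bundled default, which may legitimately be absent.
    val isOpt: Option[InputStream] =
      configFile.map(new FileInputStream(_)).orElse {
        Option(getClass.getClassLoader.getResourceAsStream(DefaultMetricsConfFilename))
      }
    // Load whichever stream was found, always closing it afterwards.
    isOpt.foreach { is =>
      try props.load(is) finally is.close()
    }
    props
  }
}

Under this contract, new MetricsConfig(Some("no-such-file")) surfaces the error at initialization, while new MetricsConfig(None) keeps the old silent fallback to defaults; the MetricsConfigSuite change below reflects that, replacing Option("dummy-file") with None in the default-properties test.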
-rw-r--r--  .rat-excludes                                                              1
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala          32
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala           5
-rw-r--r--  core/src/test/resources/test_metrics_system.properties                     2
-rw-r--r--  core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala      2
5 files changed, 23 insertions(+), 19 deletions(-)
diff --git a/.rat-excludes b/.rat-excludes
index a788e8273d..8c61e67a0c 100644
--- a/.rat-excludes
+++ b/.rat-excludes
@@ -19,6 +19,7 @@ fairscheduler.xml.template
spark-defaults.conf.template
log4j.properties
log4j.properties.template
+metrics.properties
metrics.properties.template
slaves
slaves.template
diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
index 1b7a5d1f19..8edf493780 100644
--- a/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
@@ -28,12 +28,12 @@ import org.apache.spark.util.Utils
private[spark] class MetricsConfig(val configFile: Option[String]) extends Logging {
- val DEFAULT_PREFIX = "*"
- val INSTANCE_REGEX = "^(\\*|[a-zA-Z]+)\\.(.+)".r
- val METRICS_CONF = "metrics.properties"
+ private val DEFAULT_PREFIX = "*"
+ private val INSTANCE_REGEX = "^(\\*|[a-zA-Z]+)\\.(.+)".r
+ private val DEFAULT_METRICS_CONF_FILENAME = "metrics.properties"
- val properties = new Properties()
- var propertyCategories: mutable.HashMap[String, Properties] = null
+ private[metrics] val properties = new Properties()
+ private[metrics] var propertyCategories: mutable.HashMap[String, Properties] = null
private def setDefaultProperties(prop: Properties) {
prop.setProperty("*.sink.servlet.class", "org.apache.spark.metrics.sink.MetricsServlet")
@@ -47,20 +47,22 @@ private[spark] class MetricsConfig(val configFile: Option[String]) extends Loggi
setDefaultProperties(properties)
// If spark.metrics.conf is not set, try to get file in class path
- var is: InputStream = null
- try {
- is = configFile match {
- case Some(f) => new FileInputStream(f)
- case None => Utils.getSparkClassLoader.getResourceAsStream(METRICS_CONF)
+ val isOpt: Option[InputStream] = configFile.map(new FileInputStream(_)).orElse {
+ try {
+ Option(Utils.getSparkClassLoader.getResourceAsStream(DEFAULT_METRICS_CONF_FILENAME))
+ } catch {
+ case e: Exception =>
+ logError("Error loading default configuration file", e)
+ None
}
+ }
- if (is != null) {
+ isOpt.foreach { is =>
+ try {
properties.load(is)
+ } finally {
+ is.close()
}
- } catch {
- case e: Exception => logError("Error loading configure file", e)
- } finally {
- if (is != null) is.close()
}
propertyCategories = subProperties(properties, INSTANCE_REGEX)
diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
index 83e8eb7126..345db36630 100644
--- a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
@@ -191,7 +191,10 @@ private[spark] class MetricsSystem private (
sinks += sink.asInstanceOf[Sink]
}
} catch {
- case e: Exception => logError("Sink class " + classPath + " cannot be instantialized", e)
+ case e: Exception => {
+ logError("Sink class " + classPath + " cannot be instantialized")
+ throw e
+ }
}
}
}
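The MetricsSystem hunk above makes the second behavior change from the squashed commits: a nonexistent Sink class now propagates instead of being quietly dropped. A rough sketch of the log-and-rethrow pattern, assuming a hypothetical instantiateSink helper with a no-argument constructor (the real Spark sinks take constructor arguments, omitted here):

import java.util.logging.Logger

object SinkLoadSketch {
  private val log = Logger.getLogger(getClass.getName)

  // Reflectively create the configured sink; log for context, then rethrow so a
  // misspelled or missing sink class fails metrics start-up instead of vanishing.
  def instantiateSink(classPath: String): AnyRef = {
    try {
      Class.forName(classPath).getDeclaredConstructor().newInstance().asInstanceOf[AnyRef]
    } catch {
      case e: Exception =>
        log.severe(s"Sink class $classPath cannot be instantiated")
        throw e
    }
  }
}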
diff --git a/core/src/test/resources/test_metrics_system.properties b/core/src/test/resources/test_metrics_system.properties
index 35d0bd3b8d..4e8b846569 100644
--- a/core/src/test/resources/test_metrics_system.properties
+++ b/core/src/test/resources/test_metrics_system.properties
@@ -18,7 +18,5 @@
*.sink.console.period = 10
*.sink.console.unit = seconds
test.sink.console.class = org.apache.spark.metrics.sink.ConsoleSink
-test.sink.dummy.class = org.apache.spark.metrics.sink.DummySink
-test.source.dummy.class = org.apache.spark.metrics.source.DummySource
test.sink.console.period = 20
test.sink.console.unit = minutes
diff --git a/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
index 1a9ce8c607..37e528435a 100644
--- a/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
@@ -27,7 +27,7 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
}
test("MetricsConfig with default properties") {
- val conf = new MetricsConfig(Option("dummy-file"))
+ val conf = new MetricsConfig(None)
conf.initialize()
assert(conf.properties.size() === 4)