diff options
author | Patrick Wendell <pwendell@gmail.com> | 2013-07-31 21:35:12 -0700 |
---|---|---|
committer | Patrick Wendell <pwendell@gmail.com> | 2013-07-31 21:35:12 -0700 |
commit | 5cc725a0e3ef523affae8ff54dd74707e49d64e3 (patch) | |
tree | ebd1698333d2df4194f17a9ea93a2f2eac2c7acd /conf/metrics.properties.template | |
parent | b7b627d5bb1a1331ea580950834533f84735df4c (diff) | |
parent | f3cf09491a2b63e19a15e98cf815da503e4fb69b (diff) | |
download | spark-5cc725a0e3ef523affae8ff54dd74707e49d64e3.tar.gz spark-5cc725a0e3ef523affae8ff54dd74707e49d64e3.tar.bz2 spark-5cc725a0e3ef523affae8ff54dd74707e49d64e3.zip |
Merge branch 'master' into ec2-updates
Conflicts:
ec2/deploy.generic/root/mesos-ec2/ec2-variables.sh
Diffstat (limited to 'conf/metrics.properties.template')
-rw-r--r-- | conf/metrics.properties.template | 87 |
1 files changed, 87 insertions, 0 deletions
diff --git a/conf/metrics.properties.template b/conf/metrics.properties.template new file mode 100644 index 0000000000..0486ca4c79 --- /dev/null +++ b/conf/metrics.properties.template @@ -0,0 +1,87 @@ +# syntax: [instance].[sink|source].[name].[options] + +# "instance" specifies "who" (the role) uses the metrics system. In Spark there are +# several roles, like master, worker, executor, and driver; these roles will +# create a metrics system for monitoring. So instance represents these roles. +# Currently in Spark, several instances have already been implemented: master, +# worker, executor, driver. +# +# The [instance] field can be "master", "worker", "executor", or "driver", which means +# only the specified instance has this property. +# A wild card "*" can be used to represent the instance name, which means all the +# instances will have this property. +# +# "source" specifies "where" (the source) to collect metrics data from. In the metrics system, +# there exist two kinds of source: +# 1. Spark internal sources, like MasterSource, WorkerSource, etc., which +# collect a Spark component's internal state; these sources are related to the +# instance and will be added after the specific metrics system is created. +# 2. Common sources, like JvmSource, which collect low-level state, are +# configured by configuration and loaded through reflection. +# +# "sink" specifies "where" (the destination) to output metrics data to. Several sinks +# can coexist, and metrics are flushed to all these sinks. +# +# The [sink|source] field specifies whether this property is source-related or sink-related; this +# field can only be "source" or "sink". +# +# The [name] field specifies the name of the source or sink; this is custom defined. +# +# The [options] field is the specific property of this source or sink; this source +# or sink is responsible for parsing this property. +# +# Notes: +# 1. Sinks should be added through configuration, like the console sink; the class's +# full name should be specified by the "class" property. +# 2. 
Some sinks can specify a polling period, like the console sink, whose period is 10 seconds; +# note that the minimal polling period is 1 second — any period +# below 1s is illegal. +# 3. A wild card property can be overridden by a specific instance property; for +# example, *.sink.console.period can be overridden by master.sink.console.period. +# 4. A metrics-specific configuration +# "spark.metrics.conf=${SPARK_HOME}/conf/metrics.properties" should be +# added as a Java property using -Dspark.metrics.conf=xxx if you want to +# customize the metrics system, or you can put it in ${SPARK_HOME}/conf; +# the metrics system will search for and load it automatically. + +# Enable JmxSink for all instances by class name +#*.sink.jmx.class=spark.metrics.sink.JmxSink + +# Enable ConsoleSink for all instances by class name +#*.sink.console.class=spark.metrics.sink.ConsoleSink + +# Polling period for ConsoleSink +#*.sink.console.period=10 + +#*.sink.console.unit=seconds + +# Master instance overrides the polling period +#master.sink.console.period=15 + +#master.sink.console.unit=seconds + +# Enable CsvSink for all instances +#*.sink.csv.class=spark.metrics.sink.CsvSink + +# Polling period for CsvSink +#*.sink.csv.period=1 + +#*.sink.csv.unit=minutes + +# Polling directory for CsvSink +#*.sink.csv.directory=/tmp/ + +# Worker instance overrides the polling period +#worker.sink.csv.period=10 + +#worker.sink.csv.unit=minutes + +# Enable jvm source for instance master, worker, driver and executor +#master.source.jvm.class=spark.metrics.source.JvmSource + +#worker.source.jvm.class=spark.metrics.source.JvmSource + +#driver.source.jvm.class=spark.metrics.source.JvmSource + +#executor.source.jvm.class=spark.metrics.source.JvmSource + |