aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorIvan Topolnjak <ivantopo@gmail.com>2014-07-29 00:56:39 -0300
committerIvan Topolnjak <ivantopo@gmail.com>2014-07-29 00:56:39 -0300
commit96f31cbac82bf350db61e4b48ea95bb2706cc02b (patch)
tree482a479a898e77f2484a48daeafae962613bd468
parentec97bccd2346aa8c50997ec1f198a5eb6c79eab5 (diff)
parente83babb5f88e91661bec2f1013fcb6b03612bea9 (diff)
downloadKamon-96f31cbac82bf350db61e4b48ea95bb2706cc02b.tar.gz
Kamon-96f31cbac82bf350db61e4b48ea95bb2706cc02b.tar.bz2
Kamon-96f31cbac82bf350db61e4b48ea95bb2706cc02b.zip
Merge branch 'master' into release-0.3_scala-2.11
Conflicts: .travis.yml project/Settings.scala
-rw-r--r--.travis.yml6
-rw-r--r--kamon-core/src/main/java/kamon/util/Example.java8
-rw-r--r--kamon-core/src/main/resources/META-INF/aop.xml25
-rw-r--r--kamon-core/src/main/resources/reference.conf59
-rw-r--r--kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala92
-rw-r--r--kamon-core/src/main/scala/kamon/instrumentation/AspectJWeaverMissingWarning.scala17
-rw-r--r--kamon-core/src/main/scala/kamon/instrumentation/akka/ActorCellInstrumentation.scala (renamed from kamon-core/src/main/scala/akka/instrumentation/ActorMessagePassingTracing.scala)62
-rw-r--r--kamon-core/src/main/scala/kamon/instrumentation/akka/ActorLoggingInstrumentation.scala (renamed from kamon-core/src/main/scala/kamon/instrumentation/ActorLoggingTracing.scala)12
-rw-r--r--kamon-core/src/main/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentation.scala (renamed from kamon-core/src/main/scala/akka/instrumentation/ActorSystemMessagePassingTracing.scala)59
-rw-r--r--kamon-core/src/main/scala/kamon/instrumentation/akka/AskPatternInstrumentation.scala (renamed from kamon-core/src/main/scala/akka/instrumentation/AskPatternTracing.scala)17
-rw-r--r--kamon-core/src/main/scala/kamon/instrumentation/akka/DispatcherInstrumentation.scala (renamed from kamon-core/src/main/scala/akka/instrumentation/DispatcherTracing.scala)34
-rw-r--r--kamon-core/src/main/scala/kamon/instrumentation/hdrhistogram/AtomicHistogramFieldsAccessor.scala (renamed from kamon-core/src/main/scala/kamon/metrics/instruments/CounterRecorder.scala)29
-rw-r--r--kamon-core/src/main/scala/kamon/instrumentation/scala/FutureInstrumentation.scala (renamed from kamon-core/src/main/scala/kamon/instrumentation/FutureTracing.scala)37
-rw-r--r--kamon-core/src/main/scala/kamon/metric/ActorMetrics.scala89
-rw-r--r--kamon-core/src/main/scala/kamon/metric/DispatcherMetrics.scala88
-rw-r--r--kamon-core/src/main/scala/kamon/metric/EntityMetrics.scala75
-rw-r--r--kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala (renamed from kamon-core/src/main/scala/kamon/metrics/MetricsExtension.scala)52
-rw-r--r--kamon-core/src/main/scala/kamon/metric/Scale.scala (renamed from kamon-core/src/main/scala/kamon/metrics/Scale.scala)2
-rw-r--r--kamon-core/src/main/scala/kamon/metric/Subscriptions.scala173
-rw-r--r--kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala77
-rw-r--r--kamon-core/src/main/scala/kamon/metric/UserMetrics.scala163
-rw-r--r--kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala59
-rw-r--r--kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala82
-rw-r--r--kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala263
-rw-r--r--kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala116
-rw-r--r--kamon-core/src/main/scala/kamon/metric/package.scala (renamed from kamon-core/src/main/scala/kamon/metrics/package.scala)7
-rw-r--r--kamon-core/src/main/scala/kamon/metrics/ActorMetrics.scala70
-rw-r--r--kamon-core/src/main/scala/kamon/metrics/CustomMetric.scala52
-rw-r--r--kamon-core/src/main/scala/kamon/metrics/DispatcherMetrics.scala71
-rw-r--r--kamon-core/src/main/scala/kamon/metrics/Metrics.scala121
-rw-r--r--kamon-core/src/main/scala/kamon/metrics/Subscriptions.scala129
-rw-r--r--kamon-core/src/main/scala/kamon/metrics/TraceMetrics.scala66
-rw-r--r--kamon-core/src/main/scala/kamon/metrics/instruments/ContinuousHdrRecorder.scala52
-rw-r--r--kamon-core/src/main/scala/kamon/metrics/instruments/HdrRecorder.scala78
-rw-r--r--kamon-core/src/main/scala/kamon/metrics/instruments/MinMaxCounter.scala58
-rw-r--r--kamon-core/src/main/scala/kamon/standalone/KamonStandalone.scala61
-rw-r--r--kamon-core/src/main/scala/kamon/trace/TraceContext.scala8
-rw-r--r--kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala9
-rw-r--r--kamon-core/src/test/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala (renamed from kamon-core/src/test/scala/kamon/trace/instrumentation/ActorMessagePassingTracingSpec.scala)18
-rw-r--r--kamon-core/src/test/scala/kamon/instrumentation/akka/ActorLoggingInstrumentationSpec.scala (renamed from kamon-core/src/test/scala/kamon/trace/instrumentation/ActorLoggingSpec.scala)11
-rw-r--r--kamon-core/src/test/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentationSpec.scala (renamed from kamon-core/src/test/scala/kamon/trace/instrumentation/ActorSystemMessagePassingInstrumentationSpec.scala)17
-rw-r--r--kamon-core/src/test/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala (renamed from kamon-core/src/test/scala/kamon/trace/instrumentation/AskPatternTracingSpec.scala)15
-rw-r--r--kamon-core/src/test/scala/kamon/instrumentation/scala/FutureInstrumentationSpec.scala (renamed from kamon-core/src/test/scala/kamon/trace/instrumentation/FutureTracingSpec.scala)15
-rw-r--r--kamon-core/src/test/scala/kamon/metric/ActorMetricsSpec.scala205
-rw-r--r--kamon-core/src/test/scala/kamon/metric/DispatcherMetricsSpec.scala (renamed from kamon-core/src/test/scala/kamon/metrics/DispatcherMetricsSpec.scala)17
-rw-r--r--kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala133
-rw-r--r--kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala (renamed from kamon-core/src/test/scala/kamon/metrics/TickMetricSnapshotBufferSpec.scala)68
-rw-r--r--kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala95
-rw-r--r--kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala303
-rw-r--r--kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala55
-rw-r--r--kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala72
-rw-r--r--kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala130
-rw-r--r--kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala108
-rw-r--r--kamon-core/src/test/scala/kamon/metrics/ActorMetricsSpec.scala172
-rw-r--r--kamon-core/src/test/scala/kamon/metrics/CustomMetricSpec.scala78
-rw-r--r--kamon-core/src/test/scala/kamon/metrics/MetricSnapshotSpec.scala72
-rw-r--r--kamon-core/src/test/scala/kamon/metrics/instrument/MinMaxCounterSpec.scala110
-rw-r--r--kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala95
-rw-r--r--kamon-datadog/src/main/resources/reference.conf4
-rw-r--r--kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala27
-rw-r--r--kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala54
-rw-r--r--kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala70
-rw-r--r--kamon-examples/kamon-play-example/app/Global.scala (renamed from kamon-examples/kamon-play-newrelic-example/app/controllers/NewRelicExample.scala)24
-rw-r--r--kamon-examples/kamon-play-example/app/controllers/KamonPlayExample.scala71
-rw-r--r--kamon-examples/kamon-play-example/app/filters/TraceLocalFilter.scala52
-rw-r--r--kamon-examples/kamon-play-example/conf/application.conf61
-rw-r--r--kamon-examples/kamon-play-example/conf/logger.xml (renamed from kamon-examples/kamon-play-newrelic-example/conf/logger.xml)0
-rw-r--r--kamon-examples/kamon-play-example/conf/routes3
-rw-r--r--kamon-examples/kamon-play-example/project/Build.scala48
-rw-r--r--kamon-examples/kamon-play-example/project/build.properties1
-rw-r--r--kamon-examples/kamon-play-example/project/plugins.sbt (renamed from kamon-examples/kamon-play-newrelic-example/project/plugins.sbt)2
-rw-r--r--kamon-examples/kamon-play-example/public/images/favicon.png (renamed from kamon-examples/kamon-play-newrelic-example/public/images/favicon.png)bin687 -> 687 bytes
-rw-r--r--kamon-examples/kamon-play-example/public/images/glyphicons-halflings-white.png (renamed from kamon-examples/kamon-play-newrelic-example/public/images/glyphicons-halflings-white.png)bin8777 -> 8777 bytes
-rw-r--r--kamon-examples/kamon-play-example/public/images/glyphicons-halflings.png (renamed from kamon-examples/kamon-play-newrelic-example/public/images/glyphicons-halflings.png)bin12799 -> 12799 bytes
-rw-r--r--kamon-examples/kamon-play-example/public/javascripts/jquery-1.9.0.min.js (renamed from kamon-examples/kamon-play-newrelic-example/public/javascripts/jquery-1.9.0.min.js)0
-rw-r--r--kamon-examples/kamon-play-example/public/stylesheets/bootstrap.css (renamed from kamon-examples/kamon-play-newrelic-example/public/stylesheets/bootstrap.css)0
-rw-r--r--kamon-examples/kamon-play-example/public/stylesheets/main.css (renamed from kamon-examples/kamon-play-newrelic-example/public/stylesheets/main.css)0
-rw-r--r--kamon-examples/kamon-play-newrelic-example/conf/application.conf51
-rw-r--r--kamon-examples/kamon-play-newrelic-example/conf/routes3
-rw-r--r--kamon-examples/kamon-play-newrelic-example/project/Build.scala22
-rw-r--r--kamon-examples/kamon-play-newrelic-example/project/build.properties1
-rw-r--r--kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala227
-rw-r--r--kamon-macros/src/main/scala/kamon/macros/InlineTraceContextMacro.scala46
-rw-r--r--kamon-newrelic/src/main/resources/reference.conf2
-rw-r--r--kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetrics.scala6
-rw-r--r--kamon-newrelic/src/main/scala/kamon/newrelic/MetricTranslator.scala2
-rw-r--r--kamon-newrelic/src/main/scala/kamon/newrelic/NewRelic.scala10
-rw-r--r--kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetrics.scala20
-rw-r--r--kamon-newrelic/src/main/scala/kamon/newrelic/package.scala32
-rw-r--r--kamon-play/src/main/resources/META-INF/aop.xml1
-rw-r--r--kamon-play/src/main/resources/reference.conf4
-rw-r--r--kamon-play/src/main/scala/kamon/play/Play.scala3
-rw-r--r--kamon-play/src/main/scala/kamon/play/instrumentation/LoggerLikeInstrumentation.scala70
-rw-r--r--kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala36
-rw-r--r--kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala5
-rw-r--r--kamon-play/src/test/scala/kamon/play/LoggerLikeInstrumentationSpec.scala122
-rw-r--r--kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala47
-rw-r--r--kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala6
-rw-r--r--kamon-playground/src/main/resources/application.conf5
-rw-r--r--kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala21
-rw-r--r--kamon-spray/src/main/scala/kamon/spray/Spray.scala4
-rw-r--r--kamon-spray/src/main/scala/spray/can/client/ClientRequestInstrumentation.scala12
-rw-r--r--kamon-spray/src/main/scala/spray/can/server/ServerRequestInstrumentation.scala19
-rw-r--r--kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala15
-rw-r--r--kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala91
-rw-r--r--kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala (renamed from kamon-spray/src/test/scala/kamon/spray/ServerRequestInstrumentationSpec.scala)40
-rw-r--r--kamon-spray/src/test/scala/kamon/spray/TestServer.scala11
-rw-r--r--kamon-statsd/src/main/resources/reference.conf4
-rw-r--r--kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala35
-rw-r--r--kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala42
-rw-r--r--kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala113
-rw-r--r--kamon-system-metrics/src/main/resources/reference.conf76
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/metrics/CPUMetrics.scala81
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/metrics/GCMetrics.scala75
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/metrics/HeapMetrics.scala83
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/metrics/MemoryMetrics.scala88
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/metrics/NetworkMetrics.scala80
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/metrics/ProcessCPUMetrics.scala73
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/SystemMetrics.scala63
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsCollector.scala115
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarLoader.scala173
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/index21
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-amd64-freebsd-6.sobin0 -> 210641 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-amd64-linux.sobin0 -> 246605 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-amd64-solaris.sobin0 -> 251360 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ia64-hpux-11.slbin0 -> 577452 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ia64-linux.sobin0 -> 494929 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-pa-hpux-11.slbin0 -> 516096 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc-aix-5.sobin0 -> 400925 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc-linux.sobin0 -> 258547 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc64-aix-5.sobin0 -> 425077 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc64-linux.sobin0 -> 330767 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-s390x-linux.sobin0 -> 269932 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-sparc-solaris.sobin0 -> 285004 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-sparc64-solaris.sobin0 -> 261896 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-universal-macosx.dylibbin0 -> 377668 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-universal64-macosx.dylibbin0 -> 397440 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-freebsd-5.sobin0 -> 179751 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-freebsd-6.sobin0 -> 179379 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-linux.sobin0 -> 233385 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-solaris.sobin0 -> 242880 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/sigar-amd64-winnt.dllbin0 -> 402432 bytes
-rw-r--r--kamon-system-metrics/src/main/scala/kamon/system/sigar/sigar-x86-winnt.dllbin0 -> 266240 bytes
-rw-r--r--kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala332
-rw-r--r--project/Dependencies.scala3
-rw-r--r--project/Projects.scala34
-rw-r--r--project/Settings.scala13
147 files changed, 5383 insertions, 1739 deletions
diff --git a/.travis.yml b/.travis.yml
index 734b0e39..6f2df611 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,4 +3,8 @@ script:
- sbt ++$TRAVIS_SCALA_VERSION 'set concurrentRestrictions in Global += Tags.limit(Tags.Compile, 2)' compile test:compile
- sbt ++$TRAVIS_SCALA_VERSION -Dakka.test.timefactor=1.5 'set concurrentRestrictions in Global += Tags.limit(Tags.Test, 1)' test
scala:
- - 2.11.1 \ No newline at end of file
+ - 2.11.1
+before_script:
+ - mkdir $TRAVIS_BUILD_DIR/tmp
+ - export SBT_OPTS="-Djava.io.tmpdir=$TRAVIS_BUILD_DIR/tmp"
+
diff --git a/kamon-core/src/main/java/kamon/util/Example.java b/kamon-core/src/main/java/kamon/util/Example.java
new file mode 100644
index 00000000..a5031182
--- /dev/null
+++ b/kamon-core/src/main/java/kamon/util/Example.java
@@ -0,0 +1,8 @@
+package kamon.util;
+
+public class Example {
+
+ public static void main(String args[]) {
+
+ }
+}
diff --git a/kamon-core/src/main/resources/META-INF/aop.xml b/kamon-core/src/main/resources/META-INF/aop.xml
index 3f7dd42d..07f5cfd1 100644
--- a/kamon-core/src/main/resources/META-INF/aop.xml
+++ b/kamon-core/src/main/resources/META-INF/aop.xml
@@ -2,24 +2,27 @@
<aspectj>
<aspects>
+ <!-- Disable AspectJ Weaver not present error -->
+ <aspect name="kamon.instrumentation.AspectJWeaverMissingWarning"/>
+
<!-- Actors -->
- <aspect name="akka.instrumentation.RepointableActorRefTraceContextMixin"/>
- <aspect name="akka.instrumentation.SystemMessageTraceContextMixin"/>
- <aspect name="akka.instrumentation.ActorSystemMessagePassingTracing"/>
- <aspect name="akka.instrumentation.EnvelopeTraceContextMixin"/>
- <aspect name="akka.instrumentation.ActorCellMetricsMixin"/>
- <aspect name="akka.instrumentation.BehaviourInvokeTracing"/>
- <aspect name="kamon.instrumentation.ActorLoggingTracing"/>
+ <aspect name="akka.instrumentation.TraceContextIntoRepointableActorRefMixin"/>
+ <aspect name="akka.instrumentation.TraceContextIntoSystemMessageMixin"/>
+ <aspect name="akka.instrumentation.ActorSystemMessageInstrumentation"/>
+ <aspect name="akka.instrumentation.TraceContextIntoEnvelopeMixin"/>
+ <aspect name="akka.instrumentation.ActorCellMetricsIntoActorCellMixin"/>
+ <aspect name="akka.instrumentation.ActorCellInstrumentation"/>
+ <aspect name="akka.instrumentation.ActorLoggingInstrumentation"/>
<!-- Dispatchers -->
- <aspect name="akka.instrumentation.DispatcherTracing"/>
- <aspect name="akka.instrumentation.DispatcherMetricsMixin"/>
+ <aspect name="akka.instrumentation.DispatcherInstrumentation"/>
+ <aspect name="akka.instrumentation.DispatcherMetricCollectionInfoIntoDispatcherMixin"/>
<!-- Futures -->
- <aspect name="kamon.instrumentation.FutureTracing"/>
+ <aspect name="kamon.instrumentation.scala.FutureInstrumentation"/>
<!-- Patterns -->
- <aspect name="akka.instrumentation.AskPatternTracing"/>
+ <aspect name="akka.instrumentation.AskPatternInstrumentation"/>
</aspects>
<weaver options="-XmessageHandlerClass:kamon.weaver.logging.KamonWeaverMessageHandler">
diff --git a/kamon-core/src/main/resources/reference.conf b/kamon-core/src/main/resources/reference.conf
index d2830892..b3df73bf 100644
--- a/kamon-core/src/main/resources/reference.conf
+++ b/kamon-core/src/main/resources/reference.conf
@@ -16,6 +16,22 @@ kamon {
gauge-recording-interval = 100 milliseconds
+ # Default size for the LongBuffer that gets allocated for metrics collection and merge. The
+ # value should correspond to the highest number of different buckets with values that might
+ # exist in a single histogram during a metrics collection. The default value of 33792 is a
+ # very conservative value and its equal to the total number of buckets required to cover values
+ # from 1 nanosecond to 1 hour with 0.1% precision (3 significant value digits). That means
+ # that would need to have at least one measurement on every bucket of a single histogram to
+ # fully utilize this buffer, which is *really* unlikely to ever happen. Since the buffer should
+ # be allocated once and reused it shouldn't impose a memory footprint issue.
+ default-collection-context-buffer-size = 33792
+
+ # Disables a big error message that will be typically logged if your application wasn't started
+ # with the -javaagent:/path-to-aspectj-weaver.jar option. If you are only using KamonStandalone
+ # it might be ok for you to turn this error off.
+ disable-aspectj-weaver-missing-error = false
+
+
dispatchers {
# Dispatcher for periodical gauge value recordings.
@@ -48,30 +64,33 @@ kamon {
]
precision {
+ default-histogram-precision {
+ highest-trackable-value = 3600000000000
+ significant-value-digits = 2
+ }
+
+ default-min-max-counter-precision {
+ refresh-interval = 100 milliseconds
+ highest-trackable-value = 999999999
+ significant-value-digits = 2
+ }
+
+ default-gauge-precision {
+ refresh-interval = 100 milliseconds
+ highest-trackable-value = 999999999
+ significant-value-digits = 2
+ }
+
+
actor {
- processing-time {
- highest-trackable-value = 3600000000000
- significant-value-digits = 2
- }
- time-in-mailbox {
- highest-trackable-value = 3600000000000
- significant-value-digits = 2
- }
- mailbox-size {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
+ processing-time = ${kamon.metrics.precision.default-histogram-precision}
+ time-in-mailbox = ${kamon.metrics.precision.default-histogram-precision}
+ mailbox-size = ${kamon.metrics.precision.default-min-max-counter-precision}
}
trace {
- elapsed-time {
- highest-trackable-value = 3600000000000
- significant-value-digits = 2
- }
- segment {
- highest-trackable-value = 3600000000000
- significant-value-digits = 2
- }
+ elapsed-time = ${kamon.metrics.precision.default-histogram-precision}
+ segment = ${kamon.metrics.precision.default-histogram-precision}
}
dispatcher {
diff --git a/kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala b/kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala
new file mode 100644
index 00000000..3773e7d8
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala
@@ -0,0 +1,92 @@
+package kamon.http
+
+import akka.actor.ActorSystem
+import com.typesafe.config.Config
+import kamon.metric.instrument.Counter
+import kamon.metric._
+
+import scala.collection.concurrent.TrieMap
+
+object HttpServerMetrics extends MetricGroupIdentity {
+ val name: String = "http-server-metrics-recorder"
+ val category = new MetricGroupCategory {
+ val name: String = "http-server"
+ }
+
+ type TraceName = String
+ type StatusCode = String
+
+ case class CountPerStatusCode(statusCode: String) extends MetricIdentity {
+ def name: String = statusCode
+ }
+
+ case class TraceCountPerStatus(traceName: TraceName, statusCode: StatusCode) extends MetricIdentity {
+ def name: String = traceName + "_" + statusCode
+ }
+
+ class HttpServerMetricsRecorder extends MetricGroupRecorder {
+
+ private val counters = TrieMap[StatusCode, Counter]()
+ private val countersPerTrace = TrieMap[TraceName, TrieMap[StatusCode, Counter]]()
+
+ def recordResponse(statusCode: StatusCode): Unit = recordResponse(statusCode, 1L)
+
+ def recordResponse(statusCode: StatusCode, count: Long): Unit =
+ counters.getOrElseUpdate(statusCode, Counter()).increment(count)
+
+ def recordResponse(traceName: TraceName, statusCode: StatusCode): Unit = recordResponse(traceName, statusCode, 1L)
+
+ def recordResponse(traceName: TraceName, statusCode: StatusCode, count: Long): Unit = {
+ recordResponse(statusCode, count)
+ countersPerTrace.getOrElseUpdate(traceName, TrieMap()).getOrElseUpdate(statusCode, Counter()).increment(count)
+ }
+
+ def collect(context: CollectionContext): HttpServerMetricsSnapshot = {
+ val countsPerStatusCode = counters.map {
+ case (statusCode, counter) ⇒ (statusCode, counter.collect(context))
+ }.toMap
+
+ val countsPerTraceAndStatus = countersPerTrace.map {
+ case (traceName, countsPerStatus) ⇒
+ (traceName, countsPerStatus.map { case (statusCode, counter) ⇒ (statusCode, counter.collect(context)) }.toMap)
+ }.toMap
+
+ HttpServerMetricsSnapshot(countsPerStatusCode, countsPerTraceAndStatus)
+ }
+
+ def cleanup: Unit = {}
+ }
+
+ case class HttpServerMetricsSnapshot(countsPerStatusCode: Map[StatusCode, Counter.Snapshot],
+ countsPerTraceAndStatusCode: Map[TraceName, Map[StatusCode, Counter.Snapshot]]) extends MetricGroupSnapshot {
+
+ type GroupSnapshotType = HttpServerMetricsSnapshot
+
+ def merge(that: HttpServerMetricsSnapshot, context: CollectionContext): HttpServerMetricsSnapshot = {
+ val combinedCountsPerStatus = combineMaps(countsPerStatusCode, that.countsPerStatusCode)((l, r) ⇒ l.merge(r, context))
+ val combinedCountsPerTraceAndStatus = combineMaps(countsPerTraceAndStatusCode, that.countsPerTraceAndStatusCode) {
+ (leftCounts, rightCounts) ⇒ combineMaps(leftCounts, rightCounts)((l, r) ⇒ l.merge(r, context))
+ }
+ HttpServerMetricsSnapshot(combinedCountsPerStatus, combinedCountsPerTraceAndStatus)
+ }
+
+ def metrics: Map[MetricIdentity, MetricSnapshot] = {
+ countsPerStatusCode.map {
+ case (statusCode, count) ⇒ (CountPerStatusCode(statusCode), count)
+ } ++ {
+ for (
+ (traceName, countsPerStatus) ← countsPerTraceAndStatusCode;
+ (statusCode, count) ← countsPerStatus
+ ) yield (TraceCountPerStatus(traceName, statusCode), count)
+ }
+ }
+ }
+
+ val Factory = new MetricGroupFactory {
+ type GroupRecorder = HttpServerMetricsRecorder
+
+ def create(config: Config, system: ActorSystem): HttpServerMetricsRecorder =
+ new HttpServerMetricsRecorder()
+ }
+
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/AspectJWeaverMissingWarning.scala b/kamon-core/src/main/scala/kamon/instrumentation/AspectJWeaverMissingWarning.scala
new file mode 100644
index 00000000..5ca4481e
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/instrumentation/AspectJWeaverMissingWarning.scala
@@ -0,0 +1,17 @@
+package kamon.instrumentation
+
+import _root_.akka.event.EventStream
+import org.aspectj.lang.ProceedingJoinPoint
+import org.aspectj.lang.annotation.{ Around, Pointcut, Aspect }
+
+@Aspect
+class AspectJWeaverMissingWarning {
+
+ @Pointcut("execution(* kamon.metric.MetricsExtension.printInitializationMessage(..)) && args(eventStream, *)")
+ def printInitializationMessage(eventStream: EventStream): Unit = {}
+
+ @Around("printInitializationMessage(eventStream)")
+ def aroundPrintInitializationMessage(pjp: ProceedingJoinPoint, eventStream: EventStream): Unit = {
+ pjp.proceed(Array[AnyRef](eventStream, Boolean.box(true)))
+ }
+}
diff --git a/kamon-core/src/main/scala/akka/instrumentation/ActorMessagePassingTracing.scala b/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorCellInstrumentation.scala
index 6db86828..5fce4555 100644
--- a/kamon-core/src/main/scala/akka/instrumentation/ActorMessagePassingTracing.scala
+++ b/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorCellInstrumentation.scala
@@ -1,34 +1,32 @@
-/* ===================================================
+/*
+ * =========================================================================================
* Copyright © 2013-2014 the kamon project <http://kamon.io/>
*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ========================================================== */
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
package akka.instrumentation
-import org.aspectj.lang.annotation._
-import org.aspectj.lang.ProceedingJoinPoint
import akka.actor._
import akka.dispatch.{ Envelope, MessageDispatcher }
-import kamon.trace._
-import kamon.metrics.{ ActorMetrics, Metrics }
import kamon.Kamon
-import kamon.metrics.ActorMetrics.ActorMetricRecorder
-import kamon.metrics.instruments.MinMaxCounter
-import kamon.metrics.instruments.MinMaxCounter.CounterMeasurement
+import kamon.metric.ActorMetrics.ActorMetricsRecorder
+import kamon.metric.{ ActorMetrics, Metrics }
+import kamon.trace._
+import org.aspectj.lang.ProceedingJoinPoint
+import org.aspectj.lang.annotation._
@Aspect
-class BehaviourInvokeTracing {
+class ActorCellInstrumentation {
@Pointcut("execution(akka.actor.ActorCell.new(..)) && this(cell) && args(system, ref, props, dispatcher, parent)")
def actorCellCreation(cell: ActorCell, system: ActorSystem, ref: ActorRef, props: Props, dispatcher: MessageDispatcher, parent: ActorRef): Unit = {}
@@ -42,19 +40,6 @@ class BehaviourInvokeTracing {
cellWithMetrics.metricIdentity = metricIdentity
cellWithMetrics.actorMetricsRecorder = metricsExtension.register(metricIdentity, ActorMetrics.Factory)
-
- if (cellWithMetrics.actorMetricsRecorder.isDefined) {
- cellWithMetrics.mailboxSizeCollectorCancellable = metricsExtension.scheduleGaugeRecorder {
- cellWithMetrics.actorMetricsRecorder.map { am ⇒
- import am.mailboxSize._
- val CounterMeasurement(min, max, current) = cellWithMetrics.queueSize.collect()
-
- record(min)
- record(max)
- record(current)
- }
- }
- }
}
@Pointcut("(execution(* akka.actor.ActorCell.invoke(*)) || execution(* akka.routing.RoutedActorCell.sendMessage(*))) && this(cell) && args(envelope)")
@@ -75,7 +60,7 @@ class BehaviourInvokeTracing {
am ⇒
am.processingTime.record(System.nanoTime() - timestampBeforeProcessing)
am.timeInMailbox.record(timestampBeforeProcessing - contextAndTimestamp.captureNanoTime)
- cellWithMetrics.queueSize.decrement()
+ am.mailboxSize.decrement()
}
}
}
@@ -86,7 +71,7 @@ class BehaviourInvokeTracing {
@After("sendingMessageToActorCell(cell)")
def afterSendMessageToActorCell(cell: ActorCell): Unit = {
val cellWithMetrics = cell.asInstanceOf[ActorCellMetrics]
- cellWithMetrics.actorMetricsRecorder.map(am ⇒ cellWithMetrics.queueSize.increment())
+ cellWithMetrics.actorMetricsRecorder.map(am ⇒ am.mailboxSize.increment())
}
@Pointcut("execution(* akka.actor.ActorCell.stop()) && this(cell)")
@@ -110,27 +95,26 @@ class BehaviourInvokeTracing {
val cellWithMetrics = cell.asInstanceOf[ActorCellMetrics]
cellWithMetrics.actorMetricsRecorder.map {
- am ⇒ am.errorCounter.record(1L)
+ am ⇒ am.errors.increment()
}
}
}
trait ActorCellMetrics {
var metricIdentity: ActorMetrics = _
- var actorMetricsRecorder: Option[ActorMetricRecorder] = _
+ var actorMetricsRecorder: Option[ActorMetricsRecorder] = _
var mailboxSizeCollectorCancellable: Cancellable = _
- val queueSize = MinMaxCounter()
}
@Aspect
-class ActorCellMetricsMixin {
+class ActorCellMetricsIntoActorCellMixin {
@DeclareMixin("akka.actor.ActorCell")
def mixinActorCellMetricsToActorCell: ActorCellMetrics = new ActorCellMetrics {}
}
@Aspect
-class EnvelopeTraceContextMixin {
+class TraceContextIntoEnvelopeMixin {
@DeclareMixin("akka.dispatch.Envelope")
def mixinTraceContextAwareToEnvelope: TraceContextAware = TraceContextAware.default
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/ActorLoggingTracing.scala b/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorLoggingInstrumentation.scala
index 85d39d3e..ee9d442f 100644
--- a/kamon-core/src/main/scala/kamon/instrumentation/ActorLoggingTracing.scala
+++ b/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorLoggingInstrumentation.scala
@@ -1,11 +1,11 @@
/*
* =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
@@ -14,14 +14,14 @@
* =========================================================================================
*/
-package kamon.instrumentation
+package akka.instrumentation
-import org.aspectj.lang.annotation._
-import org.aspectj.lang.ProceedingJoinPoint
import kamon.trace.{ TraceContextAware, TraceRecorder }
+import org.aspectj.lang.ProceedingJoinPoint
+import org.aspectj.lang.annotation._
@Aspect
-class ActorLoggingTracing {
+class ActorLoggingInstrumentation {
@DeclareMixin("akka.event.Logging.LogEvent+")
def mixinTraceContextAwareToLogEvent: TraceContextAware = TraceContextAware.default
diff --git a/kamon-core/src/main/scala/akka/instrumentation/ActorSystemMessagePassingTracing.scala b/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentation.scala
index 7d03d946..9b6b6866 100644
--- a/kamon-core/src/main/scala/akka/instrumentation/ActorSystemMessagePassingTracing.scala
+++ b/kamon-core/src/main/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentation.scala
@@ -1,12 +1,44 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
package akka.instrumentation
-import org.aspectj.lang.annotation._
import akka.dispatch.sysmsg.EarliestFirstSystemMessageList
+import kamon.trace.{ TraceContextAware, TraceRecorder }
import org.aspectj.lang.ProceedingJoinPoint
-import kamon.trace.{ TraceRecorder, TraceContextAware }
+import org.aspectj.lang.annotation._
+
+@Aspect
+class ActorSystemMessageInstrumentation {
+
+ @Pointcut("execution(* akka.actor.ActorCell.invokeAll$1(..)) && args(messages, *)")
+ def systemMessageProcessing(messages: EarliestFirstSystemMessageList): Unit = {}
+
+ @Around("systemMessageProcessing(messages)")
+ def aroundSystemMessageInvoke(pjp: ProceedingJoinPoint, messages: EarliestFirstSystemMessageList): Any = {
+ if (messages.nonEmpty) {
+ val ctx = messages.head.asInstanceOf[TraceContextAware].traceContext
+ TraceRecorder.withTraceContext(ctx)(pjp.proceed())
+
+ } else pjp.proceed()
+ }
+}
@Aspect
-class SystemMessageTraceContextMixin {
+class TraceContextIntoSystemMessageMixin {
@DeclareMixin("akka.dispatch.sysmsg.SystemMessage+")
def mixinTraceContextAwareToSystemMessage: TraceContextAware = TraceContextAware.default
@@ -22,7 +54,7 @@ class SystemMessageTraceContextMixin {
}
@Aspect
-class RepointableActorRefTraceContextMixin {
+class TraceContextIntoRepointableActorRefMixin {
@DeclareMixin("akka.actor.RepointableActorRef")
def mixinTraceContextAwareToRepointableActorRef: TraceContextAware = TraceContextAware.default
@@ -45,21 +77,4 @@ class RepointableActorRefTraceContextMixin {
pjp.proceed()
}
}
-
-}
-
-@Aspect
-class ActorSystemMessagePassingTracing {
-
- @Pointcut("execution(* akka.actor.ActorCell.invokeAll$1(..)) && args(messages, *)")
- def systemMessageProcessing(messages: EarliestFirstSystemMessageList): Unit = {}
-
- @Around("systemMessageProcessing(messages)")
- def aroundSystemMessageInvoke(pjp: ProceedingJoinPoint, messages: EarliestFirstSystemMessageList): Any = {
- if (messages.nonEmpty) {
- val ctx = messages.head.asInstanceOf[TraceContextAware].traceContext
- TraceRecorder.withTraceContext(ctx)(pjp.proceed())
-
- } else pjp.proceed()
- }
-}
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/akka/instrumentation/AskPatternTracing.scala b/kamon-core/src/main/scala/kamon/instrumentation/akka/AskPatternInstrumentation.scala
index 31ec92a4..3bf13ce2 100644
--- a/kamon-core/src/main/scala/akka/instrumentation/AskPatternTracing.scala
+++ b/kamon-core/src/main/scala/kamon/instrumentation/akka/AskPatternInstrumentation.scala
@@ -1,11 +1,11 @@
/*
* =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
@@ -16,16 +16,17 @@
package akka.instrumentation
-import org.aspectj.lang.annotation.{ AfterReturning, Pointcut, Aspect }
-import akka.event.Logging.Warning
-import scala.compat.Platform.EOL
import akka.actor.ActorRefProvider
+import akka.event.Logging.Warning
import akka.pattern.{ AskTimeoutException, PromiseActorRef }
-import kamon.trace.Trace
import kamon.Kamon
+import kamon.trace.Trace
+import org.aspectj.lang.annotation.{ AfterReturning, Aspect, Pointcut }
+
+import scala.compat.Platform.EOL
@Aspect
-class AskPatternTracing {
+class AskPatternInstrumentation {
class StackTraceCaptureException extends Throwable
@@ -46,7 +47,7 @@ class AskPatternTracing {
case timeout: AskTimeoutException ⇒
val stackString = stack.getStackTrace.drop(3).mkString("", EOL, EOL)
- system.eventStream.publish(Warning("AskPatternTracing", classOf[AskPatternTracing],
+ system.eventStream.publish(Warning("AskPatternTracing", classOf[AskPatternInstrumentation],
"Timeout triggered for ask pattern registered at: " + stackString))
}
}
diff --git a/kamon-core/src/main/scala/akka/instrumentation/DispatcherTracing.scala b/kamon-core/src/main/scala/kamon/instrumentation/akka/DispatcherInstrumentation.scala
index 60cc4ddf..db366e8c 100644
--- a/kamon-core/src/main/scala/akka/instrumentation/DispatcherTracing.scala
+++ b/kamon-core/src/main/scala/kamon/instrumentation/akka/DispatcherInstrumentation.scala
@@ -5,7 +5,7 @@
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
@@ -16,19 +16,21 @@
package akka.instrumentation
-import org.aspectj.lang.annotation._
-import akka.dispatch.{ Dispatchers, ExecutorServiceDelegate, Dispatcher, MessageDispatcher }
-import kamon.metrics.{ Metrics, DispatcherMetrics }
-import kamon.metrics.DispatcherMetrics.DispatcherMetricRecorder
-import kamon.Kamon
-import akka.actor.{ Cancellable, ActorSystemImpl }
-import scala.concurrent.forkjoin.ForkJoinPool
-import java.util.concurrent.ThreadPoolExecutor
import java.lang.reflect.Method
+import java.util.concurrent.ThreadPoolExecutor
+
+import akka.actor.{ ActorSystemImpl, Cancellable }
+import akka.dispatch.{ Dispatcher, Dispatchers, ExecutorServiceDelegate, MessageDispatcher }
import akka.instrumentation.DispatcherMetricsCollector.DispatcherMetricsMeasurement
+import kamon.Kamon
+import kamon.metric.DispatcherMetrics.DispatcherMetricRecorder
+import kamon.metric.{ DispatcherMetrics, Metrics }
+import org.aspectj.lang.annotation._
+
+import scala.concurrent.forkjoin.ForkJoinPool
@Aspect
-class DispatcherTracing {
+class DispatcherInstrumentation {
@Pointcut("execution(akka.dispatch.Dispatchers.new(..)) && this(dispatchers) && cflow(execution(akka.actor.ActorSystemImpl.new(..)) && this(system))")
def onActorSystemStartup(dispatchers: Dispatchers, system: ActorSystemImpl) = {}
@@ -45,7 +47,7 @@ class DispatcherTracing {
@AfterReturning(pointcut = "onDispatchersLookup(dispatchers)", returning = "dispatcher")
def afterReturningLookup(dispatchers: Dispatchers, dispatcher: Dispatcher): Unit = {
val dispatchersWithActorSystem = dispatchers.asInstanceOf[DispatchersWithActorSystem]
- val dispatcherWithMetrics = dispatcher.asInstanceOf[DispatcherMessageMetrics]
+ val dispatcherWithMetrics = dispatcher.asInstanceOf[DispatcherMetricCollectionInfo]
dispatcherWithMetrics.actorSystem = dispatchersWithActorSystem.actorSystem
}
@@ -62,7 +64,7 @@ class DispatcherTracing {
@After("onDispatcherStartup(dispatcher)")
def afterDispatcherStartup(dispatcher: MessageDispatcher): Unit = {
- val dispatcherWithMetrics = dispatcher.asInstanceOf[DispatcherMessageMetrics]
+ val dispatcherWithMetrics = dispatcher.asInstanceOf[DispatcherMetricCollectionInfo]
val metricsExtension = Kamon(Metrics)(dispatcherWithMetrics.actorSystem)
val metricIdentity = DispatcherMetrics(dispatcher.id)
@@ -90,7 +92,7 @@ class DispatcherTracing {
@After("onDispatcherShutdown(dispatcher)")
def afterDispatcherShutdown(dispatcher: MessageDispatcher): Unit = {
- val dispatcherWithMetrics = dispatcher.asInstanceOf[DispatcherMessageMetrics]
+ val dispatcherWithMetrics = dispatcher.asInstanceOf[DispatcherMetricCollectionInfo]
dispatcherWithMetrics.dispatcherMetricsRecorder.map {
dispatcher ⇒
@@ -101,16 +103,16 @@ class DispatcherTracing {
}
@Aspect
-class DispatcherMetricsMixin {
+class DispatcherMetricCollectionInfoIntoDispatcherMixin {
@DeclareMixin("akka.dispatch.Dispatcher")
- def mixinDispatcherMetricsToMessageDispatcher: DispatcherMessageMetrics = new DispatcherMessageMetrics {}
+ def mixinDispatcherMetricsToMessageDispatcher: DispatcherMetricCollectionInfo = new DispatcherMetricCollectionInfo {}
@DeclareMixin("akka.dispatch.Dispatchers")
def mixinDispatchersToDispatchersWithActorSystem: DispatchersWithActorSystem = new DispatchersWithActorSystem {}
}
-trait DispatcherMessageMetrics {
+trait DispatcherMetricCollectionInfo {
var metricIdentity: DispatcherMetrics = _
var dispatcherMetricsRecorder: Option[DispatcherMetricRecorder] = _
var dispatcherCollectorCancellable: Cancellable = _
diff --git a/kamon-core/src/main/scala/kamon/metrics/instruments/CounterRecorder.scala b/kamon-core/src/main/scala/kamon/instrumentation/hdrhistogram/AtomicHistogramFieldsAccessor.scala
index e5efbc15..e79090a8 100644
--- a/kamon-core/src/main/scala/kamon/metrics/instruments/CounterRecorder.scala
+++ b/kamon-core/src/main/scala/kamon/instrumentation/hdrhistogram/AtomicHistogramFieldsAccessor.scala
@@ -1,4 +1,3 @@
-package kamon.metrics.instruments
/*
* =========================================================================================
* Copyright © 2013-2014 the kamon project <http://kamon.io/>
@@ -15,24 +14,22 @@ package kamon.metrics.instruments
* =========================================================================================
*/
-import kamon.metrics._
-import kamon.metrics.MetricSnapshot.Measurement
+package org.HdrHistogram
-import jsr166e.LongAdder
+import java.util.concurrent.atomic.{ AtomicLongArray, AtomicLongFieldUpdater }
-class CounterRecorder extends MetricRecorder {
- private val counter = new LongAdder
+trait AtomicHistogramFieldsAccessor {
+ self: AtomicHistogram ⇒
- def record(value: Long): Unit = {
- counter.add(value)
- }
+ def countsArray(): AtomicLongArray = self.counts
- def collect(): MetricSnapshotLike = {
- val sum = counter.sumThenReset()
- MetricSnapshot(InstrumentTypes.Counter, sum, Scale.Unit, Vector(Measurement(1, sum)))
- }
+ def unitMagnitude(): Int = self.unitMagnitude
+
+ def subBucketHalfCount(): Int = self.subBucketHalfCount
+
+ def subBucketHalfCountMagnitude(): Int = self.subBucketHalfCountMagnitude
}
-object CounterRecorder {
- def apply(): CounterRecorder = new CounterRecorder()
-} \ No newline at end of file
+object AtomicHistogramFieldsAccessor {
+ def totalCountUpdater(): AtomicLongFieldUpdater[AtomicHistogram] = AtomicHistogram.totalCountUpdater
+}
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/FutureTracing.scala b/kamon-core/src/main/scala/kamon/instrumentation/scala/FutureInstrumentation.scala
index 634c94a1..bda2da78 100644
--- a/kamon-core/src/main/scala/kamon/instrumentation/FutureTracing.scala
+++ b/kamon-core/src/main/scala/kamon/instrumentation/scala/FutureInstrumentation.scala
@@ -1,26 +1,27 @@
-/* ===================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ========================================================== */
-package kamon.instrumentation
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.instrumentation.scala
-import org.aspectj.lang.annotation._
-import org.aspectj.lang.ProceedingJoinPoint
import kamon.trace.{ TraceContextAware, TraceRecorder }
+import org.aspectj.lang.ProceedingJoinPoint
+import org.aspectj.lang.annotation._
@Aspect
-class FutureTracing {
+class FutureInstrumentation {
@DeclareMixin("scala.concurrent.impl.CallbackRunnable || scala.concurrent.impl.Future.PromiseCompletingRunnable")
def mixinTraceContextAwareToFutureRelatedRunnable: TraceContextAware = TraceContextAware.default
@@ -39,9 +40,9 @@ class FutureTracing {
@Around("futureRelatedRunnableExecution(runnable)")
def aroundExecution(pjp: ProceedingJoinPoint, runnable: TraceContextAware): Any = {
- TraceRecorder.withTraceContext(runnable.traceContext) {
+ TraceRecorder.withInlineTraceContextReplacement(runnable.traceContext) {
pjp.proceed()
}
}
-} \ No newline at end of file
+}
diff --git a/kamon-core/src/main/scala/kamon/metric/ActorMetrics.scala b/kamon-core/src/main/scala/kamon/metric/ActorMetrics.scala
new file mode 100644
index 00000000..bb412f79
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/ActorMetrics.scala
@@ -0,0 +1,89 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import akka.actor.ActorSystem
+import com.typesafe.config.Config
+import kamon.metric.instrument.{ MinMaxCounter, Counter, Histogram }
+
+case class ActorMetrics(name: String) extends MetricGroupIdentity {
+ val category = ActorMetrics
+}
+
+object ActorMetrics extends MetricGroupCategory {
+ val name = "actor"
+
+ case object ProcessingTime extends MetricIdentity { val name = "processing-time" }
+ case object MailboxSize extends MetricIdentity { val name = "mailbox-size" }
+ case object TimeInMailbox extends MetricIdentity { val name = "time-in-mailbox" }
+ case object Errors extends MetricIdentity { val name = "errors" }
+
+ case class ActorMetricsRecorder(processingTime: Histogram, timeInMailbox: Histogram, mailboxSize: MinMaxCounter,
+ errors: Counter) extends MetricGroupRecorder {
+
+ def collect(context: CollectionContext): ActorMetricSnapshot =
+ ActorMetricSnapshot(
+ processingTime.collect(context),
+ timeInMailbox.collect(context),
+ mailboxSize.collect(context),
+ errors.collect(context))
+
+ def cleanup: Unit = {
+ processingTime.cleanup
+ mailboxSize.cleanup
+ timeInMailbox.cleanup
+ errors.cleanup
+ }
+ }
+
+ case class ActorMetricSnapshot(processingTime: Histogram.Snapshot, timeInMailbox: Histogram.Snapshot,
+ mailboxSize: Histogram.Snapshot, errors: Counter.Snapshot) extends MetricGroupSnapshot {
+
+ type GroupSnapshotType = ActorMetricSnapshot
+
+ def merge(that: ActorMetricSnapshot, context: CollectionContext): ActorMetricSnapshot =
+ ActorMetricSnapshot(
+ processingTime.merge(that.processingTime, context),
+ timeInMailbox.merge(that.timeInMailbox, context),
+ mailboxSize.merge(that.mailboxSize, context),
+ errors.merge(that.errors, context))
+
+ lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
+ (ProcessingTime -> processingTime),
+ (MailboxSize -> mailboxSize),
+ (TimeInMailbox -> timeInMailbox),
+ (Errors -> errors))
+ }
+
+ val Factory = new MetricGroupFactory {
+ type GroupRecorder = ActorMetricsRecorder
+
+ def create(config: Config, system: ActorSystem): ActorMetricsRecorder = {
+ val settings = config.getConfig("precision.actor")
+
+ val processingTimeConfig = settings.getConfig("processing-time")
+ val timeInMailboxConfig = settings.getConfig("time-in-mailbox")
+ val mailboxSizeConfig = settings.getConfig("mailbox-size")
+
+ new ActorMetricsRecorder(
+ Histogram.fromConfig(processingTimeConfig),
+ Histogram.fromConfig(timeInMailboxConfig),
+ MinMaxCounter.fromConfig(mailboxSizeConfig, system),
+ Counter())
+ }
+ }
+}
diff --git a/kamon-core/src/main/scala/kamon/metric/DispatcherMetrics.scala b/kamon-core/src/main/scala/kamon/metric/DispatcherMetrics.scala
new file mode 100644
index 00000000..fbce783c
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/DispatcherMetrics.scala
@@ -0,0 +1,88 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import akka.actor.ActorSystem
+import com.typesafe.config.Config
+import kamon.metric.instrument.{ Histogram, HdrHistogram }
+
+case class DispatcherMetrics(name: String) extends MetricGroupIdentity {
+ val category = DispatcherMetrics
+}
+
+object DispatcherMetrics extends MetricGroupCategory {
+ val name = "dispatcher"
+
+ case object MaximumPoolSize extends MetricIdentity { val name = "maximum-pool-size" }
+ case object RunningThreadCount extends MetricIdentity { val name = "running-thread-count" }
+ case object QueueTaskCount extends MetricIdentity { val name = "queued-task-count" }
+ case object PoolSize extends MetricIdentity { val name = "pool-size" }
+
+ case class DispatcherMetricRecorder(maximumPoolSize: Histogram, runningThreadCount: Histogram,
+ queueTaskCount: Histogram, poolSize: Histogram)
+ extends MetricGroupRecorder {
+
+ def collect(context: CollectionContext): MetricGroupSnapshot =
+ DispatcherMetricSnapshot(
+ maximumPoolSize.collect(context),
+ runningThreadCount.collect(context),
+ queueTaskCount.collect(context),
+ poolSize.collect(context))
+
+ def cleanup: Unit = {}
+
+ }
+
+ case class DispatcherMetricSnapshot(maximumPoolSize: Histogram.Snapshot, runningThreadCount: Histogram.Snapshot,
+ queueTaskCount: Histogram.Snapshot, poolSize: Histogram.Snapshot) extends MetricGroupSnapshot {
+
+ type GroupSnapshotType = DispatcherMetricSnapshot
+
+ def merge(that: DispatcherMetricSnapshot, context: CollectionContext): DispatcherMetricSnapshot =
+ DispatcherMetricSnapshot(
+ maximumPoolSize.merge(that.maximumPoolSize, context),
+ runningThreadCount.merge(that.runningThreadCount, context),
+ queueTaskCount.merge(that.queueTaskCount, context),
+ poolSize.merge(that.poolSize, context))
+
+ lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
+ (MaximumPoolSize -> maximumPoolSize),
+ (RunningThreadCount -> runningThreadCount),
+ (QueueTaskCount -> queueTaskCount),
+ (PoolSize -> poolSize))
+ }
+
+ val Factory = new MetricGroupFactory {
+ type GroupRecorder = DispatcherMetricRecorder
+
+ def create(config: Config, system: ActorSystem): DispatcherMetricRecorder = {
+ val settings = config.getConfig("precision.dispatcher")
+
+ val maximumPoolSizeConfig = settings.getConfig("maximum-pool-size")
+ val runningThreadCountConfig = settings.getConfig("running-thread-count")
+ val queueTaskCountConfig = settings.getConfig("queued-task-count")
+ val poolSizeConfig = settings.getConfig("pool-size")
+
+ new DispatcherMetricRecorder(
+ Histogram.fromConfig(maximumPoolSizeConfig),
+ Histogram.fromConfig(runningThreadCountConfig),
+ Histogram.fromConfig(queueTaskCountConfig),
+ Histogram.fromConfig(poolSizeConfig))
+ }
+ }
+}
+
diff --git a/kamon-core/src/main/scala/kamon/metric/EntityMetrics.scala b/kamon-core/src/main/scala/kamon/metric/EntityMetrics.scala
new file mode 100644
index 00000000..3761f5a5
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/EntityMetrics.scala
@@ -0,0 +1,75 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import java.nio.{ LongBuffer }
+import akka.actor.ActorSystem
+import com.typesafe.config.Config
+
+trait MetricGroupCategory {
+ def name: String
+}
+
+trait MetricGroupIdentity {
+ def name: String
+ def category: MetricGroupCategory
+}
+
+trait MetricIdentity {
+ def name: String
+}
+
+trait CollectionContext {
+ def buffer: LongBuffer
+}
+
+object CollectionContext {
+ def apply(longBufferSize: Int): CollectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(longBufferSize)
+ }
+}
+
+trait MetricGroupRecorder {
+ def collect(context: CollectionContext): MetricGroupSnapshot
+ def cleanup: Unit
+}
+
+trait MetricSnapshot {
+ type SnapshotType
+
+ def merge(that: SnapshotType, context: CollectionContext): SnapshotType
+}
+
+trait MetricGroupSnapshot {
+ type GroupSnapshotType
+
+ def metrics: Map[MetricIdentity, MetricSnapshot]
+ def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType
+}
+
+private[kamon] trait MetricRecorder {
+ type SnapshotType <: MetricSnapshot
+
+ def collect(context: CollectionContext): SnapshotType
+ def cleanup: Unit
+}
+
+trait MetricGroupFactory {
+ type GroupRecorder <: MetricGroupRecorder
+ def create(config: Config, system: ActorSystem): GroupRecorder
+}
+
diff --git a/kamon-core/src/main/scala/kamon/metrics/MetricsExtension.scala b/kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala
index c60babb2..00214f51 100644
--- a/kamon-core/src/main/scala/kamon/metrics/MetricsExtension.scala
+++ b/kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala
@@ -14,7 +14,10 @@
* =========================================================================================
*/
-package kamon.metrics
+package kamon.metric
+
+import akka.event.Logging.Error
+import akka.event.EventStream
import scala.collection.concurrent.TrieMap
import akka.actor._
@@ -22,12 +25,13 @@ import com.typesafe.config.Config
import kamon.util.GlobPathFilter
import kamon.Kamon
import akka.actor
-import kamon.metrics.Metrics.MetricGroupFilter
-import kamon.metrics.Subscriptions.Subscribe
+import kamon.metric.Metrics.MetricGroupFilter
+import kamon.metric.Subscriptions.{ Unsubscribe, Subscribe }
import java.util.concurrent.TimeUnit
class MetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
val metricsExtConfig = system.settings.config.getConfig("kamon.metrics")
+ printInitializationMessage(system.eventStream, metricsExtConfig.getBoolean("disable-aspectj-weaver-missing-error"))
/** Configured Dispatchers */
val metricSubscriptionsDispatcher = system.dispatchers.lookup(metricsExtConfig.getString("dispatchers.metric-subscriptions"))
@@ -42,7 +46,7 @@ class MetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
def register(identity: MetricGroupIdentity, factory: MetricGroupFactory): Option[factory.GroupRecorder] = {
if (shouldTrack(identity))
- Some(storage.getOrElseUpdate(identity, factory.create(metricsExtConfig)).asInstanceOf[factory.GroupRecorder])
+ Some(storage.getOrElseUpdate(identity, factory.create(metricsExtConfig, system)).asInstanceOf[factory.GroupRecorder])
else
None
}
@@ -51,13 +55,11 @@ class MetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
storage.remove(identity)
}
- def subscribe[C <: MetricGroupCategory](category: C, selection: String, receiver: ActorRef, permanently: Boolean = false): Unit = {
- subscriptions.tell(Subscribe(category, selection, permanently), receiver)
- }
+ def subscribe[C <: MetricGroupCategory](category: C, selection: String, subscriber: ActorRef, permanently: Boolean = false): Unit =
+ subscriptions.tell(Subscribe(category, selection, subscriber, permanently), subscriber)
- def collect: Map[MetricGroupIdentity, MetricGroupSnapshot] = {
- (for ((identity, recorder) ← storage) yield (identity, recorder.collect)).toMap
- }
+ def unsubscribe(subscriber: ActorRef): Unit =
+ subscriptions.tell(Unsubscribe(subscriber), subscriber)
def scheduleGaugeRecorder(body: ⇒ Unit): Cancellable = {
import scala.concurrent.duration._
@@ -68,7 +70,7 @@ class MetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
}
private def shouldTrack(identity: MetricGroupIdentity): Boolean = {
- filters.get(identity.category.name).map(filter ⇒ filter.accept(identity.name)).getOrElse(false)
+ filters.get(identity.category.name).map(filter ⇒ filter.accept(identity.name)).getOrElse(true)
}
def loadFilters(config: Config): Map[String, MetricGroupFilter] = {
@@ -92,6 +94,34 @@ class MetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
allFilters.toMap
}
+
+ def buildDefaultCollectionContext: CollectionContext =
+ CollectionContext(metricsExtConfig.getInt("default-collection-context-buffer-size"))
+
+ def printInitializationMessage(eventStream: EventStream, disableWeaverMissingError: Boolean): Unit = {
+ if (!disableWeaverMissingError) {
+ val weaverMissingMessage =
+ """
+ |
+ | ___ _ ___ _ _ ___ ___ _ _
+ | / _ \ | | |_ | | | | | | \/ |(_) (_)
+ |/ /_\ \ ___ _ __ ___ ___ | |_ | | | | | | ___ __ _ __ __ ___ _ __ | . . | _ ___ ___ _ _ __ __ _
+ || _ |/ __|| '_ \ / _ \ / __|| __| | | | |/\| | / _ \ / _` |\ \ / // _ \| '__| | |\/| || |/ __|/ __|| || '_ \ / _` |
+ || | | |\__ \| |_) || __/| (__ | |_ /\__/ / \ /\ /| __/| (_| | \ V /| __/| | | | | || |\__ \\__ \| || | | || (_| |
+ |\_| |_/|___/| .__/ \___| \___| \__|\____/ \/ \/ \___| \__,_| \_/ \___||_| \_| |_/|_||___/|___/|_||_| |_| \__, |
+ | | | __/ |
+ | |_| |___/
+ |
+ | It seems like your application wasn't started with the -javaagent:/path-to-aspectj-weaver.jar option. Without that Kamon might
+ | not work properly, if you need help on setting up the weaver go to http://kamon.io/introduction/get-started/ for more info. If
+ | you are sure that you don't need the weaver (e.g. you are only using KamonStandalone) then you can disable this error message
+ | by changing the kamon.metrics.disable-aspectj-weaver-missing-error setting in your configuration file.
+ |
+ """.stripMargin
+
+ eventStream.publish(Error("MetricsExtension", classOf[MetricsExtension], weaverMissingMessage))
+ }
+ }
}
object Metrics extends ExtensionId[MetricsExtension] with ExtensionIdProvider {
diff --git a/kamon-core/src/main/scala/kamon/metrics/Scale.scala b/kamon-core/src/main/scala/kamon/metric/Scale.scala
index 6899490a..2f27c1a3 100644
--- a/kamon-core/src/main/scala/kamon/metrics/Scale.scala
+++ b/kamon-core/src/main/scala/kamon/metric/Scale.scala
@@ -14,7 +14,7 @@
* =========================================================================================
*/
-package kamon.metrics
+package kamon.metric
class Scale(val numericValue: Double) extends AnyVal
diff --git a/kamon-core/src/main/scala/kamon/metric/Subscriptions.scala b/kamon-core/src/main/scala/kamon/metric/Subscriptions.scala
new file mode 100644
index 00000000..c6571507
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/Subscriptions.scala
@@ -0,0 +1,173 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import akka.actor._
+import kamon.metric.Subscriptions._
+import kamon.util.GlobPathFilter
+import scala.concurrent.duration.{ FiniteDuration, Duration }
+import java.util.concurrent.TimeUnit
+import kamon.Kamon
+import kamon.metric.TickMetricSnapshotBuffer.FlushBuffer
+
+class Subscriptions extends Actor {
+ import context.system
+
+ val flushMetricsSchedule = scheduleFlushMessage()
+ val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
+
+ var lastTick: Long = System.currentTimeMillis()
+ var oneShotSubscriptions: Map[ActorRef, MetricSelectionFilter] = Map.empty
+ var permanentSubscriptions: Map[ActorRef, MetricSelectionFilter] = Map.empty
+
+ def receive = {
+ case Subscribe(category, selection, subscriber, permanent) ⇒ subscribe(category, selection, subscriber, permanent)
+ case Unsubscribe(subscriber) ⇒ unsubscribe(subscriber)
+ case Terminated(subscriber) ⇒ unsubscribe(subscriber)
+ case FlushMetrics ⇒ flush()
+ }
+
+ def subscribe(category: MetricGroupCategory, selection: String, subscriber: ActorRef, permanent: Boolean): Unit = {
+ context.watch(subscriber)
+ val newFilter: MetricSelectionFilter = GroupAndPatternFilter(category, new GlobPathFilter(selection))
+
+ if (permanent) {
+ permanentSubscriptions = permanentSubscriptions.updated(subscriber, newFilter combine {
+ permanentSubscriptions.getOrElse(subscriber, MetricSelectionFilter.empty)
+ })
+ } else {
+ oneShotSubscriptions = oneShotSubscriptions.updated(subscriber, newFilter combine {
+ oneShotSubscriptions.getOrElse(subscriber, MetricSelectionFilter.empty)
+ })
+ }
+ }
+
+ def unsubscribe(subscriber: ActorRef): Unit = {
+ if (permanentSubscriptions.contains(subscriber))
+ permanentSubscriptions = permanentSubscriptions - subscriber
+
+ if (oneShotSubscriptions.contains(subscriber))
+ oneShotSubscriptions = oneShotSubscriptions - subscriber
+ }
+
+ def flush(): Unit = {
+ val currentTick = System.currentTimeMillis()
+ val snapshots = collectAll()
+
+ dispatchSelectedMetrics(lastTick, currentTick, permanentSubscriptions, snapshots)
+ dispatchSelectedMetrics(lastTick, currentTick, oneShotSubscriptions, snapshots)
+
+ lastTick = currentTick
+ oneShotSubscriptions = Map.empty
+ }
+
+ def collectAll(): Map[MetricGroupIdentity, MetricGroupSnapshot] = {
+ val allMetrics = Kamon(Metrics).storage
+ val builder = Map.newBuilder[MetricGroupIdentity, MetricGroupSnapshot]
+
+ allMetrics.foreach {
+ case (identity, recorder) ⇒ builder += ((identity, recorder.collect(collectionContext)))
+ }
+
+ builder.result()
+ }
+
+ def dispatchSelectedMetrics(lastTick: Long, currentTick: Long, subscriptions: Map[ActorRef, MetricSelectionFilter],
+ snapshots: Map[MetricGroupIdentity, MetricGroupSnapshot]): Unit = {
+
+ for ((subscriber, filter) ← subscriptions) {
+ val selection = snapshots.filter(group ⇒ filter.accept(group._1))
+ val tickMetrics = TickMetricSnapshot(lastTick, currentTick, selection)
+
+ subscriber ! tickMetrics
+ }
+ }
+
+ def scheduleFlushMessage(): Cancellable = {
+ val config = context.system.settings.config
+ val tickInterval = Duration(config.getDuration("kamon.metrics.tick-interval", TimeUnit.NANOSECONDS), TimeUnit.NANOSECONDS)
+ context.system.scheduler.schedule(tickInterval, tickInterval, self, FlushMetrics)(context.dispatcher)
+ }
+}
+
+object Subscriptions {
+ case object FlushMetrics
+ case class Unsubscribe(subscriber: ActorRef)
+ case class Subscribe(category: MetricGroupCategory, selection: String, subscriber: ActorRef, permanently: Boolean = false)
+ case class TickMetricSnapshot(from: Long, to: Long, metrics: Map[MetricGroupIdentity, MetricGroupSnapshot])
+
+ trait MetricSelectionFilter {
+ def accept(identity: MetricGroupIdentity): Boolean
+ }
+
+ object MetricSelectionFilter {
+ val empty = new MetricSelectionFilter {
+ def accept(identity: MetricGroupIdentity): Boolean = false
+ }
+
+ implicit class CombinableMetricSelectionFilter(msf: MetricSelectionFilter) {
+ def combine(that: MetricSelectionFilter): MetricSelectionFilter = new MetricSelectionFilter {
+ def accept(identity: MetricGroupIdentity): Boolean = msf.accept(identity) || that.accept(identity)
+ }
+ }
+ }
+
+ case class GroupAndPatternFilter(category: MetricGroupCategory, globFilter: GlobPathFilter) extends MetricSelectionFilter {
+ def accept(identity: MetricGroupIdentity): Boolean = {
+ category.equals(identity.category) && globFilter.accept(identity.name)
+ }
+ }
+}
+
+class TickMetricSnapshotBuffer(flushInterval: FiniteDuration, receiver: ActorRef) extends Actor {
+ val flushSchedule = context.system.scheduler.schedule(flushInterval, flushInterval, self, FlushBuffer)(context.dispatcher)
+ val collectionContext = Kamon(Metrics)(context.system).buildDefaultCollectionContext
+
+ def receive = empty
+
+ def empty: Actor.Receive = {
+ case tick: TickMetricSnapshot ⇒ context become (buffering(tick))
+ case FlushBuffer ⇒ // Nothing to flush.
+ }
+
+ def buffering(buffered: TickMetricSnapshot): Actor.Receive = {
+ case TickMetricSnapshot(_, to, tickMetrics) ⇒
+ val combinedMetrics = combineMaps(buffered.metrics, tickMetrics)(mergeMetricGroup)
+ val combinedSnapshot = TickMetricSnapshot(buffered.from, to, combinedMetrics)
+
+ context become (buffering(combinedSnapshot))
+
+ case FlushBuffer ⇒
+ receiver ! buffered
+ context become (empty)
+
+ }
+
+ override def postStop(): Unit = {
+ flushSchedule.cancel()
+ super.postStop()
+ }
+
+ def mergeMetricGroup(left: MetricGroupSnapshot, right: MetricGroupSnapshot) = left.merge(right.asInstanceOf[left.GroupSnapshotType], collectionContext).asInstanceOf[MetricGroupSnapshot] // ??? //Combined(combineMaps(left.metrics, right.metrics)((l, r) ⇒ l.merge(r, collectionContext)))
+}
+
+object TickMetricSnapshotBuffer {
+ case object FlushBuffer
+
+ def props(flushInterval: FiniteDuration, receiver: ActorRef): Props =
+ Props[TickMetricSnapshotBuffer](new TickMetricSnapshotBuffer(flushInterval, receiver))
+}
diff --git a/kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala b/kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala
new file mode 100644
index 00000000..c506fe81
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala
@@ -0,0 +1,77 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import akka.actor.ActorSystem
+import kamon.metric.instrument.{ Histogram }
+
+import scala.collection.concurrent.TrieMap
+import com.typesafe.config.Config
+
+case class TraceMetrics(name: String) extends MetricGroupIdentity {
+ val category = TraceMetrics
+}
+
+object TraceMetrics extends MetricGroupCategory {
+ val name = "trace"
+
+ case object ElapsedTime extends MetricIdentity { val name = "elapsed-time" }
+ case class HttpClientRequest(name: String) extends MetricIdentity
+
+ case class TraceMetricRecorder(elapsedTime: Histogram, private val segmentRecorderFactory: () ⇒ Histogram)
+ extends MetricGroupRecorder {
+
+ private val segments = TrieMap[MetricIdentity, Histogram]()
+
+ def segmentRecorder(segmentIdentity: MetricIdentity): Histogram =
+ segments.getOrElseUpdate(segmentIdentity, segmentRecorderFactory.apply())
+
+ def collect(context: CollectionContext): TraceMetricsSnapshot =
+ TraceMetricsSnapshot(
+ elapsedTime.collect(context),
+ segments.map { case (identity, recorder) ⇒ (identity, recorder.collect(context)) }.toMap)
+
+ def cleanup: Unit = {}
+ }
+
+ case class TraceMetricsSnapshot(elapsedTime: Histogram.Snapshot, segments: Map[MetricIdentity, Histogram.Snapshot])
+ extends MetricGroupSnapshot {
+
+ type GroupSnapshotType = TraceMetricsSnapshot
+
+ def merge(that: TraceMetricsSnapshot, context: CollectionContext): TraceMetricsSnapshot =
+ TraceMetricsSnapshot(elapsedTime.merge(that.elapsedTime, context), Map.empty) // TODO: Merge the segments metrics correctly and test it!
+
+ def metrics: Map[MetricIdentity, MetricSnapshot] = segments + (ElapsedTime -> elapsedTime)
+ }
+
+ val Factory = new MetricGroupFactory {
+ type GroupRecorder = TraceMetricRecorder
+
+ def create(config: Config, system: ActorSystem): TraceMetricRecorder = {
+
+ val settings = config.getConfig("precision.trace")
+ val elapsedTimeConfig = settings.getConfig("elapsed-time")
+ val segmentConfig = settings.getConfig("segment")
+
+ new TraceMetricRecorder(
+ Histogram.fromConfig(elapsedTimeConfig, Scale.Nano),
+ () ⇒ Histogram.fromConfig(segmentConfig, Scale.Nano))
+ }
+ }
+
+}
diff --git a/kamon-core/src/main/scala/kamon/metric/UserMetrics.scala b/kamon-core/src/main/scala/kamon/metric/UserMetrics.scala
new file mode 100644
index 00000000..f3803d37
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/UserMetrics.scala
@@ -0,0 +1,163 @@
+package kamon.metric
+
+import akka.actor
+import akka.actor.{ ActorSystem, ExtendedActorSystem, ExtensionIdProvider, ExtensionId }
+import com.typesafe.config.Config
+import kamon.Kamon
+import kamon.metric.instrument.{ Gauge, MinMaxCounter, Counter, Histogram }
+
+import scala.collection.concurrent.TrieMap
+import scala.concurrent.duration.FiniteDuration
+
+class UserMetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
+ lazy val userMetricsRecorder = Kamon(Metrics)(system).register(UserMetrics, UserMetrics.Factory).get
+
+ def registerHistogram(name: String, precision: Histogram.Precision, highestTrackableValue: Long): Histogram =
+ userMetricsRecorder.buildHistogram(name, precision, highestTrackableValue)
+
+ def registerHistogram(name: String): Histogram =
+ userMetricsRecorder.buildHistogram(name)
+
+ def registerCounter(name: String): Counter =
+ userMetricsRecorder.buildCounter(name)
+
+ def registerMinMaxCounter(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
+ refreshInterval: FiniteDuration): MinMaxCounter = {
+ userMetricsRecorder.buildMinMaxCounter(name, precision, highestTrackableValue, refreshInterval)
+ }
+
+ def registerMinMaxCounter(name: String): MinMaxCounter =
+ userMetricsRecorder.buildMinMaxCounter(name)
+
+ def registerGauge(name: String)(currentValueCollector: Gauge.CurrentValueCollector): Gauge =
+ userMetricsRecorder.buildGauge(name)(currentValueCollector)
+
+ def registerGauge(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
+ refreshInterval: FiniteDuration)(currentValueCollector: Gauge.CurrentValueCollector): Gauge =
+ userMetricsRecorder.buildGauge(name, precision, highestTrackableValue, refreshInterval, currentValueCollector)
+
+ def removeHistogram(name: String): Unit =
+ userMetricsRecorder.removeHistogram(name)
+
+ def removeCounter(name: String): Unit =
+ userMetricsRecorder.removeCounter(name)
+
+ def removeMinMaxCounter(name: String): Unit =
+ userMetricsRecorder.removeMinMaxCounter(name)
+
+ def removeGauge(name: String): Unit =
+ userMetricsRecorder.removeGauge(name)
+}
+
+object UserMetrics extends ExtensionId[UserMetricsExtension] with ExtensionIdProvider with MetricGroupIdentity {
+ def lookup(): ExtensionId[_ <: actor.Extension] = Metrics
+ def createExtension(system: ExtendedActorSystem): UserMetricsExtension = new UserMetricsExtension(system)
+
+ val name: String = "user-metrics-recorder"
+ val category = new MetricGroupCategory {
+ val name: String = "user-metrics"
+ }
+
+ val Factory = new MetricGroupFactory {
+ type GroupRecorder = UserMetricsRecorder
+ def create(config: Config, system: ActorSystem): UserMetricsRecorder = new UserMetricsRecorder(system)
+ }
+
+ class UserMetricsRecorder(system: ActorSystem) extends MetricGroupRecorder {
+ val precisionConfig = system.settings.config.getConfig("kamon.metrics.precision")
+ val defaultHistogramPrecisionConfig = precisionConfig.getConfig("default-histogram-precision")
+ val defaultMinMaxCounterPrecisionConfig = precisionConfig.getConfig("default-min-max-counter-precision")
+ val defaultGaugePrecisionConfig = precisionConfig.getConfig("default-gauge-precision")
+
+ val histograms = TrieMap[String, Histogram]()
+ val counters = TrieMap[String, Counter]()
+ val minMaxCounters = TrieMap[String, MinMaxCounter]()
+ val gauges = TrieMap[String, Gauge]()
+
+ def buildHistogram(name: String, precision: Histogram.Precision, highestTrackableValue: Long): Histogram =
+ histograms.getOrElseUpdate(name, Histogram(highestTrackableValue, precision, Scale.Unit))
+
+ def buildHistogram(name: String): Histogram =
+ histograms.getOrElseUpdate(name, Histogram.fromConfig(defaultHistogramPrecisionConfig))
+
+ def buildCounter(name: String): Counter =
+ counters.getOrElseUpdate(name, Counter())
+
+ def buildMinMaxCounter(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
+ refreshInterval: FiniteDuration): MinMaxCounter = {
+ minMaxCounters.getOrElseUpdate(name, MinMaxCounter(highestTrackableValue, precision, Scale.Unit, refreshInterval, system))
+ }
+
+ def buildMinMaxCounter(name: String): MinMaxCounter =
+ minMaxCounters.getOrElseUpdate(name, MinMaxCounter.fromConfig(defaultMinMaxCounterPrecisionConfig, system))
+
+ def buildGauge(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
+ refreshInterval: FiniteDuration, currentValueCollector: Gauge.CurrentValueCollector): Gauge =
+ gauges.getOrElseUpdate(name, Gauge(precision, highestTrackableValue, Scale.Unit, refreshInterval, system)(currentValueCollector))
+
+ def buildGauge(name: String)(currentValueCollector: Gauge.CurrentValueCollector): Gauge =
+ gauges.getOrElseUpdate(name, Gauge.fromConfig(defaultGaugePrecisionConfig, system)(currentValueCollector))
+
+ def removeHistogram(name: String): Unit =
+ histograms.remove(name)
+
+ def removeCounter(name: String): Unit =
+ counters.remove(name)
+
+ def removeMinMaxCounter(name: String): Unit =
+ minMaxCounters.remove(name).map(_.cleanup)
+
+ def removeGauge(name: String): Unit =
+ gauges.remove(name).map(_.cleanup)
+
+ def collect(context: CollectionContext): UserMetricsSnapshot = {
+ val histogramSnapshots = histograms.map {
+ case (name, histogram) ⇒
+ (UserHistogram(name), histogram.collect(context))
+ } toMap
+
+ val counterSnapshots = counters.map {
+ case (name, counter) ⇒
+ (UserCounter(name), counter.collect(context))
+ } toMap
+
+ val minMaxCounterSnapshots = minMaxCounters.map {
+ case (name, minMaxCounter) ⇒
+ (UserMinMaxCounter(name), minMaxCounter.collect(context))
+ } toMap
+
+ val gaugeSnapshots = gauges.map {
+ case (name, gauge) ⇒
+ (UserGauge(name), gauge.collect(context))
+ } toMap
+
+ UserMetricsSnapshot(histogramSnapshots, counterSnapshots, minMaxCounterSnapshots, gaugeSnapshots)
+ }
+
+ def cleanup: Unit = {}
+ }
+
+ case class UserHistogram(name: String) extends MetricIdentity
+ case class UserCounter(name: String) extends MetricIdentity
+ case class UserMinMaxCounter(name: String) extends MetricIdentity
+ case class UserGauge(name: String) extends MetricIdentity
+
+ case class UserMetricsSnapshot(histograms: Map[UserHistogram, Histogram.Snapshot],
+ counters: Map[UserCounter, Counter.Snapshot],
+ minMaxCounters: Map[UserMinMaxCounter, Histogram.Snapshot],
+ gauges: Map[UserGauge, Histogram.Snapshot])
+ extends MetricGroupSnapshot {
+
+ type GroupSnapshotType = UserMetricsSnapshot
+
+ def merge(that: UserMetricsSnapshot, context: CollectionContext): UserMetricsSnapshot =
+ UserMetricsSnapshot(
+ combineMaps(histograms, that.histograms)((l, r) ⇒ l.merge(r, context)),
+ combineMaps(counters, that.counters)((l, r) ⇒ l.merge(r, context)),
+ combineMaps(minMaxCounters, that.minMaxCounters)((l, r) ⇒ l.merge(r, context)),
+ combineMaps(gauges, that.gauges)((l, r) ⇒ l.merge(r, context)))
+
+ def metrics: Map[MetricIdentity, MetricSnapshot] = histograms ++ counters ++ minMaxCounters ++ gauges
+ }
+
+}
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala
new file mode 100644
index 00000000..b592bcd3
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala
@@ -0,0 +1,59 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric.instrument
+
+import jsr166e.LongAdder
+import kamon.metric.{ CollectionContext, MetricSnapshot, MetricRecorder }
+
+trait Counter extends MetricRecorder {
+ type SnapshotType = Counter.Snapshot
+
+ def increment(): Unit
+ def increment(times: Long): Unit
+}
+
+object Counter {
+
+ def apply(): Counter = new LongAdderCounter
+
+ trait Snapshot extends MetricSnapshot {
+ type SnapshotType = Counter.Snapshot
+
+ def count: Long
+ def merge(that: Counter.Snapshot, context: CollectionContext): Counter.Snapshot
+ }
+}
+
+class LongAdderCounter extends Counter {
+ private val counter = new LongAdder
+
+ def increment(): Unit = counter.increment()
+
+ def increment(times: Long): Unit = {
+ if (times < 0)
+ throw new UnsupportedOperationException("Counters cannot be decremented")
+ counter.add(times)
+ }
+
+ def collect(context: CollectionContext): Counter.Snapshot = CounterSnapshot(counter.sumThenReset())
+
+ def cleanup: Unit = {}
+}
+
+case class CounterSnapshot(count: Long) extends Counter.Snapshot {
+ def merge(that: Counter.Snapshot, context: CollectionContext): Counter.Snapshot = CounterSnapshot(count + that.count)
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala
new file mode 100644
index 00000000..efd7d78f
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala
@@ -0,0 +1,82 @@
+package kamon.metric.instrument
+
+import java.util.concurrent.TimeUnit
+import java.util.concurrent.atomic.AtomicReference
+
+import akka.actor.{ Cancellable, ActorSystem }
+import com.typesafe.config.Config
+import kamon.metric.{ CollectionContext, Scale, MetricRecorder }
+
+import scala.concurrent.duration.FiniteDuration
+
+trait Gauge extends MetricRecorder {
+ type SnapshotType = Histogram.Snapshot
+
+ def record(value: Long)
+ def record(value: Long, count: Long)
+}
+
+object Gauge {
+
+ trait CurrentValueCollector {
+ def currentValue: Long
+ }
+
+ def apply(precision: Histogram.Precision, highestTrackableValue: Long, scale: Scale, refreshInterval: FiniteDuration,
+ system: ActorSystem)(currentValueCollector: CurrentValueCollector): Gauge = {
+
+ val underlyingHistogram = Histogram(highestTrackableValue, precision, scale)
+ val gauge = new HistogramBackedGauge(underlyingHistogram, currentValueCollector)
+
+ val refreshValuesSchedule = system.scheduler.schedule(refreshInterval, refreshInterval) {
+ gauge.refreshValue()
+ }(system.dispatcher) // TODO: Move this to Kamon dispatchers
+
+ gauge.refreshValuesSchedule.set(refreshValuesSchedule)
+ gauge
+ }
+
+ def fromDefaultConfig(system: ActorSystem)(currentValueCollectorFunction: () ⇒ Long): Gauge =
+ fromDefaultConfig(system, functionZeroAsCurrentValueCollector(currentValueCollectorFunction))
+
+ def fromDefaultConfig(system: ActorSystem, currentValueCollector: CurrentValueCollector): Gauge = {
+ val config = system.settings.config.getConfig("kamon.metrics.precision.default-gauge-precision")
+ fromConfig(config, system)(currentValueCollector)
+ }
+
+ def fromConfig(config: Config, system: ActorSystem, scale: Scale)(currentValueCollector: CurrentValueCollector): Gauge = {
+ import scala.concurrent.duration._
+
+ val highest = config.getLong("highest-trackable-value")
+ val significantDigits = config.getInt("significant-value-digits")
+ val refreshInterval = config.getDuration("refresh-interval", TimeUnit.MILLISECONDS)
+
+ Gauge(Histogram.Precision(significantDigits), highest, scale, refreshInterval.millis, system)(currentValueCollector)
+ }
+
+ def fromConfig(config: Config, system: ActorSystem)(currentValueCollector: CurrentValueCollector): Gauge = {
+ fromConfig(config, system, Scale.Unit)(currentValueCollector)
+ }
+
+ implicit def functionZeroAsCurrentValueCollector(f: () ⇒ Long): CurrentValueCollector = new CurrentValueCollector {
+ def currentValue: Long = f.apply()
+ }
+}
+
+class HistogramBackedGauge(underlyingHistogram: Histogram, currentValueCollector: Gauge.CurrentValueCollector) extends Gauge {
+ val refreshValuesSchedule = new AtomicReference[Cancellable]()
+
+ def record(value: Long): Unit = underlyingHistogram.record(value)
+
+ def record(value: Long, count: Long): Unit = underlyingHistogram.record(value, count)
+
+ def collect(context: CollectionContext): Histogram.Snapshot = underlyingHistogram.collect(context)
+
+ def cleanup: Unit = {
+ if (refreshValuesSchedule.get() != null)
+ refreshValuesSchedule.get().cancel()
+ }
+
+ def refreshValue(): Unit = underlyingHistogram.record(currentValueCollector.currentValue)
+}
+
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala
new file mode 100644
index 00000000..67db5d93
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala
@@ -0,0 +1,263 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric.instrument
+
+import java.nio.LongBuffer
+import com.typesafe.config.Config
+import org.HdrHistogram.AtomicHistogramFieldsAccessor
+import org.HdrHistogram.AtomicHistogram
+import kamon.metric._
+
+trait Histogram extends MetricRecorder {
+ type SnapshotType = Histogram.Snapshot
+
+ def record(value: Long)
+ def record(value: Long, count: Long)
+}
+
+object Histogram {
+
+ def apply(highestTrackableValue: Long, precision: Precision, scale: Scale): Histogram =
+ new HdrHistogram(1L, highestTrackableValue, precision.significantDigits, scale)
+
+ def fromConfig(config: Config): Histogram = {
+ fromConfig(config, Scale.Unit)
+ }
+
+ def fromConfig(config: Config, scale: Scale): Histogram = {
+ val highest = config.getLong("highest-trackable-value")
+ val significantDigits = config.getInt("significant-value-digits")
+
+ new HdrHistogram(1L, highest, significantDigits, scale)
+ }
+
+ object HighestTrackableValue {
+ val OneHourInNanoseconds = 3600L * 1000L * 1000L * 1000L
+ }
+
+ case class Precision(significantDigits: Int)
+ object Precision {
+ val Low = Precision(1)
+ val Normal = Precision(2)
+ val Fine = Precision(3)
+ }
+
+ trait Record {
+ def level: Long
+ def count: Long
+
+ private[kamon] def rawCompactRecord: Long
+ }
+
+ case class MutableRecord(var level: Long, var count: Long) extends Record {
+ var rawCompactRecord: Long = 0L
+ }
+
+ trait Snapshot extends MetricSnapshot {
+ type SnapshotType = Histogram.Snapshot
+
+ def isEmpty: Boolean = numberOfMeasurements == 0
+ def scale: Scale
+ def numberOfMeasurements: Long
+ def min: Long
+ def max: Long
+ def recordsIterator: Iterator[Record]
+ def merge(that: Histogram.Snapshot, context: CollectionContext): Histogram.Snapshot
+ }
+
+ object Snapshot {
+ def empty(targetScale: Scale) = new Snapshot {
+ override def min: Long = 0L
+ override def max: Long = 0L
+ override def recordsIterator: Iterator[Record] = Iterator.empty
+ override def merge(that: Snapshot, context: CollectionContext): Snapshot = that
+ override def scale: Scale = targetScale
+ override def numberOfMeasurements: Long = 0L
+ }
+ }
+}
+
+/**
+ * This implementation is meant to be used for real time data collection where data snapshots are taken often over time.
+ * The collect(..) operation extracts all the recorded values from the histogram and resets the counts, but still
+ * leaves it in a consistent state even in the case of concurrent modification while the snapshot is being taken.
+ */
+class HdrHistogram(lowestTrackableValue: Long, highestTrackableValue: Long, significantValueDigits: Int, scale: Scale = Scale.Unit)
+ extends AtomicHistogram(lowestTrackableValue, highestTrackableValue, significantValueDigits)
+ with Histogram with AtomicHistogramFieldsAccessor {
+
+ import AtomicHistogramFieldsAccessor.totalCountUpdater
+
+ def record(value: Long): Unit = recordValue(value)
+
+ def record(value: Long, count: Long): Unit = recordValueWithCount(value, count)
+
+ def collect(context: CollectionContext): Histogram.Snapshot = {
+ import context.buffer
+ buffer.clear()
+ val nrOfMeasurements = writeSnapshotTo(buffer)
+
+ buffer.flip()
+
+ val measurementsArray = Array.ofDim[Long](buffer.limit())
+ buffer.get(measurementsArray, 0, measurementsArray.length)
+ new CompactHdrSnapshot(scale, nrOfMeasurements, measurementsArray, unitMagnitude(), subBucketHalfCount(), subBucketHalfCountMagnitude())
+ }
+
+ def getCounts = countsArray().length()
+
+ def cleanup: Unit = {}
+
+ private def writeSnapshotTo(buffer: LongBuffer): Long = {
+ val counts = countsArray()
+ val countsLength = counts.length()
+
+ var nrOfMeasurements = 0L
+ var index = 0L
+ while (index < countsLength) {
+ val countAtIndex = counts.getAndSet(index.toInt, 0L)
+
+ if (countAtIndex > 0) {
+ buffer.put(CompactHdrSnapshot.compactRecord(index, countAtIndex))
+ nrOfMeasurements += countAtIndex
+ }
+
+ index += 1
+ }
+
+ reestablishTotalCount(nrOfMeasurements)
+ nrOfMeasurements
+ }
+
+ private def reestablishTotalCount(diff: Long): Unit = {
+ def tryUpdateTotalCount: Boolean = {
+ val previousTotalCount = totalCountUpdater.get(this)
+ val newTotalCount = previousTotalCount - diff
+
+ totalCountUpdater.compareAndSet(this, previousTotalCount, newTotalCount)
+ }
+
+ while (!tryUpdateTotalCount) {}
+ }
+
+}
+
+class CompactHdrSnapshot(val scale: Scale, val numberOfMeasurements: Long, compactRecords: Array[Long], unitMagnitude: Int,
+ subBucketHalfCount: Int, subBucketHalfCountMagnitude: Int) extends Histogram.Snapshot {
+
+ def min: Long = if (compactRecords.length == 0) 0 else levelFromCompactRecord(compactRecords(0))
+ def max: Long = if (compactRecords.length == 0) 0 else levelFromCompactRecord(compactRecords(compactRecords.length - 1))
+
+ def merge(that: Histogram.Snapshot, context: CollectionContext): Histogram.Snapshot = {
+ if (that.isEmpty) this else if (this.isEmpty) that else {
+ import context.buffer
+ buffer.clear()
+
+ val selfIterator = recordsIterator
+ val thatIterator = that.recordsIterator
+ var thatCurrentRecord: Histogram.Record = null
+ var mergedNumberOfMeasurements = 0L
+
+ def nextOrNull(iterator: Iterator[Histogram.Record]): Histogram.Record = if (iterator.hasNext) iterator.next() else null
+ def addToBuffer(compactRecord: Long): Unit = {
+ mergedNumberOfMeasurements += countFromCompactRecord(compactRecord)
+ buffer.put(compactRecord)
+ }
+
+ while (selfIterator.hasNext) {
+ val selfCurrentRecord = selfIterator.next()
+
+ // Advance that to no further than the level of selfCurrentRecord
+ thatCurrentRecord = if (thatCurrentRecord == null) nextOrNull(thatIterator) else thatCurrentRecord
+ while (thatCurrentRecord != null && thatCurrentRecord.level < selfCurrentRecord.level) {
+ addToBuffer(thatCurrentRecord.rawCompactRecord)
+ thatCurrentRecord = nextOrNull(thatIterator)
+ }
+
+ // Include the current record of self and optionally merge if has the same level as thatCurrentRecord
+ if (thatCurrentRecord != null && thatCurrentRecord.level == selfCurrentRecord.level) {
+ addToBuffer(mergeCompactRecords(thatCurrentRecord.rawCompactRecord, selfCurrentRecord.rawCompactRecord))
+ thatCurrentRecord = nextOrNull(thatIterator)
+ } else {
+ addToBuffer(selfCurrentRecord.rawCompactRecord)
+ }
+ }
+
+ // Include everything that might have been left from that
+ if (thatCurrentRecord != null) addToBuffer(thatCurrentRecord.rawCompactRecord)
+ while (thatIterator.hasNext) {
+ addToBuffer(thatIterator.next().rawCompactRecord)
+ }
+
+ buffer.flip()
+ val compactRecords = Array.ofDim[Long](buffer.limit())
+ buffer.get(compactRecords)
+
+ new CompactHdrSnapshot(scale, mergedNumberOfMeasurements, compactRecords, unitMagnitude, subBucketHalfCount, subBucketHalfCountMagnitude)
+ }
+ }
+
+ @inline private def mergeCompactRecords(left: Long, right: Long): Long = {
+ val index = left >> 48
+ val leftCount = countFromCompactRecord(left)
+ val rightCount = countFromCompactRecord(right)
+
+ CompactHdrSnapshot.compactRecord(index, leftCount + rightCount)
+ }
+
+ @inline private def levelFromCompactRecord(compactRecord: Long): Long = {
+ val countsArrayIndex = (compactRecord >> 48).toInt
+ var bucketIndex: Int = (countsArrayIndex >> subBucketHalfCountMagnitude) - 1
+ var subBucketIndex: Int = (countsArrayIndex & (subBucketHalfCount - 1)) + subBucketHalfCount
+ if (bucketIndex < 0) {
+ subBucketIndex -= subBucketHalfCount
+ bucketIndex = 0
+ }
+
+ subBucketIndex.toLong << (bucketIndex + unitMagnitude)
+ }
+
+ @inline private def countFromCompactRecord(compactRecord: Long): Long =
+ compactRecord & CompactHdrSnapshot.CompactRecordCountMask
+
+ def recordsIterator: Iterator[Histogram.Record] = new Iterator[Histogram.Record] {
+ var currentIndex = 0
+ val mutableRecord = Histogram.MutableRecord(0, 0)
+
+ override def hasNext: Boolean = currentIndex < compactRecords.length
+
+ override def next(): Histogram.Record = {
+ if (hasNext) {
+ val measurement = compactRecords(currentIndex)
+ mutableRecord.rawCompactRecord = measurement
+ mutableRecord.level = levelFromCompactRecord(measurement)
+ mutableRecord.count = countFromCompactRecord(measurement)
+ currentIndex += 1
+
+ mutableRecord
+ } else {
+ throw new IllegalStateException("The iterator has already been consumed.")
+ }
+ }
+ }
+}
+
+object CompactHdrSnapshot {
+ val CompactRecordCountMask = 0xFFFFFFFFFFFFL
+
+ def compactRecord(index: Long, count: Long): Long = (index << 48) | count
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala b/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala
new file mode 100644
index 00000000..471e7bd4
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala
@@ -0,0 +1,116 @@
+package kamon.metric.instrument
+
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+import java.lang.Math.abs
+import java.util.concurrent.TimeUnit
+import java.util.concurrent.atomic.AtomicReference
+import akka.actor.{ ActorSystem, Cancellable }
+import com.typesafe.config.Config
+import jsr166e.LongMaxUpdater
+import kamon.metric.{ Scale, MetricRecorder, CollectionContext }
+import kamon.util.PaddedAtomicLong
+import scala.concurrent.duration.FiniteDuration
+
+trait MinMaxCounter extends MetricRecorder {
+ override type SnapshotType = Histogram.Snapshot
+
+ def increment(): Unit
+ def increment(times: Long): Unit
+ def decrement()
+ def decrement(times: Long)
+}
+
+object MinMaxCounter {
+
+ def apply(highestTrackableValue: Long, precision: Histogram.Precision, scale: Scale, refreshInterval: FiniteDuration,
+ system: ActorSystem): MinMaxCounter = {
+
+ val underlyingHistogram = Histogram(highestTrackableValue, precision, scale)
+ val minMaxCounter = new PaddedMinMaxCounter(underlyingHistogram)
+
+ val refreshValuesSchedule = system.scheduler.schedule(refreshInterval, refreshInterval) {
+ minMaxCounter.refreshValues()
+ }(system.dispatcher) // TODO: Move this to Kamon dispatchers
+
+ minMaxCounter.refreshValuesSchedule.set(refreshValuesSchedule)
+ minMaxCounter
+ }
+
+ def fromConfig(config: Config, system: ActorSystem): MinMaxCounter = {
+ import scala.concurrent.duration._
+
+ val highest = config.getLong("highest-trackable-value")
+ val significantDigits = config.getInt("significant-value-digits")
+ val refreshInterval = config.getDuration("refresh-interval", TimeUnit.MILLISECONDS)
+
+ apply(highest, Histogram.Precision(significantDigits), Scale.Unit, refreshInterval.millis, system)
+ }
+}
+
+class PaddedMinMaxCounter(underlyingHistogram: Histogram) extends MinMaxCounter {
+ private val min = new LongMaxUpdater
+ private val max = new LongMaxUpdater
+ private val sum = new PaddedAtomicLong
+ val refreshValuesSchedule = new AtomicReference[Cancellable]()
+
+ min.update(0L)
+ max.update(0L)
+
+ def increment(): Unit = increment(1L)
+
+ def increment(times: Long): Unit = {
+ val currentValue = sum.addAndGet(times)
+ max.update(currentValue)
+ }
+
+ def decrement(): Unit = decrement(1L)
+
+ def decrement(times: Long): Unit = {
+ val currentValue = sum.addAndGet(-times)
+ min.update(-currentValue)
+ }
+
+ def collect(context: CollectionContext): Histogram.Snapshot = {
+ refreshValues()
+ underlyingHistogram.collect(context)
+ }
+
+ def cleanup: Unit = {
+ if (refreshValuesSchedule.get() != null)
+ refreshValuesSchedule.get().cancel()
+ }
+
+ def refreshValues(): Unit = {
+ val currentValue = {
+ val value = sum.get()
+ if (value < 0) 0 else value
+ }
+
+ val currentMin = {
+ val minAbs = abs(min.maxThenReset())
+ if (minAbs <= currentValue) minAbs else 0
+ }
+
+ underlyingHistogram.record(currentValue)
+ underlyingHistogram.record(currentMin)
+ underlyingHistogram.record(max.maxThenReset())
+
+ max.update(currentValue)
+ min.update(-currentValue)
+ }
+}
diff --git a/kamon-core/src/main/scala/kamon/metrics/package.scala b/kamon-core/src/main/scala/kamon/metric/package.scala
index 640157a9..43166058 100644
--- a/kamon-core/src/main/scala/kamon/metrics/package.scala
+++ b/kamon-core/src/main/scala/kamon/metric/package.scala
@@ -19,12 +19,7 @@ package kamon
import scala.annotation.tailrec
import com.typesafe.config.Config
-package object metrics {
-
- case class HdrPrecisionConfig(highestTrackableValue: Long, significantValueDigits: Int)
-
- def extractPrecisionConfig(config: Config): HdrPrecisionConfig =
- HdrPrecisionConfig(config.getLong("highest-trackable-value"), config.getInt("significant-value-digits"))
+package object metric {
@tailrec def combineMaps[K, V](left: Map[K, V], right: Map[K, V])(valueMerger: (V, V) ⇒ V): Map[K, V] = {
if (right.isEmpty)
diff --git a/kamon-core/src/main/scala/kamon/metrics/ActorMetrics.scala b/kamon-core/src/main/scala/kamon/metrics/ActorMetrics.scala
deleted file mode 100644
index 9e19dced..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/ActorMetrics.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import com.typesafe.config.Config
-import kamon.metrics.instruments.CounterRecorder
-import org.HdrHistogram.HdrRecorder
-
-case class ActorMetrics(name: String) extends MetricGroupIdentity {
- val category = ActorMetrics
-}
-
-object ActorMetrics extends MetricGroupCategory {
- val name = "actor"
-
- case object ProcessingTime extends MetricIdentity { val name, tag = "processing-time" }
- case object MailboxSize extends MetricIdentity { val name, tag = "mailbox-size" }
- case object TimeInMailbox extends MetricIdentity { val name, tag = "time-in-mailbox" }
- case object ErrorCounter extends MetricIdentity { val name, tag = "errors" }
-
- case class ActorMetricRecorder(processingTime: MetricRecorder, mailboxSize: MetricRecorder, timeInMailbox: MetricRecorder, errorCounter: MetricRecorder)
- extends MetricGroupRecorder {
-
- def collect: MetricGroupSnapshot = {
- ActorMetricSnapshot(processingTime.collect(), mailboxSize.collect(), timeInMailbox.collect(), errorCounter.collect())
- }
- }
-
- case class ActorMetricSnapshot(processingTime: MetricSnapshotLike, mailboxSize: MetricSnapshotLike, timeInMailbox: MetricSnapshotLike, errorCounter: MetricSnapshotLike)
- extends MetricGroupSnapshot {
-
- val metrics: Map[MetricIdentity, MetricSnapshotLike] = Map(
- (ProcessingTime -> processingTime),
- (MailboxSize -> mailboxSize),
- (TimeInMailbox -> timeInMailbox),
- (ErrorCounter -> errorCounter))
- }
-
- val Factory = new MetricGroupFactory {
- type GroupRecorder = ActorMetricRecorder
-
- def create(config: Config): ActorMetricRecorder = {
- val settings = config.getConfig("precision.actor")
-
- val processingTimeConfig = extractPrecisionConfig(settings.getConfig("processing-time"))
- val mailboxSizeConfig = extractPrecisionConfig(settings.getConfig("mailbox-size"))
- val timeInMailboxConfig = extractPrecisionConfig(settings.getConfig("time-in-mailbox"))
-
- new ActorMetricRecorder(
- HdrRecorder(processingTimeConfig.highestTrackableValue, processingTimeConfig.significantValueDigits, Scale.Nano),
- HdrRecorder(mailboxSizeConfig.highestTrackableValue, mailboxSizeConfig.significantValueDigits, Scale.Unit),
- HdrRecorder(timeInMailboxConfig.highestTrackableValue, timeInMailboxConfig.significantValueDigits, Scale.Nano),
- CounterRecorder())
- }
- }
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/CustomMetric.scala b/kamon-core/src/main/scala/kamon/metrics/CustomMetric.scala
deleted file mode 100644
index cd0afac1..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/CustomMetric.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import kamon.metrics.instruments.ContinuousHdrRecorder
-import org.HdrHistogram.HdrRecorder
-import com.typesafe.config.Config
-
-case class CustomMetric(name: String) extends MetricGroupIdentity {
- val category = CustomMetric
-}
-
-object CustomMetric extends MetricGroupCategory {
- val name = "custom-metric"
- val RecordedValues = new MetricIdentity { val name, tag = "recorded-values" }
-
- def histogram(highestTrackableValue: Long, significantValueDigits: Int, scale: Scale, continuous: Boolean = false) =
- new MetricGroupFactory {
-
- type GroupRecorder = CustomMetricRecorder
-
- def create(config: Config): CustomMetricRecorder = {
- val recorder =
- if (continuous) ContinuousHdrRecorder(highestTrackableValue, significantValueDigits, scale)
- else HdrRecorder(highestTrackableValue, significantValueDigits, scale)
-
- new CustomMetricRecorder(RecordedValues, recorder)
- }
- }
-
- class CustomMetricRecorder(identity: MetricIdentity, underlyingRecorder: HdrRecorder)
- extends MetricGroupRecorder {
-
- def record(value: Long): Unit = underlyingRecorder.record(value)
-
- def collect: MetricGroupSnapshot = DefaultMetricGroupSnapshot(Map((identity, underlyingRecorder.collect())))
- }
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/DispatcherMetrics.scala b/kamon-core/src/main/scala/kamon/metrics/DispatcherMetrics.scala
deleted file mode 100644
index f41e0c3f..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/DispatcherMetrics.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import com.typesafe.config.Config
-import org.HdrHistogram.HdrRecorder
-
-case class DispatcherMetrics(name: String) extends MetricGroupIdentity {
- val category = DispatcherMetrics
-}
-
-object DispatcherMetrics extends MetricGroupCategory {
- val name = "dispatcher"
-
- case object MaximumPoolSize extends MetricIdentity { val name, tag = "maximum-pool-size" }
- case object RunningThreadCount extends MetricIdentity { val name, tag = "running-thread-count" }
- case object QueueTaskCount extends MetricIdentity { val name, tag = "queued-task-count" }
- case object PoolSize extends MetricIdentity { val name, tag = "pool-size" }
-
- case class DispatcherMetricRecorder(maximumPoolSize: MetricRecorder, runningThreadCount: MetricRecorder, queueTaskCount: MetricRecorder, poolSize: MetricRecorder)
- extends MetricGroupRecorder {
-
- def collect: MetricGroupSnapshot = {
- DispatcherMetricSnapshot(maximumPoolSize.collect(), runningThreadCount.collect(), queueTaskCount.collect(), poolSize.collect())
- }
- }
-
- case class DispatcherMetricSnapshot(maximumPoolSize: MetricSnapshotLike, runningThreadCount: MetricSnapshotLike, queueTaskCount: MetricSnapshotLike, poolSize: MetricSnapshotLike)
- extends MetricGroupSnapshot {
-
- val metrics: Map[MetricIdentity, MetricSnapshotLike] = Map(
- (MaximumPoolSize -> maximumPoolSize),
- (RunningThreadCount -> runningThreadCount),
- (QueueTaskCount -> queueTaskCount),
- (PoolSize -> poolSize))
- }
-
- val Factory = new MetricGroupFactory {
- type GroupRecorder = DispatcherMetricRecorder
-
- def create(config: Config): DispatcherMetricRecorder = {
- val settings = config.getConfig("precision.dispatcher")
-
- val MaximumPoolSizeConfig = extractPrecisionConfig(settings.getConfig("maximum-pool-size"))
- val RunningThreadCountConfig = extractPrecisionConfig(settings.getConfig("running-thread-count"))
- val QueueTaskCountConfig = extractPrecisionConfig(settings.getConfig("queued-task-count"))
- val PoolSizeConfig = extractPrecisionConfig(settings.getConfig("pool-size"))
-
- new DispatcherMetricRecorder(
- HdrRecorder(MaximumPoolSizeConfig.highestTrackableValue, MaximumPoolSizeConfig.significantValueDigits, Scale.Unit),
- HdrRecorder(RunningThreadCountConfig.highestTrackableValue, RunningThreadCountConfig.significantValueDigits, Scale.Unit),
- HdrRecorder(QueueTaskCountConfig.highestTrackableValue, QueueTaskCountConfig.significantValueDigits, Scale.Unit),
- HdrRecorder(PoolSizeConfig.highestTrackableValue, PoolSizeConfig.significantValueDigits, Scale.Unit))
- }
- }
-}
-
diff --git a/kamon-core/src/main/scala/kamon/metrics/Metrics.scala b/kamon-core/src/main/scala/kamon/metrics/Metrics.scala
deleted file mode 100644
index f07bf38e..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/Metrics.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import annotation.tailrec
-import com.typesafe.config.Config
-import kamon.metrics.MetricSnapshot.Measurement
-import kamon.metrics.InstrumentTypes.InstrumentType
-
-trait MetricGroupCategory {
- def name: String
-}
-
-trait MetricGroupIdentity {
- def name: String
- def category: MetricGroupCategory
-}
-
-trait MetricIdentity {
- def name: String
- def tag: String
-}
-
-trait MetricGroupRecorder {
- def collect: MetricGroupSnapshot
-}
-
-trait MetricGroupSnapshot {
- def metrics: Map[MetricIdentity, MetricSnapshotLike]
-}
-
-case class DefaultMetricGroupSnapshot(metrics: Map[MetricIdentity, MetricSnapshotLike]) extends MetricGroupSnapshot
-
-trait MetricRecorder {
- def record(value: Long)
- def collect(): MetricSnapshotLike
-}
-
-object InstrumentTypes {
- sealed trait InstrumentType
- case object Histogram extends InstrumentType
- case object Gauge extends InstrumentType
- case object Counter extends InstrumentType
-}
-
-trait MetricSnapshotLike {
- def instrumentType: InstrumentType
- def numberOfMeasurements: Long
- def scale: Scale
- def measurements: Vector[Measurement]
-
- def max: Long = measurements.lastOption.map(_.value).getOrElse(0)
- def min: Long = measurements.headOption.map(_.value).getOrElse(0)
-
- def merge(that: MetricSnapshotLike): MetricSnapshotLike = {
- val mergedMeasurements = Vector.newBuilder[Measurement]
-
- @tailrec def go(left: Vector[Measurement], right: Vector[Measurement], totalNrOfMeasurements: Long): Long = {
- if (left.nonEmpty && right.nonEmpty) {
- val leftValue = left.head
- val rightValue = right.head
-
- if (rightValue.value == leftValue.value) {
- val merged = rightValue.merge(leftValue)
- mergedMeasurements += merged
- go(left.tail, right.tail, totalNrOfMeasurements + merged.count)
- } else {
- if (leftValue.value < rightValue.value) {
- mergedMeasurements += leftValue
- go(left.tail, right, totalNrOfMeasurements + leftValue.count)
- } else {
- mergedMeasurements += rightValue
- go(left, right.tail, totalNrOfMeasurements + rightValue.count)
- }
- }
- } else {
- if (left.isEmpty && right.nonEmpty) {
- mergedMeasurements += right.head
- go(left, right.tail, totalNrOfMeasurements + right.head.count)
- } else {
- if (left.nonEmpty && right.isEmpty) {
- mergedMeasurements += left.head
- go(left.tail, right, totalNrOfMeasurements + left.head.count)
- } else totalNrOfMeasurements
- }
- }
- }
-
- val totalNrOfMeasurements = go(measurements, that.measurements, 0)
- MetricSnapshot(instrumentType, totalNrOfMeasurements, scale, mergedMeasurements.result())
- }
-}
-
-case class MetricSnapshot(instrumentType: InstrumentType, numberOfMeasurements: Long, scale: Scale,
- measurements: Vector[MetricSnapshot.Measurement]) extends MetricSnapshotLike
-
-object MetricSnapshot {
- case class Measurement(value: Long, count: Long) {
- def merge(that: Measurement) = Measurement(value, count + that.count)
- }
-}
-
-trait MetricGroupFactory {
- type GroupRecorder <: MetricGroupRecorder
- def create(config: Config): GroupRecorder
-}
-
diff --git a/kamon-core/src/main/scala/kamon/metrics/Subscriptions.scala b/kamon-core/src/main/scala/kamon/metrics/Subscriptions.scala
deleted file mode 100644
index c9990229..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/Subscriptions.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import akka.actor.{ Props, ActorRef, Actor }
-import kamon.metrics.Subscriptions.{ MetricGroupFilter, FlushMetrics, TickMetricSnapshot, Subscribe }
-import kamon.util.GlobPathFilter
-import scala.concurrent.duration.{ FiniteDuration, Duration }
-import java.util.concurrent.TimeUnit
-import kamon.Kamon
-import kamon.metrics.TickMetricSnapshotBuffer.{ Combined, FlushBuffer }
-
-class Subscriptions extends Actor {
- import context.system
-
- val config = context.system.settings.config
- val tickInterval = Duration(config.getDuration("kamon.metrics.tick-interval", TimeUnit.NANOSECONDS), TimeUnit.NANOSECONDS)
- val flushMetricsSchedule = context.system.scheduler.schedule(tickInterval, tickInterval, self, FlushMetrics)(context.dispatcher)
-
- var lastTick: Long = System.currentTimeMillis()
- var subscribedPermanently: Map[MetricGroupFilter, List[ActorRef]] = Map.empty
- var subscribedForOneShot: Map[MetricGroupFilter, List[ActorRef]] = Map.empty
-
- def receive = {
- case Subscribe(category, selection, permanent) ⇒ subscribe(category, selection, permanent)
- case FlushMetrics ⇒ flush()
- }
-
- def subscribe(category: MetricGroupCategory, selection: String, permanent: Boolean): Unit = {
- val filter = MetricGroupFilter(category, new GlobPathFilter(selection))
- if (permanent) {
- val receivers = subscribedPermanently.get(filter).getOrElse(Nil)
- subscribedPermanently = subscribedPermanently.updated(filter, sender :: receivers)
-
- } else {
- val receivers = subscribedForOneShot.get(filter).getOrElse(Nil)
- subscribedForOneShot = subscribedForOneShot.updated(filter, sender :: receivers)
- }
-
- }
-
- def flush(): Unit = {
- val currentTick = System.currentTimeMillis()
- val snapshots = Kamon(Metrics).collect
-
- dispatchSelectedMetrics(lastTick, currentTick, subscribedPermanently, snapshots)
- dispatchSelectedMetrics(lastTick, currentTick, subscribedForOneShot, snapshots)
-
- lastTick = currentTick
- subscribedForOneShot = Map.empty
- }
-
- def dispatchSelectedMetrics(lastTick: Long, currentTick: Long, subscriptions: Map[MetricGroupFilter, List[ActorRef]],
- snapshots: Map[MetricGroupIdentity, MetricGroupSnapshot]): Unit = {
-
- for ((filter, receivers) ← subscriptions) yield {
- val selection = snapshots.filter(group ⇒ filter.accept(group._1))
- val tickMetrics = TickMetricSnapshot(lastTick, currentTick, selection)
-
- receivers.foreach(_ ! tickMetrics)
- }
- }
-}
-
-object Subscriptions {
- case object FlushMetrics
- case class Subscribe(category: MetricGroupCategory, selection: String, permanently: Boolean = false)
- case class TickMetricSnapshot(from: Long, to: Long, metrics: Map[MetricGroupIdentity, MetricGroupSnapshot])
-
- case class MetricGroupFilter(category: MetricGroupCategory, globFilter: GlobPathFilter) {
- def accept(identity: MetricGroupIdentity): Boolean = {
- category.equals(identity.category) && globFilter.accept(identity.name)
- }
- }
-}
-
-class TickMetricSnapshotBuffer(flushInterval: FiniteDuration, receiver: ActorRef) extends Actor {
- val flushSchedule = context.system.scheduler.schedule(flushInterval, flushInterval, self, FlushBuffer)(context.dispatcher)
-
- def receive = empty
-
- def empty: Actor.Receive = {
- case tick: TickMetricSnapshot ⇒ context become (buffering(tick))
- case FlushBuffer ⇒ // Nothing to flush.
- }
-
- def buffering(buffered: TickMetricSnapshot): Actor.Receive = {
- case TickMetricSnapshot(_, to, tickMetrics) ⇒
- val combinedMetrics = combineMaps(buffered.metrics, tickMetrics)(mergeMetricGroup)
- val combinedSnapshot = TickMetricSnapshot(buffered.from, to, combinedMetrics)
-
- context become (buffering(combinedSnapshot))
-
- case FlushBuffer ⇒
- receiver ! buffered
- context become (empty)
-
- }
-
- override def postStop(): Unit = {
- flushSchedule.cancel()
- super.postStop()
- }
-
- def mergeMetricGroup(left: MetricGroupSnapshot, right: MetricGroupSnapshot) = Combined(combineMaps(left.metrics, right.metrics)((l, r) ⇒ l.merge(r)))
-}
-
-object TickMetricSnapshotBuffer {
- case object FlushBuffer
-
- case class Combined(metrics: Map[MetricIdentity, MetricSnapshotLike]) extends MetricGroupSnapshot
-
- def props(flushInterval: FiniteDuration, receiver: ActorRef): Props =
- Props[TickMetricSnapshotBuffer](new TickMetricSnapshotBuffer(flushInterval, receiver))
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/TraceMetrics.scala b/kamon-core/src/main/scala/kamon/metrics/TraceMetrics.scala
deleted file mode 100644
index 5454edf5..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/TraceMetrics.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import org.HdrHistogram.HdrRecorder
-import scala.collection.concurrent.TrieMap
-import com.typesafe.config.Config
-
-case class TraceMetrics(name: String) extends MetricGroupIdentity {
- val category = TraceMetrics
-}
-
-object TraceMetrics extends MetricGroupCategory {
- val name = "trace"
-
- case object ElapsedTime extends MetricIdentity { val name, tag = "elapsed-time" }
- case class HttpClientRequest(name: String, tag: String) extends MetricIdentity
-
- class TraceMetricRecorder(val elapsedTime: HdrRecorder, private val segmentRecorderFactory: () ⇒ HdrRecorder)
- extends MetricGroupRecorder {
-
- private val segments = TrieMap[MetricIdentity, HdrRecorder]()
-
- def segmentRecorder(segmentIdentity: MetricIdentity): HdrRecorder =
- segments.getOrElseUpdate(segmentIdentity, segmentRecorderFactory.apply())
-
- def collect: MetricGroupSnapshot = TraceMetricSnapshot(elapsedTime.collect(),
- segments.map { case (identity, recorder) ⇒ (identity, recorder.collect()) }.toMap)
- }
-
- case class TraceMetricSnapshot(elapsedTime: MetricSnapshotLike, segments: Map[MetricIdentity, MetricSnapshotLike])
- extends MetricGroupSnapshot {
-
- def metrics: Map[MetricIdentity, MetricSnapshotLike] = segments + (ElapsedTime -> elapsedTime)
- }
-
- val Factory = new MetricGroupFactory {
- type GroupRecorder = TraceMetricRecorder
-
- def create(config: Config): TraceMetricRecorder = {
-
- val settings = config.getConfig("precision.trace")
- val elapsedTimeConfig = extractPrecisionConfig(settings.getConfig("elapsed-time"))
- val segmentConfig = extractPrecisionConfig(settings.getConfig("segment"))
-
- new TraceMetricRecorder(
- HdrRecorder(elapsedTimeConfig.highestTrackableValue, elapsedTimeConfig.significantValueDigits, Scale.Nano),
- () ⇒ HdrRecorder(segmentConfig.highestTrackableValue, segmentConfig.significantValueDigits, Scale.Nano))
- }
- }
-
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/instruments/ContinuousHdrRecorder.scala b/kamon-core/src/main/scala/kamon/metrics/instruments/ContinuousHdrRecorder.scala
deleted file mode 100644
index 3a39ec69..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/instruments/ContinuousHdrRecorder.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics.instruments
-
-import org.HdrHistogram.HdrRecorder
-import kamon.metrics.{ Scale, MetricSnapshotLike }
-
-/**
- * This recorder keeps track of the last value recoded and automatically adds it after collecting a snapshot. This is
- * useful in cases where the absence of recordings does not necessarily mean the absence of values. For example, if this
- * recorder is used for recording the mailbox size of an actor, and it only gets updated upon message enqueue o dequeue,
- * the absence of recordings during 1 second means that the size hasn't change (example: the actor being blocked doing
- * some work) and it should keep its last known value, instead of dropping to zero and then going back to the real value
- * after a new event is processed.
- *
- */
-class ContinuousHdrRecorder(highestTrackableValue: Long, significantValueDigits: Int, scale: Scale)
- extends HdrRecorder(highestTrackableValue, significantValueDigits, scale) {
-
- @volatile private var lastRecordedValue: Long = 0
-
- override def record(value: Long): Unit = {
- lastRecordedValue = value
- super.record(value)
- }
-
- override def collect(): MetricSnapshotLike = {
- val snapshot = super.collect()
- super.record(lastRecordedValue)
-
- snapshot
- }
-}
-
-object ContinuousHdrRecorder {
- def apply(highestTrackableValue: Long, significantValueDigits: Int, scale: Scale) =
- new ContinuousHdrRecorder(highestTrackableValue, significantValueDigits, scale)
-} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/metrics/instruments/HdrRecorder.scala b/kamon-core/src/main/scala/kamon/metrics/instruments/HdrRecorder.scala
deleted file mode 100644
index ce4fd76d..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/instruments/HdrRecorder.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package org.HdrHistogram
-
-import java.util.concurrent.atomic.AtomicLongFieldUpdater
-import scala.annotation.tailrec
-import kamon.metrics._
-
-/**
- * This implementation aims to be used for real time data collection where data snapshots are taken often over time.
- * The snapshotAndReset() operation extracts all the recorded values from the histogram and resets the counts, but still
- * leave it in a consistent state even in the case of concurrent modification while the snapshot is being taken.
- */
-class HdrRecorder(highestTrackableValue: Long, significantValueDigits: Int, scale: Scale)
- extends AtomicHistogram(1L, highestTrackableValue, significantValueDigits) with MetricRecorder {
-
- import HdrRecorder.totalCountUpdater
-
- def record(value: Long): Unit = recordValue(value)
-
- def collect(): MetricSnapshotLike = {
- val entries = Vector.newBuilder[MetricSnapshot.Measurement]
- val countsLength = counts.length()
-
- @tailrec def iterate(index: Int, previousValue: Long, nrOfRecordings: Long, bucketLimit: Long, increment: Long): Long = {
- if (index < countsLength) {
- val currentValue = previousValue + increment
- val countAtValue = counts.getAndSet(index, 0)
-
- if (countAtValue > 0)
- entries += MetricSnapshot.Measurement(currentValue, countAtValue)
-
- if (currentValue == bucketLimit)
- iterate(index + 1, currentValue, nrOfRecordings + countAtValue, (bucketLimit << 1) + 1, increment << 1)
- else
- iterate(index + 1, currentValue, nrOfRecordings + countAtValue, bucketLimit, increment)
- } else {
- nrOfRecordings
- }
- }
-
- val nrOfRecordings = iterate(0, -1, 0, subBucketMask, 1)
-
- def tryUpdateTotalCount: Boolean = {
- val previousTotalCount = getTotalCount
- val newTotalCount = previousTotalCount - nrOfRecordings
-
- totalCountUpdater.compareAndSet(this, previousTotalCount, newTotalCount)
- }
-
- while (!tryUpdateTotalCount) {}
-
- MetricSnapshot(InstrumentTypes.Histogram, nrOfRecordings, scale, entries.result())
- }
-
-}
-
-object HdrRecorder {
- val totalCountUpdater = AtomicLongFieldUpdater.newUpdater(classOf[AtomicHistogram], "totalCount")
-
- def apply(highestTrackableValue: Long, significantValueDigits: Int, scale: Scale): HdrRecorder =
- new HdrRecorder(highestTrackableValue, significantValueDigits, scale)
-
-}
diff --git a/kamon-core/src/main/scala/kamon/metrics/instruments/MinMaxCounter.scala b/kamon-core/src/main/scala/kamon/metrics/instruments/MinMaxCounter.scala
deleted file mode 100644
index ba2550af..00000000
--- a/kamon-core/src/main/scala/kamon/metrics/instruments/MinMaxCounter.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-package kamon.metrics.instruments
-
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-import java.lang.Math._
-import jsr166e.LongMaxUpdater
-import kamon.util.PaddedAtomicLong
-import kamon.metrics.instruments.MinMaxCounter.CounterMeasurement
-
-class MinMaxCounter {
- private val min = new LongMaxUpdater
- private val max = new LongMaxUpdater
- private val sum = new PaddedAtomicLong
-
- min.update(0L)
- max.update(0L)
-
- def increment(value: Long = 1L): Unit = {
- val currentValue = sum.addAndGet(value)
- max.update(currentValue)
- }
-
- def decrement(value: Long = 1L): Unit = {
- val currentValue = sum.addAndGet(-value)
- min.update(-currentValue)
- }
-
- def collect(): CounterMeasurement = {
- val currentValue = {
- val value = sum.get()
- if (value < 0) 0 else value
- }
- val result = CounterMeasurement(abs(min.maxThenReset()), max.maxThenReset(), currentValue)
- max.update(currentValue)
- min.update(-currentValue)
- result
- }
-}
-
-object MinMaxCounter {
- def apply() = new MinMaxCounter()
-
- case class CounterMeasurement(min: Long, max: Long, current: Long)
-}
diff --git a/kamon-core/src/main/scala/kamon/standalone/KamonStandalone.scala b/kamon-core/src/main/scala/kamon/standalone/KamonStandalone.scala
new file mode 100644
index 00000000..490bc127
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/standalone/KamonStandalone.scala
@@ -0,0 +1,61 @@
+package kamon.standalone
+
+import akka.actor.ActorSystem
+import com.typesafe.config.Config
+import kamon.Kamon
+import kamon.metric.UserMetrics
+import kamon.metric.instrument.{ Gauge, MinMaxCounter, Counter, Histogram }
+
+import scala.concurrent.duration.FiniteDuration
+
+trait KamonStandalone {
+ private[kamon] def system: ActorSystem
+
+ def registerHistogram(name: String, precision: Histogram.Precision, highestTrackableValue: Long): Histogram =
+ Kamon(UserMetrics)(system).registerHistogram(name, precision, highestTrackableValue)
+
+ def registerHistogram(name: String): Histogram =
+ Kamon(UserMetrics)(system).registerHistogram(name)
+
+ def registerCounter(name: String): Counter =
+ Kamon(UserMetrics)(system).registerCounter(name)
+
+ def registerMinMaxCounter(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
+ refreshInterval: FiniteDuration): MinMaxCounter =
+ Kamon(UserMetrics)(system).registerMinMaxCounter(name, precision, highestTrackableValue, refreshInterval)
+
+ def registerMinMaxCounter(name: String): MinMaxCounter =
+ Kamon(UserMetrics)(system).registerMinMaxCounter(name)
+
+ def registerGauge(name: String)(currentValueCollector: Gauge.CurrentValueCollector): Gauge =
+ Kamon(UserMetrics)(system).registerGauge(name)(currentValueCollector)
+
+ def registerGauge(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
+ refreshInterval: FiniteDuration)(currentValueCollector: Gauge.CurrentValueCollector): Gauge =
+ Kamon(UserMetrics)(system).registerGauge(name, precision, highestTrackableValue, refreshInterval)(currentValueCollector)
+
+ def removeHistogram(name: String): Unit =
+ Kamon(UserMetrics)(system).removeHistogram(name)
+
+ def removeCounter(name: String): Unit =
+ Kamon(UserMetrics)(system).removeCounter(name)
+
+ def removeMinMaxCounter(name: String): Unit =
+ Kamon(UserMetrics)(system).removeMinMaxCounter(name)
+
+ def removeGauge(name: String): Unit =
+ Kamon(UserMetrics)(system).removeGauge(name)
+}
+
+object KamonStandalone {
+
+ def buildFromConfig(config: Config): KamonStandalone = buildFromConfig(config, "kamon-standalone")
+
+ def buildFromConfig(config: Config, actorSystemName: String): KamonStandalone = new KamonStandalone {
+ val system: ActorSystem = ActorSystem(actorSystemName, config)
+ }
+}
+
+object EmbeddedKamonStandalone extends KamonStandalone {
+ private[kamon] lazy val system = ActorSystem("kamon-standalone")
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/trace/TraceContext.scala b/kamon-core/src/main/scala/kamon/trace/TraceContext.scala
index 307cf17a..9ce3cd4e 100644
--- a/kamon-core/src/main/scala/kamon/trace/TraceContext.scala
+++ b/kamon-core/src/main/scala/kamon/trace/TraceContext.scala
@@ -18,11 +18,11 @@ package kamon.trace
import akka.actor.ActorSystem
import kamon.Kamon
-import kamon.metrics._
+import kamon.metric._
import java.util.concurrent.ConcurrentLinkedQueue
import kamon.trace.TraceContextAware.DefaultTraceContextAware
import kamon.trace.TraceContext.SegmentIdentity
-import kamon.metrics.TraceMetrics.TraceMetricRecorder
+import kamon.metric.TraceMetrics.TraceMetricRecorder
trait TraceContext {
def name: String
@@ -41,7 +41,7 @@ object TraceContext {
}
trait SegmentCompletionHandle {
- def finish(metadata: Map[String, String])
+ def finish(metadata: Map[String, String] = Map.empty)
}
case class SegmentData(identity: MetricIdentity, duration: Long, metadata: Map[String, String])
@@ -76,7 +76,7 @@ object SegmentCompletionHandleAware {
}
class SimpleMetricCollectionContext(@volatile private var _name: String, val token: String, metadata: Map[String, String],
- val system: ActorSystem) extends TraceContext {
+ val system: ActorSystem) extends TraceContext {
@volatile private var _isOpen = true
val levelOfDetail = OnlyMetrics
val startMark = System.nanoTime()
diff --git a/kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala b/kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala
index 0e264cd2..0b3118ed 100644
--- a/kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala
+++ b/kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala
@@ -16,7 +16,10 @@
package kamon.trace
+import scala.language.experimental.macros
import java.util.concurrent.atomic.AtomicLong
+import kamon.macros.InlineTraceContextMacro
+
import scala.util.Try
import java.net.InetAddress
import akka.actor.ActorSystem
@@ -33,7 +36,7 @@ object TraceRecorder {
def newToken = "%s-%s".format(hostnamePrefix, tokenCounter.incrementAndGet())
private def newTraceContext(name: String, token: Option[String], metadata: Map[String, String],
- system: ActorSystem): TraceContext = {
+ system: ActorSystem): TraceContext = {
// In the future this should select between implementations.
val finalToken = token.getOrElse(newToken)
@@ -51,7 +54,7 @@ object TraceRecorder {
traceContextStorage.set(Some(ctx))
}
- def startSegment(identity: SegmentIdentity, metadata: Map[String, String]): Option[SegmentCompletionHandle] =
+ def startSegment(identity: SegmentIdentity, metadata: Map[String, String] = Map.empty): Option[SegmentCompletionHandle] =
currentContext.map(_.startSegment(identity, metadata))
def rename(name: String): Unit = currentContext.map(_.rename(name))
@@ -66,6 +69,8 @@ object TraceRecorder {
try thunk finally setContext(oldContext)
}
+ def withInlineTraceContextReplacement[T](traceCtx: Option[TraceContext])(thunk: ⇒ T): T = macro InlineTraceContextMacro.withInlineTraceContextImpl[T, Option[TraceContext]]
+
def finish(metadata: Map[String, String] = Map.empty): Unit = currentContext.map(_.finish(metadata))
}
diff --git a/kamon-core/src/test/scala/kamon/trace/instrumentation/ActorMessagePassingTracingSpec.scala b/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala
index 4e62c9f7..0f682500 100644
--- a/kamon-core/src/test/scala/kamon/trace/instrumentation/ActorMessagePassingTracingSpec.scala
+++ b/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala
@@ -13,19 +13,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
-package kamon.trace.instrumentation
-
-import org.scalatest.WordSpecLike
-import akka.actor.{ Actor, Props, ActorSystem }
+package kamon.instrumentation.akka
+import akka.actor.{ Actor, ActorSystem, Props }
+import akka.pattern.{ ask, pipe }
+import akka.routing.RoundRobinPool
import akka.testkit.{ ImplicitSender, TestKit }
-import kamon.trace.TraceRecorder
-import akka.pattern.{ pipe, ask }
import akka.util.Timeout
+import kamon.trace.TraceRecorder
+import org.scalatest.WordSpecLike
+
import scala.concurrent.duration._
-import akka.routing.{ RoundRobinPool }
-class ActorMessagePassingTracingSpec extends TestKit(ActorSystem("actor-message-passing-tracing-spec")) with WordSpecLike with ImplicitSender {
+class ActorCellInstrumentationSpec extends TestKit(ActorSystem("actor-cell-instrumentation-spec")) with WordSpecLike
+ with ImplicitSender {
+
implicit val executionContext = system.dispatcher
"the message passing instrumentation" should {
diff --git a/kamon-core/src/test/scala/kamon/trace/instrumentation/ActorLoggingSpec.scala b/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorLoggingInstrumentationSpec.scala
index 81fd9cbc..3dab44bc 100644
--- a/kamon-core/src/test/scala/kamon/trace/instrumentation/ActorLoggingSpec.scala
+++ b/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorLoggingInstrumentationSpec.scala
@@ -13,15 +13,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
-package kamon.trace.instrumentation
+package kamon.instrumentation.akka
-import akka.testkit.TestKit
-import org.scalatest.{ Inspectors, Matchers, WordSpecLike }
-import akka.actor.{ Props, ActorLogging, Actor, ActorSystem }
+import akka.actor.{ Actor, ActorLogging, ActorSystem, Props }
import akka.event.Logging.LogEvent
+import akka.testkit.TestKit
import kamon.trace.{ TraceContextAware, TraceRecorder }
+import org.scalatest.{ Inspectors, Matchers, WordSpecLike }
-class ActorLoggingSpec extends TestKit(ActorSystem("actor-logging-spec")) with WordSpecLike with Matchers with Inspectors {
+class ActorLoggingInstrumentationSpec extends TestKit(ActorSystem("actor-logging-instrumentation-spec")) with WordSpecLike
+ with Matchers with Inspectors {
"the ActorLogging instrumentation" should {
"attach the TraceContext (if available) to log events" in {
diff --git a/kamon-core/src/test/scala/kamon/trace/instrumentation/ActorSystemMessagePassingInstrumentationSpec.scala b/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentationSpec.scala
index ed239b38..47867c55 100644
--- a/kamon-core/src/test/scala/kamon/trace/instrumentation/ActorSystemMessagePassingInstrumentationSpec.scala
+++ b/kamon-core/src/test/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentationSpec.scala
@@ -1,14 +1,17 @@
-package kamon.trace.instrumentation
+package kamon.instrumentation.akka
-import akka.testkit.{ ImplicitSender, TestKit }
+import akka.actor.SupervisorStrategy.{ Escalate, Restart, Resume, Stop }
import akka.actor._
-import org.scalatest.WordSpecLike
+import akka.testkit.{ ImplicitSender, TestKit }
import kamon.trace.TraceRecorder
-import scala.util.control.NonFatal
-import akka.actor.SupervisorStrategy.{ Escalate, Stop, Restart, Resume }
+import org.scalatest.WordSpecLike
+
import scala.concurrent.duration._
+import scala.util.control.NonFatal
+
+class ActorSystemMessageInstrumentationSpec extends TestKit(ActorSystem("actor-system-message-instrumentation-spec"))
+ with WordSpecLike with ImplicitSender {
-class ActorSystemMessagePassingInstrumentationSpec extends TestKit(ActorSystem("actor-message-passing-tracing-spec")) with WordSpecLike with ImplicitSender {
implicit val executionContext = system.dispatcher
"the system message passing instrumentation" should {
@@ -107,7 +110,7 @@ class ActorSystemMessagePassingInstrumentationSpec extends TestKit(ActorSystem("
}
def supervisorWithDirective(directive: SupervisorStrategy.Directive, sendPreRestart: Boolean = false, sendPostRestart: Boolean = false,
- sendPostStop: Boolean = false, sendPreStart: Boolean = false): ActorRef = {
+ sendPostStop: Boolean = false, sendPreStart: Boolean = false): ActorRef = {
class GrandParent extends Actor {
val child = context.actorOf(Props(new Parent))
diff --git a/kamon-core/src/test/scala/kamon/trace/instrumentation/AskPatternTracingSpec.scala b/kamon-core/src/test/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala
index fb886de6..d914ffe8 100644
--- a/kamon-core/src/test/scala/kamon/trace/instrumentation/AskPatternTracingSpec.scala
+++ b/kamon-core/src/test/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala
@@ -14,19 +14,20 @@
* =========================================================================================
*/
-package kamon.trace.instrumentation
+package kamon.instrumentation.akka
-import akka.testkit.TestKitBase
-import akka.actor.{ Props, Actor, ActorSystem }
-import org.scalatest.{ Matchers, WordSpecLike }
+import akka.actor.{ Actor, ActorSystem, Props }
import akka.event.Logging.Warning
-import scala.concurrent.duration._
import akka.pattern.ask
+import akka.testkit.TestKitBase
import akka.util.Timeout
-import kamon.trace.{ TraceContextAware, TraceRecorder }
import com.typesafe.config.ConfigFactory
+import kamon.trace.{ TraceContextAware, TraceRecorder }
+import org.scalatest.{ Matchers, WordSpecLike }
+
+import scala.concurrent.duration._
-class AskPatternTracingSpec extends TestKitBase with WordSpecLike with Matchers {
+class AskPatternInstrumentationSpec extends TestKitBase with WordSpecLike with Matchers {
implicit lazy val system: ActorSystem = ActorSystem("ask-pattern-tracing-spec", ConfigFactory.parseString(
"""
|kamon {
diff --git a/kamon-core/src/test/scala/kamon/trace/instrumentation/FutureTracingSpec.scala b/kamon-core/src/test/scala/kamon/instrumentation/scala/FutureInstrumentationSpec.scala
index b1765fd8..31afd3ff 100644
--- a/kamon-core/src/test/scala/kamon/trace/instrumentation/FutureTracingSpec.scala
+++ b/kamon-core/src/test/scala/kamon/instrumentation/scala/FutureInstrumentationSpec.scala
@@ -13,16 +13,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
-package kamon.trace.instrumentation
+package kamon.instrumentation.scala
-import scala.concurrent.{ ExecutionContext, Future }
-import org.scalatest.{ Matchers, OptionValues, WordSpecLike }
-import org.scalatest.concurrent.{ ScalaFutures, PatienceConfiguration }
-import kamon.trace.TraceRecorder
-import akka.testkit.TestKit
import akka.actor.ActorSystem
+import akka.testkit.TestKit
+import kamon.trace.TraceRecorder
+import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
+import org.scalatest.{ Matchers, OptionValues, WordSpecLike }
+
+import scala.concurrent.Future
-class FutureTracingSpec extends TestKit(ActorSystem("actor-message-passing-tracing-spec")) with WordSpecLike with Matchers
+class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentation-spec")) with WordSpecLike with Matchers
with ScalaFutures with PatienceConfiguration with OptionValues {
implicit val execContext = system.dispatcher
diff --git a/kamon-core/src/test/scala/kamon/metric/ActorMetricsSpec.scala b/kamon-core/src/test/scala/kamon/metric/ActorMetricsSpec.scala
new file mode 100644
index 00000000..a05dc344
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/metric/ActorMetricsSpec.scala
@@ -0,0 +1,205 @@
+/* =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import java.nio.LongBuffer
+
+import akka.instrumentation.ActorCellMetrics
+import kamon.metric.ActorMetricsTestActor._
+import kamon.metric.instrument.Histogram.MutableRecord
+import org.scalatest.{ WordSpecLike, Matchers }
+import akka.testkit.{ ImplicitSender, TestProbe, TestKitBase }
+import akka.actor._
+import com.typesafe.config.ConfigFactory
+import scala.concurrent.duration._
+import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.ActorMetrics.{ ActorMetricsRecorder, ActorMetricSnapshot }
+
+class ActorMetricsSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender {
+ implicit lazy val system: ActorSystem = ActorSystem("actor-metrics-spec", ConfigFactory.parseString(
+ """
+ |kamon.metrics {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |
+ | filters = [
+ | {
+ | actor {
+ | includes = [ "user/tracked-*", "user/measuring-*", "user/clean-after-collect" ]
+ | excludes = [ "user/tracked-explicitly-excluded"]
+ | }
+ | }
+ | ]
+ | precision {
+ | default-histogram-precision {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ |
+ | default-min-max-counter-precision {
+ | refresh-interval = 1 second
+ | highest-trackable-value = 999999999
+ | significant-value-digits = 2
+ | }
+ | }
+ |}
+ """.stripMargin))
+
+ "the Kamon actor metrics" should {
+ "respect the configured include and exclude filters" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("tracked-actor")
+ actorMetricsRecorderOf(trackedActor) should not be empty
+
+ val nonTrackedActor = createTestActor("non-tracked-actor")
+ actorMetricsRecorderOf(nonTrackedActor) shouldBe empty
+
+ val trackedButExplicitlyExcluded = createTestActor("tracked-explicitly-excluded")
+ actorMetricsRecorderOf(trackedButExplicitlyExcluded) shouldBe empty
+ }
+
+ "reset all recording instruments after taking a snapshot" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("clean-after-collect")
+ val trackedActorMetrics = actorMetricsRecorderOf(trackedActor).get
+ for (i ← 1 to 100) {
+ trackedActor ! Discard
+ }
+ trackedActor ! Fail
+ trackedActor ! TrackTimings(sleep = Some(1 second))
+ expectMsgType[TrackedTimings]
+
+ val firstSnapshot = takeSnapshotOf(trackedActorMetrics)
+ firstSnapshot.errors.count should be(1L)
+ firstSnapshot.mailboxSize.numberOfMeasurements should be > 0L
+ firstSnapshot.processingTime.numberOfMeasurements should be(103L) // 102 examples + Initialize message
+ firstSnapshot.timeInMailbox.numberOfMeasurements should be(103L) // 102 examples + Initialize message
+
+ val secondSnapshot = takeSnapshotOf(trackedActorMetrics) // Ensure that the recorders are clean
+ secondSnapshot.errors.count should be(0L)
+ secondSnapshot.mailboxSize.numberOfMeasurements should be <= 3L
+ secondSnapshot.processingTime.numberOfMeasurements should be(0L) // 102 examples + Initialize message
+ secondSnapshot.timeInMailbox.numberOfMeasurements should be(0L) // 102 examples + Initialize message
+ }
+
+ "record the processing-time of the receive function" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("measuring-processing-time")
+ val trackedActorMetrics = actorMetricsRecorderOf(trackedActor).get
+ takeSnapshotOf(trackedActorMetrics) // Ensure that the recorders are clean
+
+ trackedActor ! TrackTimings(sleep = Some(1 second))
+ val timings = expectMsgType[TrackedTimings]
+ val snapshot = takeSnapshotOf(trackedActorMetrics)
+
+ snapshot.processingTime.numberOfMeasurements should be(1L)
+ snapshot.processingTime.recordsIterator.next().count should be(1L)
+ snapshot.processingTime.recordsIterator.next().level should be(timings.approximateProcessingTime +- 10.millis.toNanos)
+ }
+
+ "record the number of errors" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("measuring-errors")
+ val trackedActorMetrics = actorMetricsRecorderOf(trackedActor).get
+ takeSnapshotOf(trackedActorMetrics) // Ensure that the recorders are clean
+
+ for (i ← 1 to 10) { trackedActor ! Fail }
+ trackedActor ! Ping
+ expectMsg(Pong)
+ val snapshot = takeSnapshotOf(trackedActorMetrics)
+
+ snapshot.errors.count should be(10)
+ }
+
+ "record the mailbox-size" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("measuring-mailbox-size")
+ val trackedActorMetrics = actorMetricsRecorderOf(trackedActor).get
+ takeSnapshotOf(trackedActorMetrics) // Ensure that the recorders are clean
+
+ trackedActor ! TrackTimings(sleep = Some(1 second))
+ for (i ← 1 to 10) {
+ trackedActor ! Discard
+ }
+ trackedActor ! Ping
+
+ val timings = expectMsgType[TrackedTimings]
+ expectMsg(Pong)
+ val snapshot = takeSnapshotOf(trackedActorMetrics)
+
+ snapshot.mailboxSize.min should be(0L)
+ snapshot.mailboxSize.max should be(11L +- 1L)
+ }
+
+ "record the time-in-mailbox" in new ActorMetricsFixtures {
+ val trackedActor = createTestActor("measuring-time-in-mailbox")
+ val trackedActorMetrics = actorMetricsRecorderOf(trackedActor).get
+ takeSnapshotOf(trackedActorMetrics) // Ensure that the recorders are clean
+
+ trackedActor ! TrackTimings(sleep = Some(1 second))
+ val timings = expectMsgType[TrackedTimings]
+ val snapshot = takeSnapshotOf(trackedActorMetrics)
+
+ snapshot.timeInMailbox.numberOfMeasurements should be(1L)
+ snapshot.timeInMailbox.recordsIterator.next().count should be(1L)
+ snapshot.timeInMailbox.recordsIterator.next().level should be(timings.approximateTimeInMailbox +- 10.millis.toNanos)
+ }
+ }
+
+ trait ActorMetricsFixtures {
+ val collectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(10000)
+ }
+
+ def actorMetricsRecorderOf(ref: ActorRef): Option[ActorMetricsRecorder] = {
+ val initialisationListener = TestProbe()
+ ref.tell(Ping, initialisationListener.ref)
+ initialisationListener.expectMsg(Pong)
+
+ val underlyingCellField = ref.getClass.getDeclaredMethod("underlying")
+ val cell = underlyingCellField.invoke(ref).asInstanceOf[ActorCellMetrics]
+
+ cell.actorMetricsRecorder
+ }
+
+ def createTestActor(name: String): ActorRef = system.actorOf(Props[ActorMetricsTestActor], name)
+
+ def takeSnapshotOf(amr: ActorMetricsRecorder): ActorMetricSnapshot = amr.collect(collectionContext)
+ }
+}
+
+class ActorMetricsTestActor extends Actor {
+ def receive = {
+ case Discard ⇒
+ case Fail ⇒ 1 / 0
+ case Ping ⇒ sender ! Pong
+ case TrackTimings(sendTimestamp, sleep) ⇒ {
+ val dequeueTimestamp = System.nanoTime()
+ sleep.map(s ⇒ Thread.sleep(s.toMillis))
+ val afterReceiveTimestamp = System.nanoTime()
+
+ sender ! TrackedTimings(sendTimestamp, dequeueTimestamp, afterReceiveTimestamp)
+ }
+ }
+}
+
+object ActorMetricsTestActor {
+ case object Ping
+ case object Pong
+ case object Fail
+ case object Discard
+
+ case class TrackTimings(sendTimestamp: Long = System.nanoTime(), sleep: Option[Duration] = None)
+ case class TrackedTimings(sendTimestamp: Long, dequeueTimestamp: Long, afterReceiveTimestamp: Long) {
+ def approximateTimeInMailbox: Long = dequeueTimestamp - sendTimestamp
+ def approximateProcessingTime: Long = afterReceiveTimestamp - dequeueTimestamp
+ }
+}
diff --git a/kamon-core/src/test/scala/kamon/metrics/DispatcherMetricsSpec.scala b/kamon-core/src/test/scala/kamon/metric/DispatcherMetricsSpec.scala
index 2a9cb6b4..ae324b73 100644
--- a/kamon-core/src/test/scala/kamon/metrics/DispatcherMetricsSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/DispatcherMetricsSpec.scala
@@ -13,7 +13,7 @@
* =========================================================================================
*/
-package kamon.metrics
+package kamon.metric
import org.scalatest.{ WordSpecLike, Matchers }
import akka.testkit.{ TestProbe, TestKitBase }
@@ -21,13 +21,16 @@ import akka.actor.{ ActorRef, Props, ActorSystem }
import com.typesafe.config.ConfigFactory
import scala.concurrent.duration._
import kamon.Kamon
-import kamon.metrics.Subscriptions.TickMetricSnapshot
-import kamon.metrics.DispatcherMetrics.DispatcherMetricSnapshot
+import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.DispatcherMetrics.DispatcherMetricSnapshot
class DispatcherMetricsSpec extends TestKitBase with WordSpecLike with Matchers {
implicit lazy val system: ActorSystem = ActorSystem("dispatcher-metrics-spec", ConfigFactory.parseString(
"""
|kamon.metrics {
+ | tick-interval = 1 second
+ | default-collection-context-buffer-size = 10
+ |
| filters = [
| {
| dispatcher {
@@ -52,8 +55,8 @@ class DispatcherMetricsSpec extends TestKitBase with WordSpecLike with Matchers
"the Kamon dispatcher metrics" should {
"respect the configured include and exclude filters" in {
- system.actorOf(Props[DelayableActor].withDispatcher("tracked-dispatcher"), "actor-with-tracked-dispatcher")
- system.actorOf(Props[DelayableActor].withDispatcher("dispatcher-explicitly-excluded"), "actor-with-excluded-dispatcher")
+ system.actorOf(Props[ActorMetricsTestActor].withDispatcher("tracked-dispatcher"), "actor-with-tracked-dispatcher")
+ system.actorOf(Props[ActorMetricsTestActor].withDispatcher("dispatcher-explicitly-excluded"), "actor-with-excluded-dispatcher")
Kamon(Metrics).subscribe(DispatcherMetrics, "*", testActor, permanently = true)
expectMsgType[TickMetricSnapshot]
@@ -69,7 +72,7 @@ class DispatcherMetricsSpec extends TestKitBase with WordSpecLike with Matchers
val (delayable, metricsListener) = delayableActor("worker-actor", "tracked-dispatcher")
for (_ ← 1 to 100) {
- delayable ! Discard
+ //delayable ! Discard
}
val dispatcherMetrics = expectDispatcherMetrics("tracked-dispatcher", metricsListener, 3 seconds)
@@ -92,7 +95,7 @@ class DispatcherMetricsSpec extends TestKitBase with WordSpecLike with Matchers
trait DelayableActorFixture {
def delayableActor(name: String, dispatcher: String): (ActorRef, TestProbe) = {
- val actor = system.actorOf(Props[DelayableActor].withDispatcher(dispatcher), name)
+ val actor = system.actorOf(Props[ActorMetricsTestActor].withDispatcher(dispatcher), name)
val metricsListener = TestProbe()
Kamon(Metrics).subscribe(DispatcherMetrics, "*", metricsListener.ref, permanently = true)
diff --git a/kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala b/kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala
new file mode 100644
index 00000000..9144725e
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala
@@ -0,0 +1,133 @@
+package kamon.metric
+
+import akka.actor._
+import akka.testkit.{ TestProbe, ImplicitSender, TestKitBase }
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.metric.Subscriptions.TickMetricSnapshot
+import org.scalatest.{ Matchers, WordSpecLike }
+import scala.concurrent.duration._
+
+class SubscriptionsProtocolSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender {
+ implicit lazy val system: ActorSystem = ActorSystem("subscriptions-protocol-spec", ConfigFactory.parseString(
+ """
+ |kamon.metrics {
+ | tick-interval = 1 hour
+ |}
+ """.stripMargin))
+
+ val metricsExtension = Kamon(Metrics)(system)
+ import metricsExtension.{ register, subscribe, unsubscribe }
+
+ "the Subscriptions messaging protocol" should {
+ "allow subscribing for a single tick" in {
+ val subscriber = TestProbe()
+ register(TraceMetrics("one-shot"), TraceMetrics.Factory)
+ subscribe(TraceMetrics, "one-shot", subscriber.ref, permanently = false)
+
+ metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
+
+ tickSnapshot.metrics.size should be(1)
+ tickSnapshot.metrics.keys should contain(TraceMetrics("one-shot"))
+
+ metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ subscriber.expectNoMsg(1 second)
+ }
+
+ "allow subscribing permanently to a metric" in {
+ val subscriber = TestProbe()
+ register(TraceMetrics("permanent"), TraceMetrics.Factory)
+ subscribe(TraceMetrics, "permanent", subscriber.ref, permanently = true)
+
+ for (repetition ← 1 to 5) {
+ metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
+
+ tickSnapshot.metrics.size should be(1)
+ tickSnapshot.metrics.keys should contain(TraceMetrics("permanent"))
+ subscriber.expectNoMsg(1 second)
+ }
+ }
+
+ "allow subscribing to metrics matching a glob pattern" in {
+ val subscriber = TestProbe()
+ register(TraceMetrics("include-one"), TraceMetrics.Factory)
+ register(TraceMetrics("exclude-two"), TraceMetrics.Factory)
+ register(TraceMetrics("include-three"), TraceMetrics.Factory)
+ subscribe(TraceMetrics, "include-*", subscriber.ref, permanently = true)
+
+ for (repetition ← 1 to 5) {
+ metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
+
+ tickSnapshot.metrics.size should be(2)
+ tickSnapshot.metrics.keys should contain(TraceMetrics("include-one"))
+ tickSnapshot.metrics.keys should contain(TraceMetrics("include-three"))
+ subscriber.expectNoMsg(1 second)
+ }
+ }
+
+ "send a single TickMetricSnapshot to each subscriber, even if subscribed multiple times" in {
+ val subscriber = TestProbe()
+ register(TraceMetrics("include-one"), TraceMetrics.Factory)
+ register(TraceMetrics("exclude-two"), TraceMetrics.Factory)
+ register(TraceMetrics("include-three"), TraceMetrics.Factory)
+ subscribe(TraceMetrics, "include-one", subscriber.ref, permanently = true)
+ subscribe(TraceMetrics, "include-three", subscriber.ref, permanently = true)
+
+ for (repetition ← 1 to 5) {
+ metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
+
+ tickSnapshot.metrics.size should be(2)
+ tickSnapshot.metrics.keys should contain(TraceMetrics("include-one"))
+ tickSnapshot.metrics.keys should contain(TraceMetrics("include-three"))
+ }
+ }
+
+ "allow un-subscribing a subscriber" in {
+ val subscriber = TestProbe()
+ register(TraceMetrics("one-shot"), TraceMetrics.Factory)
+ subscribe(TraceMetrics, "one-shot", subscriber.ref, permanently = true)
+
+ metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
+ tickSnapshot.metrics.size should be(1)
+ tickSnapshot.metrics.keys should contain(TraceMetrics("one-shot"))
+
+ unsubscribe(subscriber.ref)
+
+ metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ subscriber.expectNoMsg(1 second)
+ }
+
+ "watch all subscribers and un-subscribe them if they die" in {
+ val subscriber = TestProbe()
+ val forwarderSubscriber = system.actorOf(Props(new ForwarderSubscriber(subscriber.ref)))
+ watch(forwarderSubscriber)
+ register(TraceMetrics("one-shot"), TraceMetrics.Factory)
+ subscribe(TraceMetrics, "one-shot", forwarderSubscriber, permanently = true)
+
+ metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
+ tickSnapshot.metrics.size should be(1)
+ tickSnapshot.metrics.keys should contain(TraceMetrics("one-shot"))
+
+ forwarderSubscriber ! PoisonPill
+ expectTerminated(forwarderSubscriber)
+
+ metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ subscriber.expectNoMsg(2 seconds)
+ }
+ }
+}
+
+class ForwarderSubscriber(target: ActorRef) extends Actor {
+ def receive = {
+ case anything ⇒ target.forward(anything)
+ }
+}
diff --git a/kamon-core/src/test/scala/kamon/metrics/TickMetricSnapshotBufferSpec.scala b/kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala
index d0a0c707..79c9c63b 100644
--- a/kamon-core/src/test/scala/kamon/metrics/TickMetricSnapshotBufferSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala
@@ -14,16 +14,35 @@
* =========================================================================================
*/
-package kamon.metrics
+package kamon.metric
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.metric.instrument.Histogram
+import kamon.metric.instrument.Histogram.MutableRecord
import org.scalatest.{ Matchers, WordSpecLike }
-import akka.testkit.TestKit
+import akka.testkit.{ ImplicitSender, TestKitBase }
import akka.actor.ActorSystem
import scala.concurrent.duration._
-import kamon.metrics.Subscriptions.TickMetricSnapshot
-import kamon.metrics.MetricSnapshot.Measurement
-
-class TickMetricSnapshotBufferSpec extends TestKit(ActorSystem("tick-metric-snapshot-buffer")) with WordSpecLike with Matchers {
+import kamon.metric.Subscriptions.TickMetricSnapshot
+
+class TickMetricSnapshotBufferSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender {
+ implicit lazy val system: ActorSystem = ActorSystem("trace-metrics-spec", ConfigFactory.parseString(
+ """
+ |kamon.metrics {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |
+ | filters = [
+ | {
+ | trace {
+ | includes = [ "*" ]
+ | excludes = [ "non-tracked-trace"]
+ | }
+ | }
+ | ]
+ |}
+ """.stripMargin))
"the TickMetricSnapshotBuffer" should {
"merge TickMetricSnapshots received until the flush timeout is reached and fix the from/to fields" in new SnapshotFixtures {
@@ -55,27 +74,38 @@ class TickMetricSnapshotBufferSpec extends TestKit(ActorSystem("tick-metric-snap
mergedSnapshot.to should equal(4000)
mergedSnapshot.metrics should not be ('empty)
- val testMetricSnapshot = mergedSnapshot.metrics(CustomMetric("test-metric")).metrics(CustomMetric.RecordedValues)
- testMetricSnapshot.min should equal(1)
- testMetricSnapshot.max should equal(10)
- testMetricSnapshot.numberOfMeasurements should equal(35)
- testMetricSnapshot.measurements should contain allOf (Measurement(1, 10), Measurement(4, 9), Measurement(10, 16))
+ val testMetricSnapshot = mergedSnapshot.metrics(testTraceIdentity).metrics(TraceMetrics.ElapsedTime).asInstanceOf[Histogram.Snapshot]
+ testMetricSnapshot.min should equal(10)
+ testMetricSnapshot.max should equal(300)
+ testMetricSnapshot.numberOfMeasurements should equal(6)
+ testMetricSnapshot.recordsIterator.toStream should contain allOf (
+ MutableRecord(10, 3),
+ MutableRecord(20, 1),
+ MutableRecord(30, 1),
+ MutableRecord(300, 1))
}
}
trait SnapshotFixtures {
+ val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
+ val testTraceIdentity = TraceMetrics("buffer-spec-test-trace")
+ val traceRecorder = Kamon(Metrics).register(testTraceIdentity, TraceMetrics.Factory).get
+
val firstEmpty = TickMetricSnapshot(1000, 2000, Map.empty)
val secondEmpty = TickMetricSnapshot(2000, 3000, Map.empty)
val thirdEmpty = TickMetricSnapshot(3000, 4000, Map.empty)
- val firstNonEmpty = TickMetricSnapshot(1000, 2000,
- Map((CustomMetric("test-metric") -> SimpleGroupSnapshot(Map(CustomMetric.RecordedValues -> MetricSnapshot(InstrumentTypes.Histogram, 20, Scale.Unit, Vector(Measurement(1, 10), Measurement(10, 10))))))))
-
- val secondNonEmpty = TickMetricSnapshot(1000, 2000,
- Map((CustomMetric("test-metric") -> SimpleGroupSnapshot(Map(CustomMetric.RecordedValues -> MetricSnapshot(InstrumentTypes.Histogram, 15, Scale.Unit, Vector(Measurement(4, 9), Measurement(10, 6))))))))
-
+ traceRecorder.elapsedTime.record(10L)
+ traceRecorder.elapsedTime.record(20L)
+ traceRecorder.elapsedTime.record(30L)
+ val firstNonEmpty = TickMetricSnapshot(1000, 2000, Map(
+ (testTraceIdentity -> traceRecorder.collect(collectionContext))))
+
+ traceRecorder.elapsedTime.record(10L)
+ traceRecorder.elapsedTime.record(10L)
+ traceRecorder.elapsedTime.record(300L)
+ val secondNonEmpty = TickMetricSnapshot(1000, 2000, Map(
+ (testTraceIdentity -> traceRecorder.collect(collectionContext))))
}
-
- case class SimpleGroupSnapshot(metrics: Map[MetricIdentity, MetricSnapshotLike]) extends MetricGroupSnapshot
}
diff --git a/kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala b/kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala
new file mode 100644
index 00000000..8a87408d
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala
@@ -0,0 +1,95 @@
+package kamon.metric
+
+import akka.actor.ActorSystem
+import akka.testkit.{ ImplicitSender, TestKitBase }
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.metric.TraceMetrics.TraceMetricsSnapshot
+import kamon.trace.TraceContext.SegmentIdentity
+import kamon.trace.TraceRecorder
+import org.scalatest.{ Matchers, WordSpecLike }
+
+class TraceMetricsSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender {
+ implicit lazy val system: ActorSystem = ActorSystem("trace-metrics-spec", ConfigFactory.parseString(
+ """
+ |kamon.metrics {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |
+ | filters = [
+ | {
+ | trace {
+ | includes = [ "*" ]
+ | excludes = [ "non-tracked-trace"]
+ | }
+ | }
+ | ]
+ | precision {
+ | default-histogram-precision {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ |
+ | default-min-max-counter-precision {
+ | refresh-interval = 1 second
+ | highest-trackable-value = 999999999
+ | significant-value-digits = 2
+ | }
+ | }
+ |}
+ """.stripMargin))
+
+ "the TraceMetrics" should {
+ "record the elapsed time between a trace creation and finish" in {
+ for (repetitions ← 1 to 10) {
+ TraceRecorder.withNewTraceContext("record-elapsed-time") {
+ TraceRecorder.finish()
+ }
+ }
+
+ val snapshot = takeSnapshotOf("record-elapsed-time")
+ snapshot.elapsedTime.numberOfMeasurements should be(10)
+ snapshot.segments shouldBe empty
+ }
+
+ "record the elapsed time for segments that occur inside a given trace" in {
+ TraceRecorder.withNewTraceContext("trace-with-segments") {
+ val segmentHandle = TraceRecorder.startSegment(TraceMetricsTestSegment("test-segment"))
+ segmentHandle.get.finish()
+ TraceRecorder.finish()
+ }
+
+ val snapshot = takeSnapshotOf("trace-with-segments")
+ snapshot.elapsedTime.numberOfMeasurements should be(1)
+ snapshot.segments.size should be(1)
+ snapshot.segments(TraceMetricsTestSegment("test-segment")).numberOfMeasurements should be(1)
+ }
+
+ "record the elapsed time for segments that finish after their correspondent trace has finished" in {
+ val segmentHandle = TraceRecorder.withNewTraceContext("closing-segment-after-trace") {
+ val sh = TraceRecorder.startSegment(TraceMetricsTestSegment("test-segment"))
+ TraceRecorder.finish()
+ sh
+ }
+
+ val beforeFinishSegmentSnapshot = takeSnapshotOf("closing-segment-after-trace")
+ beforeFinishSegmentSnapshot.elapsedTime.numberOfMeasurements should be(1)
+ beforeFinishSegmentSnapshot.segments.size should be(0)
+
+ segmentHandle.get.finish()
+
+ val afterFinishSegmentSnapshot = takeSnapshotOf("closing-segment-after-trace")
+ afterFinishSegmentSnapshot.elapsedTime.numberOfMeasurements should be(0)
+ afterFinishSegmentSnapshot.segments.size should be(1)
+ afterFinishSegmentSnapshot.segments(TraceMetricsTestSegment("test-segment")).numberOfMeasurements should be(1)
+ }
+ }
+
+ case class TraceMetricsTestSegment(name: String) extends SegmentIdentity
+
+ def takeSnapshotOf(traceName: String): TraceMetricsSnapshot = {
+ val recorder = Kamon(Metrics).register(TraceMetrics(traceName), TraceMetrics.Factory)
+ val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
+ recorder.get.collect(collectionContext)
+ }
+}
diff --git a/kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala b/kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala
new file mode 100644
index 00000000..8f1d06d8
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala
@@ -0,0 +1,303 @@
+package kamon.metric
+
+import akka.actor.ActorSystem
+import akka.testkit.{ ImplicitSender, TestKitBase }
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.metric.UserMetrics.{ UserGauge, UserMinMaxCounter, UserCounter, UserHistogram }
+import kamon.metric.instrument.Histogram
+import kamon.metric.instrument.Histogram.MutableRecord
+import org.scalatest.{ Matchers, WordSpecLike }
+import scala.concurrent.duration._
+
+class UserMetricsSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender {
+ implicit lazy val system: ActorSystem = ActorSystem("actor-metrics-spec", ConfigFactory.parseString(
+ """
+ |kamon.metrics {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |
+ | precision {
+ | default-histogram-precision {
+ | highest-trackable-value = 10000
+ | significant-value-digits = 2
+ | }
+ |
+ | default-min-max-counter-precision {
+ | refresh-interval = 1 hour
+ | highest-trackable-value = 1000
+ | significant-value-digits = 2
+ | }
+ |
+ | default-gauge-precision {
+ | refresh-interval = 1 hour
+ | highest-trackable-value = 999999999
+ | significant-value-digits = 2
+ | }
+ | }
+ |}
+ """.stripMargin))
+
+ "the UserMetrics extension" should {
+ "allow registering a fully configured Histogram and get the same Histogram if registering again" in {
+ val histogramA = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Normal, 10000L)
+ val histogramB = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Normal, 10000L)
+
+ histogramA shouldBe theSameInstanceAs(histogramB)
+ }
+
+ "return the original Histogram when registering a fully configured Histogram for second time but with different settings" in {
+ val histogramA = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Normal, 10000L)
+ val histogramB = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Fine, 50000L)
+
+ histogramA shouldBe theSameInstanceAs(histogramB)
+ }
+
+ "allow registering a Histogram that takes the default configuration from the kamon.metrics.precision settings" in {
+ Kamon(UserMetrics).registerHistogram("histogram-with-default-configuration")
+ }
+
+ "allow registering a Counter and get the same Counter if registering again" in {
+ val counterA = Kamon(UserMetrics).registerCounter("counter")
+ val counterB = Kamon(UserMetrics).registerCounter("counter")
+
+ counterA shouldBe theSameInstanceAs(counterB)
+ }
+
+ "allow registering a fully configured MinMaxCounter and get the same MinMaxCounter if registering again" in {
+ val minMaxCounterA = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Normal, 1000L, 1 second)
+ val minMaxCounterB = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Normal, 1000L, 1 second)
+
+ minMaxCounterA shouldBe theSameInstanceAs(minMaxCounterB)
+ }
+
+ "return the original MinMaxCounter when registering a fully configured MinMaxCounter for second time but with different settings" in {
+ val minMaxCounterA = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Normal, 1000L, 1 second)
+ val minMaxCounterB = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Fine, 5000L, 1 second)
+
+ minMaxCounterA shouldBe theSameInstanceAs(minMaxCounterB)
+ }
+
+ "allow registering a MinMaxCounter that takes the default configuration from the kamon.metrics.precision settings" in {
+ Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-default-configuration")
+ }
+
+ "allow registering a fully configured Gauge and get the same Gauge if registering again" in {
+ val gaugeA = Kamon(UserMetrics).registerGauge("gauge-with-settings", Histogram.Precision.Normal, 1000L, 1 second) {
+ () ⇒ 1L
+ }
+
+ val gaugeB = Kamon(UserMetrics).registerGauge("gauge-with-settings", Histogram.Precision.Normal, 1000L, 1 second) {
+ () ⇒ 1L
+ }
+
+ gaugeA shouldBe theSameInstanceAs(gaugeB)
+ }
+
+ "return the original Gauge when registering a fully configured Gauge for second time but with different settings" in {
+ val gaugeA = Kamon(UserMetrics).registerGauge("gauge-with-settings", Histogram.Precision.Normal, 1000L, 1 second) {
+ () ⇒ 1L
+ }
+
+ val gaugeB = Kamon(UserMetrics).registerGauge("gauge-with-settings", Histogram.Precision.Fine, 5000L, 1 second) {
+ () ⇒ 1L
+ }
+
+ gaugeA shouldBe theSameInstanceAs(gaugeB)
+ }
+
+ "allow registering a Gauge that takes the default configuration from the kamon.metrics.precision settings" in {
+ Kamon(UserMetrics).registerGauge("gauge-with-default-configuration") {
+ () ⇒ 2L
+ }
+ }
+
+ "allow unregistering metrics from the extension" in {
+ val userMetricsRecorder = Kamon(Metrics).register(UserMetrics, UserMetrics.Factory).get
+ val counter = Kamon(UserMetrics).registerCounter("counter-for-remove")
+ val histogram = Kamon(UserMetrics).registerHistogram("histogram-for-remove")
+ val minMaxCounter = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-for-remove")
+ val gauge = Kamon(UserMetrics).registerGauge("gauge-for-remove") { () ⇒ 2L }
+
+ userMetricsRecorder.counters.keys should contain("counter-for-remove")
+ userMetricsRecorder.histograms.keys should contain("histogram-for-remove")
+ userMetricsRecorder.minMaxCounters.keys should contain("min-max-counter-for-remove")
+ userMetricsRecorder.gauges.keys should contain("gauge-for-remove")
+
+ Kamon(UserMetrics).removeCounter("counter-for-remove")
+ Kamon(UserMetrics).removeHistogram("histogram-for-remove")
+ Kamon(UserMetrics).removeMinMaxCounter("min-max-counter-for-remove")
+ Kamon(UserMetrics).removeGauge("gauge-for-remove")
+
+ userMetricsRecorder.counters.keys should not contain ("counter-for-remove")
+ userMetricsRecorder.histograms.keys should not contain ("histogram-for-remove")
+ userMetricsRecorder.minMaxCounters.keys should not contain ("min-max-counter-for-remove")
+ userMetricsRecorder.gauges.keys should not contain ("gauge-for-remove")
+ }
+
+ "generate a snapshot containing all the registered user metrics and reset all instruments" in {
+ val context = Kamon(Metrics).buildDefaultCollectionContext
+ val userMetricsRecorder = Kamon(Metrics).register(UserMetrics, UserMetrics.Factory).get
+
+ val histogramWithSettings = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Normal, 10000L)
+ val histogramWithDefaultConfiguration = Kamon(UserMetrics).registerHistogram("histogram-with-default-configuration")
+ val counter = Kamon(UserMetrics).registerCounter("counter")
+ val minMaxCounterWithSettings = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Normal, 1000L, 1 second)
+ val gauge = Kamon(UserMetrics).registerGauge("gauge-with-default-configuration") { () ⇒ 2L }
+
+      // let's put some values on those metrics
+ histogramWithSettings.record(10)
+ histogramWithSettings.record(20, 100)
+ histogramWithDefaultConfiguration.record(40)
+
+ counter.increment()
+ counter.increment(16)
+
+ minMaxCounterWithSettings.increment(43)
+ minMaxCounterWithSettings.decrement()
+
+ gauge.record(15)
+
+ val firstSnapshot = userMetricsRecorder.collect(context)
+
+ firstSnapshot.histograms.size should be(2)
+ firstSnapshot.histograms.keys should contain allOf (
+ UserHistogram("histogram-with-settings"),
+ UserHistogram("histogram-with-default-configuration"))
+
+ firstSnapshot.histograms(UserHistogram("histogram-with-settings")).min shouldBe (10)
+ firstSnapshot.histograms(UserHistogram("histogram-with-settings")).max shouldBe (20)
+ firstSnapshot.histograms(UserHistogram("histogram-with-settings")).numberOfMeasurements should be(101)
+ firstSnapshot.histograms(UserHistogram("histogram-with-settings")).recordsIterator.toStream should contain allOf (
+ MutableRecord(10, 1),
+ MutableRecord(20, 100))
+
+ firstSnapshot.histograms(UserHistogram("histogram-with-default-configuration")).min shouldBe (40)
+ firstSnapshot.histograms(UserHistogram("histogram-with-default-configuration")).max shouldBe (40)
+ firstSnapshot.histograms(UserHistogram("histogram-with-default-configuration")).numberOfMeasurements should be(1)
+ firstSnapshot.histograms(UserHistogram("histogram-with-default-configuration")).recordsIterator.toStream should contain only (
+ MutableRecord(40, 1))
+
+ firstSnapshot.counters(UserCounter("counter")).count should be(17)
+
+ firstSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-settings")).min shouldBe (0)
+ firstSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-settings")).max shouldBe (43)
+ firstSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-settings")).numberOfMeasurements should be(3)
+ firstSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-settings")).recordsIterator.toStream should contain allOf (
+ MutableRecord(0, 1), // min
+ MutableRecord(42, 1), // current
+ MutableRecord(43, 1)) // max
+
+ firstSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-default-configuration")).min shouldBe (0)
+ firstSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-default-configuration")).max shouldBe (0)
+ firstSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-default-configuration")).numberOfMeasurements should be(3)
+ firstSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-default-configuration")).recordsIterator.toStream should contain only (
+ MutableRecord(0, 3)) // min, max and current
+
+ firstSnapshot.gauges(UserGauge("gauge-with-default-configuration")).min shouldBe (15)
+ firstSnapshot.gauges(UserGauge("gauge-with-default-configuration")).max shouldBe (15)
+ firstSnapshot.gauges(UserGauge("gauge-with-default-configuration")).numberOfMeasurements should be(1)
+ firstSnapshot.gauges(UserGauge("gauge-with-default-configuration")).recordsIterator.toStream should contain only (
+ MutableRecord(15, 1)) // only the manually recorded value
+
+ val secondSnapshot = userMetricsRecorder.collect(context)
+
+ secondSnapshot.histograms.size should be(2)
+ secondSnapshot.histograms.keys should contain allOf (
+ UserHistogram("histogram-with-settings"),
+ UserHistogram("histogram-with-default-configuration"))
+
+ secondSnapshot.histograms(UserHistogram("histogram-with-settings")).min shouldBe (0)
+ secondSnapshot.histograms(UserHistogram("histogram-with-settings")).max shouldBe (0)
+ secondSnapshot.histograms(UserHistogram("histogram-with-settings")).numberOfMeasurements should be(0)
+ secondSnapshot.histograms(UserHistogram("histogram-with-settings")).recordsIterator.toStream shouldBe empty
+
+ secondSnapshot.histograms(UserHistogram("histogram-with-default-configuration")).min shouldBe (0)
+ secondSnapshot.histograms(UserHistogram("histogram-with-default-configuration")).max shouldBe (0)
+ secondSnapshot.histograms(UserHistogram("histogram-with-default-configuration")).numberOfMeasurements should be(0)
+ secondSnapshot.histograms(UserHistogram("histogram-with-default-configuration")).recordsIterator.toStream shouldBe empty
+
+ secondSnapshot.counters(UserCounter("counter")).count should be(0)
+
+ secondSnapshot.minMaxCounters.size should be(2)
+ secondSnapshot.minMaxCounters.keys should contain allOf (
+ UserMinMaxCounter("min-max-counter-with-settings"),
+ UserMinMaxCounter("min-max-counter-with-default-configuration"))
+
+ secondSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-settings")).min shouldBe (42)
+ secondSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-settings")).max shouldBe (42)
+ secondSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-settings")).numberOfMeasurements should be(3)
+ secondSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-settings")).recordsIterator.toStream should contain only (
+ MutableRecord(42, 3)) // min, max and current
+
+ secondSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-default-configuration")).min shouldBe (0)
+ secondSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-default-configuration")).max shouldBe (0)
+ secondSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-default-configuration")).numberOfMeasurements should be(3)
+ secondSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-with-default-configuration")).recordsIterator.toStream should contain only (
+ MutableRecord(0, 3)) // min, max and current
+
+ secondSnapshot.gauges(UserGauge("gauge-with-default-configuration")).min shouldBe (0)
+ secondSnapshot.gauges(UserGauge("gauge-with-default-configuration")).max shouldBe (0)
+ secondSnapshot.gauges(UserGauge("gauge-with-default-configuration")).numberOfMeasurements should be(0)
+ secondSnapshot.gauges(UserGauge("gauge-with-default-configuration")).recordsIterator shouldBe empty
+
+ }
+
+ "generate a snapshot that can be merged with another" in {
+ val context = Kamon(Metrics).buildDefaultCollectionContext
+ val userMetricsRecorder = Kamon(Metrics).register(UserMetrics, UserMetrics.Factory).get
+
+ val histogram = Kamon(UserMetrics).registerHistogram("histogram-for-merge")
+ val counter = Kamon(UserMetrics).registerCounter("counter-for-merge")
+ val minMaxCounter = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-for-merge")
+ val gauge = Kamon(UserMetrics).registerGauge("gauge-for-merge") { () ⇒ 10L }
+
+ histogram.record(100)
+ counter.increment(10)
+ minMaxCounter.increment(50)
+ minMaxCounter.decrement(10)
+ gauge.record(50)
+
+ val firstSnapshot = userMetricsRecorder.collect(context)
+
+ val extraCounter = Kamon(UserMetrics).registerCounter("extra-counter")
+ histogram.record(200)
+ extraCounter.increment(20)
+ minMaxCounter.increment(40)
+ minMaxCounter.decrement(50)
+ gauge.record(70)
+
+ val secondSnapshot = userMetricsRecorder.collect(context)
+ val mergedSnapshot = firstSnapshot.merge(secondSnapshot, context)
+
+ mergedSnapshot.histograms.keys should contain(UserHistogram("histogram-for-merge"))
+
+ mergedSnapshot.histograms(UserHistogram("histogram-for-merge")).min shouldBe (100)
+ mergedSnapshot.histograms(UserHistogram("histogram-for-merge")).max shouldBe (200)
+ mergedSnapshot.histograms(UserHistogram("histogram-for-merge")).numberOfMeasurements should be(2)
+ mergedSnapshot.histograms(UserHistogram("histogram-for-merge")).recordsIterator.toStream should contain allOf (
+ MutableRecord(100, 1),
+ MutableRecord(200, 1))
+
+ mergedSnapshot.counters(UserCounter("counter-for-merge")).count should be(10)
+ mergedSnapshot.counters(UserCounter("extra-counter")).count should be(20)
+
+ mergedSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-for-merge")).min shouldBe (0)
+ mergedSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-for-merge")).max shouldBe (80)
+ mergedSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-for-merge")).numberOfMeasurements should be(6)
+ mergedSnapshot.minMaxCounters(UserMinMaxCounter("min-max-counter-for-merge")).recordsIterator.toStream should contain allOf (
+ MutableRecord(0, 1), // min in first snapshot
+ MutableRecord(30, 2), // min and current in second snapshot
+ MutableRecord(40, 1), // current in first snapshot
+ MutableRecord(50, 1), // max in first snapshot
+ MutableRecord(80, 1)) // max in second snapshot
+
+ mergedSnapshot.gauges(UserGauge("gauge-for-merge")).min shouldBe (50)
+ mergedSnapshot.gauges(UserGauge("gauge-for-merge")).max shouldBe (70)
+ mergedSnapshot.gauges(UserGauge("gauge-for-merge")).numberOfMeasurements should be(2)
+ mergedSnapshot.gauges(UserGauge("gauge-for-merge")).recordsIterator.toStream should contain allOf (
+ MutableRecord(50, 1),
+ MutableRecord(70, 1))
+ }
+ }
+}
diff --git a/kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala b/kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala
new file mode 100644
index 00000000..1a93e1f6
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala
@@ -0,0 +1,55 @@
+package kamon.metric.instrument
+
+import java.nio.LongBuffer
+
+import kamon.metric.CollectionContext
+import org.scalatest.{ Matchers, WordSpec }
+
+class CounterSpec extends WordSpec with Matchers {
+
+ "a Counter" should {
+ "allow increment only operations" in new CounterFixture {
+ counter.increment()
+ counter.increment(10)
+
+ intercept[UnsupportedOperationException] {
+ counter.increment(-10)
+ }
+ }
+
+ "reset to zero when a snapshot is taken" in new CounterFixture {
+ counter.increment(100)
+ takeSnapshotFrom(counter).count should be(100)
+ takeSnapshotFrom(counter).count should be(0)
+ takeSnapshotFrom(counter).count should be(0)
+
+ counter.increment(50)
+ takeSnapshotFrom(counter).count should be(50)
+ takeSnapshotFrom(counter).count should be(0)
+ }
+
+ "produce a snapshot that can be merged with others" in new CounterFixture {
+ val counterA = Counter()
+ val counterB = Counter()
+ counterA.increment(100)
+ counterB.increment(200)
+
+ val counterASnapshot = takeSnapshotFrom(counterA)
+ val counterBSnapshot = takeSnapshotFrom(counterB)
+
+ counterASnapshot.merge(counterBSnapshot, collectionContext).count should be(300)
+ counterBSnapshot.merge(counterASnapshot, collectionContext).count should be(300)
+ }
+
+ }
+
+ trait CounterFixture {
+ val counter = Counter()
+
+ val collectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(1)
+ }
+
+ def takeSnapshotFrom(counter: Counter): Counter.Snapshot = counter.collect(collectionContext)
+ }
+}
diff --git a/kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala b/kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala
new file mode 100644
index 00000000..9192d999
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala
@@ -0,0 +1,72 @@
+package kamon.metric.instrument
+
+import java.util.concurrent.atomic.AtomicLong
+
+import akka.actor.ActorSystem
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.metric.{ Metrics, Scale, CollectionContext }
+import org.scalatest.{ Matchers, WordSpecLike }
+import scala.concurrent.duration._
+
+class GaugeSpec extends WordSpecLike with Matchers {
+ implicit val system = ActorSystem("gauge-spec", ConfigFactory.parseString(
+ """
+ |kamon.metrics {
+ | flush-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ | precision {
+ | default-gauge-precision {
+ | refresh-interval = 100 milliseconds
+ | highest-trackable-value = 999999999
+ | significant-value-digits = 2
+ | }
+ | }
+ |}
+ """.stripMargin))
+
+ "a Gauge" should {
+ "automatically record the current value using the configured refresh-interval" in {
+ val numberOfValuesRecorded = new AtomicLong(0)
+ val gauge = Gauge.fromDefaultConfig(system) { () ⇒ numberOfValuesRecorded.addAndGet(1) }
+
+ Thread.sleep(1.second.toMillis)
+ numberOfValuesRecorded.get() should be(10L +- 1L)
+ gauge.cleanup
+ }
+
+ "stop automatically recording after a call to cleanup" in {
+ val numberOfValuesRecorded = new AtomicLong(0)
+ val gauge = Gauge.fromDefaultConfig(system) { () ⇒ numberOfValuesRecorded.addAndGet(1) }
+
+ Thread.sleep(1.second.toMillis)
+ gauge.cleanup
+ numberOfValuesRecorded.get() should be(10L +- 1L)
+ Thread.sleep(1.second.toMillis)
+ numberOfValuesRecorded.get() should be(10L +- 1L)
+ }
+
+ "produce a Histogram snapshot including all the recorded values" in {
+ val numberOfValuesRecorded = new AtomicLong(0)
+ val gauge = Gauge.fromDefaultConfig(system) { () ⇒ numberOfValuesRecorded.addAndGet(1) }
+
+ Thread.sleep(1.second.toMillis)
+ gauge.cleanup
+ val snapshot = gauge.collect(Kamon(Metrics).buildDefaultCollectionContext)
+
+ snapshot.numberOfMeasurements should be(10L +- 1L)
+ snapshot.min should be(1)
+ snapshot.max should be(10L +- 1L)
+ }
+
+ "not record the current value when doing a collection" in {
+ val numberOfValuesRecorded = new AtomicLong(0)
+ val gauge = Gauge(Histogram.Precision.Normal, 10000L, Scale.Unit, 1 hour, system)(() ⇒ numberOfValuesRecorded.addAndGet(1))
+
+ val snapshot = gauge.collect(Kamon(Metrics).buildDefaultCollectionContext)
+
+ snapshot.numberOfMeasurements should be(0)
+ numberOfValuesRecorded.get() should be(0)
+ }
+ }
+}
diff --git a/kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala b/kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala
new file mode 100644
index 00000000..cefdf0f4
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala
@@ -0,0 +1,130 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric.instrument
+
+import java.nio.LongBuffer
+
+import com.typesafe.config.ConfigFactory
+import kamon.metric.CollectionContext
+import org.scalatest.{ Matchers, WordSpec }
+
+import scala.util.Random
+
+class HistogramSpec extends WordSpec with Matchers {
+
+ val histogramConfig = ConfigFactory.parseString(
+ """
+ |
+ |highest-trackable-value = 100000
+ |significant-value-digits = 2
+ |
+ """.stripMargin)
+
+ "a Histogram" should {
+ "allow record values within the configured range" in new HistogramFixture {
+ histogram.record(1000)
+ histogram.record(5000, count = 100)
+ histogram.record(10000)
+ }
+
+ "fail when recording values higher than the highest trackable value" in new HistogramFixture {
+ intercept[IndexOutOfBoundsException] {
+ histogram.record(1000000)
+ }
+ }
+
+ "reset all recorded levels to zero after a snapshot collection" in new HistogramFixture {
+ histogram.record(100)
+ histogram.record(200)
+ histogram.record(300)
+
+ takeSnapshot().numberOfMeasurements should be(3)
+ takeSnapshot().numberOfMeasurements should be(0)
+ }
+
+ "produce a snapshot" which {
+ "supports min, max and numberOfMeasurements operations" in new HistogramFixture {
+ histogram.record(100)
+ histogram.record(200, count = 200)
+ histogram.record(300)
+ histogram.record(900)
+
+ val snapshot = takeSnapshot()
+
+ snapshot.min should equal(100L +- 1L)
+ snapshot.max should equal(900L +- 9L)
+ snapshot.numberOfMeasurements should be(203)
+ }
+
+ "can be merged with another snapshot" in new MultipleHistogramFixture {
+ val random = new Random(System.nanoTime())
+
+ for (repetitions ← 1 to 1000) {
+ // Put some values on A and Control
+ for (_ ← 1 to 1000) {
+ val newRecording = random.nextInt(100000)
+ controlHistogram.record(newRecording)
+ histogramA.record(newRecording)
+ }
+
+ // Put some values on B and Control
+ for (_ ← 1 to 2000) {
+ val newRecording = random.nextInt(100000)
+ controlHistogram.record(newRecording)
+ histogramB.record(newRecording)
+ }
+
+ val controlSnapshot = takeSnapshotFrom(controlHistogram)
+ val histogramASnapshot = takeSnapshotFrom(histogramA)
+ val histogramBSnapshot = takeSnapshotFrom(histogramB)
+
+ assertEquals(controlSnapshot, histogramASnapshot.merge(histogramBSnapshot, collectionContext))
+ assertEquals(controlSnapshot, histogramBSnapshot.merge(histogramASnapshot, collectionContext))
+ }
+ }
+ }
+ }
+
+ trait HistogramFixture {
+ val collectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(10000)
+ }
+
+ val histogram = Histogram.fromConfig(histogramConfig)
+
+ def takeSnapshot(): Histogram.Snapshot = histogram.collect(collectionContext)
+ }
+
+ trait MultipleHistogramFixture {
+ val collectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(10000)
+ }
+
+ val controlHistogram = Histogram.fromConfig(histogramConfig)
+ val histogramA = Histogram.fromConfig(histogramConfig)
+ val histogramB = Histogram.fromConfig(histogramConfig)
+
+ def takeSnapshotFrom(histogram: Histogram): Histogram.Snapshot = histogram.collect(collectionContext)
+
+ def assertEquals(left: Histogram.Snapshot, right: Histogram.Snapshot): Unit = {
+ left.numberOfMeasurements should equal(right.numberOfMeasurements)
+ left.min should equal(right.min)
+ left.max should equal(right.max)
+ left.recordsIterator.toStream should contain theSameElementsAs (right.recordsIterator.toStream)
+ }
+ }
+}
diff --git a/kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala b/kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala
new file mode 100644
index 00000000..cb03664c
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala
@@ -0,0 +1,108 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric.instrument
+
+import java.nio.LongBuffer
+
+import akka.actor.ActorSystem
+import com.typesafe.config.ConfigFactory
+import kamon.metric.CollectionContext
+import kamon.metric.instrument.Histogram.MutableRecord
+import org.scalatest.{ Matchers, WordSpecLike }
+
+class MinMaxCounterSpec extends WordSpecLike with Matchers {
+ val system = ActorSystem("min-max-counter-spec")
+ val minMaxCounterConfig = ConfigFactory.parseString(
+ """
+ |refresh-interval = 1 hour
+ |highest-trackable-value = 1000
+ |significant-value-digits = 2
+ """.stripMargin)
+
+ "the MinMaxCounter" should {
+ "track ascending tendencies" in new MinMaxCounterFixture {
+ mmCounter.increment()
+ mmCounter.increment(3)
+ mmCounter.increment()
+
+ val snapshot = collectCounterSnapshot()
+
+ snapshot.min should be(0)
+ snapshot.max should be(5)
+ snapshot.recordsIterator.toStream should contain allOf (
+ MutableRecord(0, 1), // min
+ MutableRecord(5, 2)) // max and current
+ }
+
+ "track descending tendencies" in new MinMaxCounterFixture {
+ mmCounter.increment(5)
+ mmCounter.decrement()
+ mmCounter.decrement(3)
+ mmCounter.decrement()
+
+ val snapshot = collectCounterSnapshot()
+
+ snapshot.min should be(0)
+ snapshot.max should be(5)
+ snapshot.recordsIterator.toStream should contain allOf (
+ MutableRecord(0, 2), // min and current
+ MutableRecord(5, 1)) // max
+ }
+
+ "reset the min and max to the current value after taking a snapshot" in new MinMaxCounterFixture {
+ mmCounter.increment(5)
+ mmCounter.decrement(3)
+
+ val firstSnapshot = collectCounterSnapshot()
+
+ firstSnapshot.min should be(0)
+ firstSnapshot.max should be(5)
+ firstSnapshot.recordsIterator.toStream should contain allOf (
+ MutableRecord(0, 1), // min
+ MutableRecord(2, 1), // current
+ MutableRecord(5, 1)) // max
+
+ val secondSnapshot = collectCounterSnapshot()
+
+ secondSnapshot.min should be(2)
+ secondSnapshot.max should be(2)
+ secondSnapshot.recordsIterator.toStream should contain(
+ MutableRecord(2, 3)) // min, max and current
+ }
+
+ "report zero as the min and current values if the current value fell below zero" in new MinMaxCounterFixture {
+ mmCounter.decrement(3)
+
+ val snapshot = collectCounterSnapshot()
+
+ snapshot.min should be(0)
+ snapshot.max should be(0)
+ snapshot.recordsIterator.toStream should contain(
+ MutableRecord(0, 3)) // min, max and current (even though the current value really is -3)
+ }
+ }
+
+ trait MinMaxCounterFixture {
+ val collectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(64)
+ }
+
+ val mmCounter = MinMaxCounter.fromConfig(minMaxCounterConfig, system).asInstanceOf[PaddedMinMaxCounter]
+ mmCounter.cleanup // cancel the refresh schedule
+
+ def collectCounterSnapshot(): Histogram.Snapshot = mmCounter.collect(collectionContext)
+ }
+}
diff --git a/kamon-core/src/test/scala/kamon/metrics/ActorMetricsSpec.scala b/kamon-core/src/test/scala/kamon/metrics/ActorMetricsSpec.scala
deleted file mode 100644
index 645ca96a..00000000
--- a/kamon-core/src/test/scala/kamon/metrics/ActorMetricsSpec.scala
+++ /dev/null
@@ -1,172 +0,0 @@
-/* =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import org.scalatest.{ WordSpecLike, Matchers }
-import akka.testkit.{ TestProbe, TestKitBase }
-import akka.actor.{ ActorRef, Actor, Props, ActorSystem }
-import com.typesafe.config.ConfigFactory
-import scala.concurrent.duration._
-import kamon.Kamon
-import kamon.metrics.Subscriptions.TickMetricSnapshot
-import kamon.metrics.ActorMetrics.ActorMetricSnapshot
-import kamon.metrics.MetricSnapshot.Measurement
-
-class ActorMetricsSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit lazy val system: ActorSystem = ActorSystem("actor-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | filters = [
- | {
- | actor {
- | includes = [ "user/tracked-*" ]
- | excludes = [ "user/tracked-explicitly-excluded"]
- | }
- | }
- | ]
- |}
- """.stripMargin))
-
- "the Kamon actor metrics" should {
- "respect the configured include and exclude filters" in new DelayableActorFixture {
- val tracked = system.actorOf(Props[DelayableActor], "tracked-actor")
- val nonTracked = system.actorOf(Props[DelayableActor], "non-tracked-actor")
- val trackedExplicitlyExcluded = system.actorOf(Props[DelayableActor], "tracked-explicitly-excluded")
-
- Kamon(Metrics).subscribe(ActorMetrics, "*", testActor, permanently = true)
- expectMsgType[TickMetricSnapshot]
-
- tracked ! Discard
- nonTracked ! Discard
- trackedExplicitlyExcluded ! Discard
-
- within(2 seconds) {
- val tickSnapshot = expectMsgType[TickMetricSnapshot]
- tickSnapshot.metrics.keys should contain(ActorMetrics("user/tracked-actor"))
- tickSnapshot.metrics.keys should not contain (ActorMetrics("user/non-tracked-actor"))
- tickSnapshot.metrics.keys should not contain (ActorMetrics("user/tracked-explicitly-excluded"))
- }
- }
-
- "record mailbox-size, processing-time and time-in-mailbox metrics under regular conditions" in new DelayableActorFixture {
- val (delayable, metricsListener) = delayableActor("tracked-normal-conditions")
-
- for (_ ← 1 to 10) {
- delayable ! Discard
- }
-
- val actorMetrics = expectActorMetrics("user/tracked-normal-conditions", metricsListener, 3 seconds)
- actorMetrics.mailboxSize.max should be <= 10L
- actorMetrics.processingTime.numberOfMeasurements should be(10L)
- actorMetrics.timeInMailbox.numberOfMeasurements should be(10L)
- }
-
- "keep a correct mailbox-size even if the actor is blocked processing a message" in new DelayableActorFixture {
- val (delayable, metricsListener) = delayableActor("tracked-mailbox-size-queueing-up")
-
- delayable ! Delay(2500 milliseconds)
- for (_ ← 1 to 9) {
- delayable ! Discard
- }
-
- // let the first snapshot pass
- metricsListener.expectMsgType[TickMetricSnapshot]
-
- // process the tick in which the actor is stalled.
- val stalledTickMetrics = expectActorMetrics("user/tracked-mailbox-size-queueing-up", metricsListener, 2 seconds)
- stalledTickMetrics.mailboxSize.numberOfMeasurements should equal(30)
- // only the automatic last-value recording should be taken, and includes the message being currently processed.
- stalledTickMetrics.mailboxSize.measurements should contain only (Measurement(10, 30))
- stalledTickMetrics.mailboxSize.min should equal(10)
- stalledTickMetrics.mailboxSize.max should equal(10)
- stalledTickMetrics.processingTime.numberOfMeasurements should be(0L)
- stalledTickMetrics.timeInMailbox.numberOfMeasurements should be(0L)
-
- // process the tick after the actor is unblocked.
- val afterStallTickMetrics = expectActorMetrics("user/tracked-mailbox-size-queueing-up", metricsListener, 2 seconds)
- afterStallTickMetrics.processingTime.numberOfMeasurements should be(10L)
- afterStallTickMetrics.timeInMailbox.numberOfMeasurements should be(10L)
- afterStallTickMetrics.processingTime.max should be(2500.milliseconds.toNanos +- 100.milliseconds.toNanos)
- afterStallTickMetrics.timeInMailbox.max should be(2500.milliseconds.toNanos +- 100.milliseconds.toNanos)
- }
-
- "track the number of errors" in new ErrorActorFixture {
- val (error, metricsListener) = failedActor("tracked-errors")
-
- for (_ ← 1 to 5) {
- error ! Error
- }
-
- val actorMetrics = expectActorMetrics("user/tracked-errors", metricsListener, 3 seconds)
- actorMetrics.errorCounter.numberOfMeasurements should be(5L)
- }
- }
-
- def expectActorMetrics(actorPath: String, listener: TestProbe, waitTime: FiniteDuration): ActorMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val actorMetricsOption = tickSnapshot.metrics.get(ActorMetrics(actorPath))
- actorMetricsOption should not be empty
- actorMetricsOption.get.asInstanceOf[ActorMetricSnapshot]
- }
-
- trait DelayableActorFixture {
- def delayableActor(name: String): (ActorRef, TestProbe) = {
- val actor = system.actorOf(Props[DelayableActor], name)
- val metricsListener = TestProbe()
-
- Kamon(Metrics).subscribe(ActorMetrics, "user/" + name, metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
-
- (actor, metricsListener)
- }
- }
-
- trait ErrorActorFixture {
- def failedActor(name: String): (ActorRef, TestProbe) = {
- val actor = system.actorOf(Props[FailedActor], name)
- val metricsListener = TestProbe()
-
- Kamon(Metrics).subscribe(ActorMetrics, "user/" + name, metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
-
- (actor, metricsListener)
- }
- }
-}
-
-class DelayableActor extends Actor {
- def receive = {
- case Delay(time) ⇒ Thread.sleep(time.toMillis)
- case Discard ⇒
- }
-}
-
-class FailedActor extends Actor {
- def receive = {
- case Error ⇒ 1 / 0
- case Discard ⇒
- }
-}
-
-case object Discard
-
-case class Delay(time: FiniteDuration)
-
-case class Error()
diff --git a/kamon-core/src/test/scala/kamon/metrics/CustomMetricSpec.scala b/kamon-core/src/test/scala/kamon/metrics/CustomMetricSpec.scala
deleted file mode 100644
index 1e072f71..00000000
--- a/kamon-core/src/test/scala/kamon/metrics/CustomMetricSpec.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import akka.testkit.TestKitBase
-import org.scalatest.{ Matchers, WordSpecLike }
-import akka.actor.ActorSystem
-import scala.concurrent.duration._
-import com.typesafe.config.ConfigFactory
-import kamon.Kamon
-import kamon.metrics.Subscriptions.TickMetricSnapshot
-import kamon.metrics.MetricSnapshot.Measurement
-
-class CustomMetricSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit lazy val system: ActorSystem = ActorSystem("actor-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | filters = [
- | {
- | custom-metric {
- | includes = [ "test/*" ]
- | excludes = [ ]
- | }
- | }
- | ]
- |}
- """.stripMargin))
-
- "the Kamon custom metrics support" should {
- "allow registering a custom metric with the Metrics extension" in {
- val recorder = Kamon(Metrics).register(CustomMetric("test/sample-counter"), CustomMetric.histogram(100, 2, Scale.Unit))
-
- recorder should be('defined)
- }
-
- "allow subscriptions to custom metrics using the default subscription protocol" in {
- val recorder = Kamon(Metrics).register(CustomMetric("test/sample-counter"), CustomMetric.histogram(100, 2, Scale.Unit))
-
- recorder.map { r ⇒
- r.record(100)
- r.record(15)
- r.record(0)
- r.record(50)
- }
-
- Kamon(Metrics).subscribe(CustomMetric, "test/sample-counter", testActor)
-
- val recordedValues = within(5 seconds) {
- val snapshot = expectMsgType[TickMetricSnapshot]
- snapshot.metrics(CustomMetric("test/sample-counter")).metrics(CustomMetric.RecordedValues)
- }
-
- recordedValues.min should equal(0)
- recordedValues.max should equal(100)
- recordedValues.numberOfMeasurements should equal(4)
- recordedValues.measurements should contain allOf (
- Measurement(0, 1),
- Measurement(15, 1),
- Measurement(50, 1),
- Measurement(100, 1))
- }
- }
-
-}
diff --git a/kamon-core/src/test/scala/kamon/metrics/MetricSnapshotSpec.scala b/kamon-core/src/test/scala/kamon/metrics/MetricSnapshotSpec.scala
deleted file mode 100644
index 4d6ebc49..00000000
--- a/kamon-core/src/test/scala/kamon/metrics/MetricSnapshotSpec.scala
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import org.scalatest.{ Matchers, WordSpec }
-import kamon.metrics.MetricSnapshot.Measurement
-
-class MetricSnapshotSpec extends WordSpec with Matchers {
-
- "a metric snapshot" should {
- "support a max operation" in new SnapshotFixtures {
- snapshotA.max should be(17)
- snapshotB.max should be(10)
- snapshotC.max should be(1)
- }
-
- "support a min operation" in new SnapshotFixtures {
- snapshotA.min should be(1)
- snapshotB.min should be(2)
- snapshotC.min should be(1)
- }
-
- "be able to merge with other snapshot" in new SnapshotFixtures {
- val merged = snapshotA.merge(snapshotB).merge(snapshotC)
-
- merged.min should be(1)
- merged.max should be(17)
- merged.numberOfMeasurements should be(300)
- merged.measurements.map(_.value) should contain inOrderOnly (1, 2, 4, 5, 7, 10, 17)
- }
-
- "be able to merge with empty snapshots" in new SnapshotFixtures {
- snapshotA.merge(emptySnapshot) should be(snapshotA)
- emptySnapshot.merge(snapshotA).merge(emptySnapshot) should be(snapshotA)
- snapshotC.merge(emptySnapshot) should be(snapshotC)
- }
-
- }
-
- trait SnapshotFixtures {
- val emptySnapshot = MetricSnapshot(InstrumentTypes.Histogram, 0, Scale.Unit, Vector.empty)
-
- val snapshotA = MetricSnapshot(InstrumentTypes.Histogram, 100, Scale.Unit, Vector(
- Measurement(1, 3),
- Measurement(2, 15),
- Measurement(5, 68),
- Measurement(7, 13),
- Measurement(17, 1)))
-
- val snapshotB = MetricSnapshot(InstrumentTypes.Histogram, 100, Scale.Unit, Vector(
- Measurement(2, 6),
- Measurement(4, 48),
- Measurement(5, 39),
- Measurement(10, 7)))
-
- val snapshotC = MetricSnapshot(InstrumentTypes.Counter, 100, Scale.Unit, Vector(Measurement(1, 100)))
- }
-}
diff --git a/kamon-core/src/test/scala/kamon/metrics/instrument/MinMaxCounterSpec.scala b/kamon-core/src/test/scala/kamon/metrics/instrument/MinMaxCounterSpec.scala
deleted file mode 100644
index 14f1573f..00000000
--- a/kamon-core/src/test/scala/kamon/metrics/instrument/MinMaxCounterSpec.scala
+++ /dev/null
@@ -1,110 +0,0 @@
-/* =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics.instrument
-
-import org.scalatest.{ Matchers, WordSpecLike }
-import kamon.metrics.instruments.MinMaxCounter
-import kamon.metrics.instruments.MinMaxCounter.CounterMeasurement
-
-class MinMaxCounterSpec extends WordSpecLike with Matchers {
-
- "the MinMaxCounter" should {
- "increment" in {
- val counter = MinMaxCounter()
-
- counter.increment()
- counter.increment()
- counter.increment()
- counter.increment()
- counter.increment()
-
- val CounterMeasurement(_, _, current) = counter.collect()
-
- current should be(5)
- }
-
- "decrement" in {
- val counter = MinMaxCounter()
- counter.increment(5L)
-
- counter.decrement()
- counter.decrement()
- counter.decrement()
- counter.decrement()
- counter.decrement()
-
- val CounterMeasurement(_, _, current) = counter.collect()
-
- current should be(0)
- }
-
- "reset the min and max with the sum value when the collect method is called" in {
- val counter = MinMaxCounter()
-
- counter.increment(10)
- counter.increment(20)
- counter.increment(30)
- counter.increment(40)
- counter.increment(50)
-
- counter.collect() //only for check the last value after reset min max
-
- val CounterMeasurement(min, max, current) = counter.collect()
-
- min should be(current)
- max should be(current)
- current should be(150)
- }
- }
-
- "track the min value" in {
- val counter = MinMaxCounter()
-
- counter.increment(10)
- counter.increment(20)
- counter.increment(30)
- counter.increment(40)
- counter.increment(50)
-
- val CounterMeasurement(min, _, _) = counter.collect()
-
- min should be(0)
-
- counter.increment(50)
-
- val CounterMeasurement(minAfterCollectAndAddSomeValues, _, _) = counter.collect()
-
- minAfterCollectAndAddSomeValues should be(150)
- }
-
- "track the max value" in {
- val counter = MinMaxCounter()
- counter.increment(10)
- counter.increment(20)
- counter.increment(30)
- counter.increment(40)
- counter.increment(50)
-
- val CounterMeasurement(_, max, _) = counter.collect()
-
- max should be(150)
-
- counter.increment(200)
-
- val CounterMeasurement(_, maxAfterCollectAndAddSomeValues, _) = counter.collect()
-
- maxAfterCollectAndAddSomeValues should be(350)
- }
-}
diff --git a/kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala b/kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala
new file mode 100644
index 00000000..4d0049f1
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala
@@ -0,0 +1,95 @@
+package kamon.trace
+
+import akka.actor.ActorSystem
+import akka.testkit.{ ImplicitSender, TestKitBase }
+import com.typesafe.config.ConfigFactory
+import kamon.trace.TraceContext.SegmentIdentity
+import org.scalatest.{ Matchers, WordSpecLike }
+
+class TraceContextManipulationSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender {
+ implicit lazy val system: ActorSystem = ActorSystem("trace-metrics-spec", ConfigFactory.parseString(
+ """
+ |kamon.metrics {
+ | tick-interval = 1 hour
+ | filters = [
+ | {
+ | trace {
+ | includes = [ "*" ]
+ | excludes = [ "non-tracked-trace"]
+ | }
+ | }
+ | ]
+ | precision {
+ | default-histogram-precision {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ |
+ | default-min-max-counter-precision {
+ | refresh-interval = 1 second
+ | highest-trackable-value = 999999999
+ | significant-value-digits = 2
+ | }
+ | }
+ |}
+ """.stripMargin))
+
+ "the TraceRecorder api" should {
+ "allow starting a trace within a specified block of code, and only within that block of code" in {
+ val createdContext = TraceRecorder.withNewTraceContext("start-context") {
+ TraceRecorder.currentContext should not be empty
+ TraceRecorder.currentContext.get
+ }
+
+ TraceRecorder.currentContext shouldBe empty
+ createdContext.name shouldBe ("start-context")
+ }
+
+ "allow starting a trace within a specified block of code, providing a trace-token and only within that block of code" in {
+ val createdContext = TraceRecorder.withNewTraceContext("start-context-with-token", Some("token-1")) {
+ TraceRecorder.currentContext should not be empty
+ TraceRecorder.currentContext.get
+ }
+
+ TraceRecorder.currentContext shouldBe empty
+ createdContext.name shouldBe ("start-context-with-token")
+ createdContext.token should be("token-1")
+ }
+
+ "allow providing a TraceContext and make it available within a block of code" in {
+ val createdContext = TraceRecorder.withNewTraceContext("manually-provided-trace-context") { TraceRecorder.currentContext }
+
+ TraceRecorder.currentContext shouldBe empty
+ TraceRecorder.withTraceContext(createdContext) {
+ TraceRecorder.currentContext should be(createdContext)
+ }
+
+ TraceRecorder.currentContext shouldBe empty
+ }
+
+ "allow renaming a trace" in {
+ val createdContext = TraceRecorder.withNewTraceContext("trace-before-rename") {
+ TraceRecorder.rename("renamed-trace")
+ TraceRecorder.currentContext.get
+ }
+
+ TraceRecorder.currentContext shouldBe empty
+ createdContext.name shouldBe ("renamed-trace")
+ }
+
+ "allow creating a segment within a trace" in {
+ val createdContext = TraceRecorder.withNewTraceContext("trace-with-segments") {
+ val segmentHandle = TraceRecorder.startSegment(TraceManipulationTestSegment("segment-1"))
+
+ TraceRecorder.currentContext.get
+ }
+
+ TraceRecorder.currentContext shouldBe empty
+ createdContext.name shouldBe ("trace-with-segments")
+
+ }
+ }
+
+ case class TraceManipulationTestSegment(name: String) extends SegmentIdentity
+
+}
diff --git a/kamon-datadog/src/main/resources/reference.conf b/kamon-datadog/src/main/resources/reference.conf
index 231eaf7d..4de24526 100644
--- a/kamon-datadog/src/main/resources/reference.conf
+++ b/kamon-datadog/src/main/resources/reference.conf
@@ -24,6 +24,10 @@ kamon {
dispatcher = [ "*" ]
}
+ # Enable system metrics
+ # In order to not get a ClassNotFoundException, we must register the kamon-system-metrics module
+ report-system-metrics = false
+
# Application prefix for all metrics pushed to Datadog. The default namespacing scheme for metrics follows
# this pattern:
# application.entity-name.metric-name
diff --git a/kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala b/kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala
index 15d5d3fe..6498f851 100644
--- a/kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala
+++ b/kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala
@@ -16,16 +16,17 @@
package kamon.datadog
+import java.net.InetSocketAddress
+import java.util.concurrent.TimeUnit.MILLISECONDS
+
import akka.actor._
+import akka.event.Logging
import kamon.Kamon
+import kamon.metric._
import kamon.metrics._
-import scala.concurrent.duration._
+
import scala.collection.JavaConverters._
-import com.typesafe.config.Config
-import java.lang.management.ManagementFactory
-import akka.event.Logging
-import java.net.InetSocketAddress
-import java.util.concurrent.TimeUnit.MILLISECONDS
+import scala.concurrent.duration._
object Datadog extends ExtensionId[DatadogExtension] with ExtensionIdProvider {
override def lookup(): ExtensionId[_ <: Extension] = Datadog
@@ -67,15 +68,23 @@ class DatadogExtension(system: ExtendedActorSystem) extends Kamon.Extension {
Kamon(Metrics)(system).subscribe(DispatcherMetrics, dispatcherPathPattern, datadogMetricsListener, permanently = true)
}
+ // Subscribe to SystemMetrics
+ val includeSystemMetrics = datadogConfig.getBoolean("report-system-metrics")
+ if (includeSystemMetrics) {
+ List(CPUMetrics, ProcessCPUMetrics, MemoryMetrics, NetworkMetrics, GCMetrics, HeapMetrics) foreach { metric ⇒
+ Kamon(Metrics)(system).subscribe(metric, "*", datadogMetricsListener, permanently = true)
+ }
+ }
+
def buildMetricsListener(tickInterval: Long, flushInterval: Long): ActorRef = {
assert(flushInterval >= tickInterval, "Datadog flush-interval needs to be equal or greater to the tick-interval")
- val metricsTranslator = system.actorOf(DatadogMetricsSender.props(datadogHost, maxPacketSizeInBytes), "datadog-metrics-sender")
+ val metricsSender = system.actorOf(DatadogMetricsSender.props(datadogHost, maxPacketSizeInBytes), "datadog-metrics-sender")
if (flushInterval == tickInterval) {
// No need to buffer the metrics, let's go straight to the metrics sender.
- metricsTranslator
+ metricsSender
} else {
- system.actorOf(TickMetricSnapshotBuffer.props(flushInterval.toInt.millis, metricsTranslator), "datadog-metrics-buffer")
+ system.actorOf(TickMetricSnapshotBuffer.props(flushInterval.toInt.millis, metricsSender), "datadog-metrics-buffer")
}
}
}
diff --git a/kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala b/kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala
index 028e9608..0f67cc34 100644
--- a/kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala
+++ b/kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala
@@ -20,11 +20,10 @@ import akka.actor.{ ActorSystem, Props, ActorRef, Actor }
import akka.io.{ Udp, IO }
import java.net.InetSocketAddress
import akka.util.ByteString
-import kamon.metrics.Subscriptions.TickMetricSnapshot
-import kamon.metrics.MetricSnapshot.Measurement
-import kamon.metrics.InstrumentTypes.{ Counter, Gauge, Histogram, InstrumentType }
+import kamon.metric.Subscriptions.TickMetricSnapshot
import java.text.{ DecimalFormatSymbols, DecimalFormat }
-import kamon.metrics.{ MetricIdentity, MetricGroupIdentity }
+import kamon.metric.instrument.{ Counter, Histogram }
+import kamon.metric.{ MetricIdentity, MetricGroupIdentity }
import java.util.Locale
class DatadogMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long) extends Actor with UdpExtensionProvider {
@@ -50,7 +49,7 @@ class DatadogMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long
}
def writeMetricsToRemote(tick: TickMetricSnapshot, udpSender: ActorRef): Unit = {
- val dataBuilder = new MetricDataPacketBuilder(maxPacketSizeInBytes, udpSender, remote)
+ val packetBuilder = new MetricDataPacketBuilder(maxPacketSizeInBytes, udpSender, remote)
for {
(groupIdentity, groupSnapshot) ← tick.metrics
@@ -59,38 +58,43 @@ class DatadogMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long
val key = buildMetricName(groupIdentity, metricIdentity)
- for (measurement ← metricSnapshot.measurements) {
- val measurementData = formatMeasurement(groupIdentity, metricIdentity, measurement, metricSnapshot.instrumentType)
- dataBuilder.appendMeasurement(key, measurementData)
+ metricSnapshot match {
+ case hs: Histogram.Snapshot ⇒
+ hs.recordsIterator.foreach { record ⇒
+ val measurementData = formatMeasurement(groupIdentity, metricIdentity, encodeStatsDTimer(record.level, record.count))
+ packetBuilder.appendMeasurement(key, measurementData)
+
+ }
+
+ case cs: Counter.Snapshot ⇒
+ val measurementData = formatMeasurement(groupIdentity, metricIdentity, encodeStatsDCounter(cs.count))
+ packetBuilder.appendMeasurement(key, measurementData)
}
}
- dataBuilder.flush()
+ packetBuilder.flush()
}
- def formatMeasurement(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity, measurement: Measurement,
- instrumentType: InstrumentType): String = {
-
- StringBuilder.newBuilder.append(buildMeasurementData(measurement, instrumentType))
+ def formatMeasurement(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity, measurementData: String): String =
+ StringBuilder.newBuilder
+ .append(measurementData)
.append(buildIdentificationTag(groupIdentity, metricIdentity))
.result()
- }
- def buildMeasurementData(measurement: Measurement, instrumentType: InstrumentType): String = {
- def dataDogDMetricFormat(value: String, metricType: String, samplingRate: Double = 1D): String =
- s"$value|$metricType${(if (samplingRate != 1D) "|@" + samplingRateFormat.format(samplingRate) else "")}"
-
- instrumentType match {
- case Histogram ⇒ dataDogDMetricFormat(measurement.value.toString, "ms", (1D / measurement.count))
- case Gauge ⇒ dataDogDMetricFormat(measurement.value.toString, "g")
- case Counter ⇒ dataDogDMetricFormat(measurement.count.toString, "c")
- }
+ def encodeStatsDTimer(level: Long, count: Long): String = {
+ val samplingRate: Double = 1D / count
+ level.toString + "|ms" + (if (samplingRate != 1D) "|@" + samplingRateFormat.format(samplingRate) else "")
}
+ def encodeStatsDCounter(count: Long): String = count.toString + "|c"
+
def buildMetricName(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String =
s"$appName.${groupIdentity.category.name}.${metricIdentity.name}"
- def buildIdentificationTag(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String =
- s"|#${groupIdentity.category.name}:${groupIdentity.name}"
+ def buildIdentificationTag(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String = {
+ // Make the automatic HTTP trace names a bit more friendly
+ val normalizedEntityName = groupIdentity.name.replace(": ", ":")
+ s"|#${groupIdentity.category.name}:${normalizedEntityName}"
+ }
}
object DatadogMetricsSender {
diff --git a/kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala b/kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala
index 6a7191a1..713db30d 100644
--- a/kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala
+++ b/kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala
@@ -16,29 +16,43 @@
package kamon.datadog
-import akka.testkit.{TestKitBase, TestProbe}
-import akka.actor.{Props, ActorRef, ActorSystem}
-import kamon.metrics.instruments.CounterRecorder
-import org.scalatest.{Matchers, WordSpecLike}
-import kamon.metrics._
+import akka.testkit.{ TestKitBase, TestProbe }
+import akka.actor.{ Props, ActorRef, ActorSystem }
+import kamon.Kamon
+import kamon.metric.instrument.Histogram.Precision
+import kamon.metric.instrument.{ Counter, Histogram, HdrHistogram, LongAdderCounter }
+import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.metric._
import akka.io.Udp
-import org.HdrHistogram.HdrRecorder
-import kamon.metrics.Subscriptions.TickMetricSnapshot
+import kamon.metric.Subscriptions.TickMetricSnapshot
import java.lang.management.ManagementFactory
import java.net.InetSocketAddress
import com.typesafe.config.ConfigFactory
class DatadogMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit lazy val system = ActorSystem("datadog-metric-sender-spec",
- ConfigFactory.parseString("kamon.datadog.max-packet-size = 256 bytes"))
+ implicit lazy val system: ActorSystem = ActorSystem("datadog-metric-sender-spec", ConfigFactory.parseString(
+ """
+ |kamon {
+ | metrics {
+ | disable-aspectj-weaver-missing-error = true
+ | }
+ |
+ | datadog {
+ | max-packet-size = 256 bytes
+ | }
+ |}
+ |
+ """.stripMargin))
+
+ val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
"the DataDogMetricSender" should {
"send latency measurements" in new UdpListenerFixture {
val testMetricName = "processing-time"
- val testRecorder = HdrRecorder(1000L, 2, Scale.Unit)
+ val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
testRecorder.record(10L)
- val udp = setup(Map(testMetricName -> testRecorder.collect()))
+ val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"kamon.actor.processing-time:10|ms|#actor:user/kamon")
@@ -46,11 +60,11 @@ class DatadogMetricSenderSpec extends TestKitBase with WordSpecLike with Matcher
"include the sampling rate in case of multiple measurements of the same value" in new UdpListenerFixture {
val testMetricName = "processing-time"
- val testRecorder = HdrRecorder(1000L, 2, Scale.Unit)
+ val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
testRecorder.record(10L)
testRecorder.record(10L)
- val udp = setup(Map(testMetricName -> testRecorder.collect()))
+ val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"kamon.actor.processing-time:10|ms|@0.5|#actor:user/kamon")
@@ -58,7 +72,7 @@ class DatadogMetricSenderSpec extends TestKitBase with WordSpecLike with Matcher
"flush the packet when the max-packet-size is reached" in new UdpListenerFixture {
val testMetricName = "processing-time"
- val testRecorder = HdrRecorder(testMaxPacketSize, 3, Scale.Unit)
+ val testRecorder = Histogram(10000L, Precision.Normal, Scale.Unit)
var bytes = 0
var level = 0
@@ -69,8 +83,8 @@ class DatadogMetricSenderSpec extends TestKitBase with WordSpecLike with Matcher
bytes += s"kamon.actor.$testMetricName:$level|ms|#actor:user/kamon".length
}
- val udp = setup(Map(testMetricName -> testRecorder.collect()))
- udp.expectMsgType[Udp.Send]// let the first flush pass
+ val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ udp.expectMsgType[Udp.Send] // let the first flush pass
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"kamon.actor.$testMetricName:$level|ms|#actor:user/kamon")
@@ -81,24 +95,21 @@ class DatadogMetricSenderSpec extends TestKitBase with WordSpecLike with Matcher
val secondTestMetricName = "processing-time-2"
val thirdTestMetricName = "counter"
- val firstTestRecorder = HdrRecorder(1000L, 2, Scale.Unit)
- val secondTestRecorder = HdrRecorder(1000L, 2, Scale.Unit)
- val thirdTestRecorder = CounterRecorder()
+ val firstTestRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
+ val secondTestRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
+ val thirdTestRecorder = Counter()
firstTestRecorder.record(10L)
firstTestRecorder.record(10L)
secondTestRecorder.record(21L)
- thirdTestRecorder.record(1L)
- thirdTestRecorder.record(1L)
- thirdTestRecorder.record(1L)
- thirdTestRecorder.record(1L)
+ thirdTestRecorder.increment(4L)
val udp = setup(Map(
- firstTestMetricName -> firstTestRecorder.collect(),
- secondTestMetricName -> secondTestRecorder.collect(),
- thirdTestMetricName -> thirdTestRecorder.collect()))
+ firstTestMetricName -> firstTestRecorder.collect(collectionContext),
+ secondTestMetricName -> secondTestRecorder.collect(collectionContext),
+ thirdTestMetricName -> thirdTestRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be("kamon.actor.processing-time-1:10|ms|@0.5|#actor:user/kamon\nkamon.actor.processing-time-2:21|ms|#actor:user/kamon\nkamon.actor.counter:4|c|#actor:user/kamon")
@@ -109,7 +120,7 @@ class DatadogMetricSenderSpec extends TestKitBase with WordSpecLike with Matcher
val localhostName = ManagementFactory.getRuntimeMXBean.getName.split('@')(1)
val testMaxPacketSize = system.settings.config.getBytes("kamon.datadog.max-packet-size")
- def setup(metrics: Map[String, MetricSnapshotLike]): TestProbe = {
+ def setup(metrics: Map[String, MetricSnapshot]): TestProbe = {
val udp = TestProbe()
val metricsSender = system.actorOf(Props(new DatadogMetricsSender(new InetSocketAddress(localhostName, 0), testMaxPacketSize) {
override def udpExtension(implicit system: ActorSystem): ActorRef = udp.ref
@@ -137,7 +148,10 @@ class DatadogMetricSenderSpec extends TestKitBase with WordSpecLike with Matcher
}
metricsSender ! TickMetricSnapshot(0, 0, Map(testGroupIdentity -> new MetricGroupSnapshot {
- val metrics: Map[MetricIdentity, MetricSnapshotLike] = testMetrics.toMap
+ type GroupSnapshotType = Histogram.Snapshot
+ def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = ???
+
+ val metrics: Map[MetricIdentity, MetricSnapshot] = testMetrics.toMap
}))
udp
}
diff --git a/kamon-examples/kamon-play-newrelic-example/app/controllers/NewRelicExample.scala b/kamon-examples/kamon-play-example/app/Global.scala
index 6b89f268..5fbb9c7e 100644
--- a/kamon-examples/kamon-play-newrelic-example/app/controllers/NewRelicExample.scala
+++ b/kamon-examples/kamon-play-example/app/Global.scala
@@ -13,25 +13,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
-package controllers
-import play.api.mvc.{Action, Controller}
-import play.api.libs.concurrent.Execution.Implicits.defaultContext
-import scala.concurrent._
+import filters.TraceLocalFilter
+import play.api.mvc.WithFilters
-object NewRelicExample extends Controller {
+object Global extends WithFilters(TraceLocalFilter){
- def sayHelloKamon() = Action.async {
- Future {
- play.Logger.info("Say hello to Kamon")
- Ok("Say hello to Kamon")
- }
- }
-
- def sayHelloNewRelic() = Action.async {
- Future {
- play.Logger.info("Say hello to NewRelic")
- Ok("Say hello to NewRelic")
- }
- }
}
+
+
+
diff --git a/kamon-examples/kamon-play-example/app/controllers/KamonPlayExample.scala b/kamon-examples/kamon-play-example/app/controllers/KamonPlayExample.scala
new file mode 100644
index 00000000..2b2e9373
--- /dev/null
+++ b/kamon-examples/kamon-play-example/app/controllers/KamonPlayExample.scala
@@ -0,0 +1,71 @@
+/* ===================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ========================================================== */
+package controllers
+
+import kamon.play.action.TraceName
+import play.api.Logger
+import play.api.libs.concurrent.Execution.Implicits.defaultContext
+import play.api.mvc.{Action, Controller}
+
+import scala.concurrent._
+
+/**
+ * In order to run the example we need to set the -javaagent parameter on the JVM, but Play has some limitations when trying to set a
+ * Java agent in Play dev mode (i.e., play run) -> https://github.com/playframework/playframework/issues/1372, so we have other options:
+ *
+ * The first option is to set -javaagent:path-to-aspectj-weaver in your IDE, or
+ *
+ * Run the following commands from console:
+ *
+ * 1- play stage
+ * 2- cd target/universal/stage
+ * 3- java -cp ".:lib/*" -javaagent:lib/org.aspectj.aspectjweaver-1.8.1.jar play.core.server.NettyServer
+ *
+ * and finally for test:
+ *
+ * curl -i -H 'X-Trace-Token:kamon-test' -H 'MyTraceLocalStorageKey:extra-header' -X GET "http://localhost:9000/helloKamon"
+ *
+ * we should get:
+ * HTTP/1.1 200 OK
+ * Content-Type: text/plain; charset=utf-8
+ * MyTraceLocalStorageKey: extra-header -> Extra Information
+ * X-Trace-Token: kamon-test -> default Trace-Token
+ *
+ * Say hello to Kamon
+ */
+
+
+object KamonPlayExample extends Controller {
+
+ val logger = Logger(this.getClass)
+
+ def sayHello() = Action.async {
+ Future {
+ logger.info("Say hello to Kamon")
+ Ok("Say hello to Kamon")
+ }
+ }
+
+ //using the Kamon TraceName Action to rename the trace name in metrics
+ def sayHelloWithTraceName() = TraceName("my-trace-name") {
+ Action.async {
+ Future {
+ logger.info("Say hello to Kamon")
+ Ok("Say hello to Kamon")
+ }
+ }
+ }
+}
diff --git a/kamon-examples/kamon-play-example/app/filters/TraceLocalFilter.scala b/kamon-examples/kamon-play-example/app/filters/TraceLocalFilter.scala
new file mode 100644
index 00000000..08ea782c
--- /dev/null
+++ b/kamon-examples/kamon-play-example/app/filters/TraceLocalFilter.scala
@@ -0,0 +1,52 @@
+/* ===================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ========================================================== */
+
+package filters
+
+import kamon.trace.{TraceRecorder, TraceLocal}
+import play.api.Logger
+import play.api.mvc.{Result, RequestHeader, Filter}
+import play.api.libs.concurrent.Execution.Implicits.defaultContext
+import scala.concurrent.Future
+
+object TraceLocalKey extends TraceLocal.TraceLocalKey {
+ type ValueType = String
+}
+
+/*
+ By default Kamon propagates the trace-token-header-name, but sometimes it is necessary to pass extra information, such as extra headers,
+ through the application requests. With Kamon this is possible using the TraceLocalStorage; in Play applications we can use an Action Filter or Action Composition.
+ In this example we use a simple filter that, given a header, stores its value and then puts that value into the result headers.
+
+ More detailed usage of TraceLocalStorage: https://github.com/kamon-io/Kamon/blob/b17539d231da923ea854c01d2c69eb02ef1e85b1/kamon-core/src/test/scala/kamon/trace/TraceLocalSpec.scala
+ */
+object TraceLocalFilter extends Filter {
+ val logger = Logger(this.getClass)
+ val TraceLocalStorageKey = "MyTraceLocalStorageKey"
+
+ override def apply(next: (RequestHeader) ⇒ Future[Result])(header: RequestHeader): Future[Result] = {
+ TraceRecorder.withTraceContext(TraceRecorder.currentContext) {
+
+ TraceLocal.store(TraceLocalKey)(header.headers.get(TraceLocalStorageKey).getOrElse("unknown"))
+
+ next(header).map {
+ val traceTokenValue = TraceLocal.retrieve(TraceLocalKey).getOrElse("unknown")
+ logger.info(s"traceTokenValue: $traceTokenValue")
+ result ⇒ result.withHeaders((TraceLocalStorageKey -> traceTokenValue))
+ }
+ }
+ }
+}
diff --git a/kamon-examples/kamon-play-example/conf/application.conf b/kamon-examples/kamon-play-example/conf/application.conf
new file mode 100644
index 00000000..4f9a60ec
--- /dev/null
+++ b/kamon-examples/kamon-play-example/conf/application.conf
@@ -0,0 +1,61 @@
+#kamon related configuration
+akka {
+ extensions = ["kamon.statsd.StatsD"]
+}
+
+kamon {
+
+ statsd {
+ # Hostname and port in which your StatsD is running. Remember that StatsD packets are sent using UDP and
+ # setting unreachable hosts and/or closed ports will not be reported by Kamon; your data just won't go anywhere.
+ hostname = "127.0.0.1"
+ port = 8125
+
+ # Interval between metrics data flushes to StatsD. Its value must be equal to or greater than the
+ # kamon.metrics.tick-interval setting.
+ flush-interval = 1 second
+
+ # Max packet size for UDP metrics data sent to StatsD.
+ max-packet-size = 1024 bytes
+
+ # Subscription patterns used to select which metrics will be pushed to StatsD. Note that first, metrics
+ # collection for your desired entities must be activated under the kamon.metrics.filters settings.
+ includes {
+ actor = [ "*" ]
+ trace = [ "*" ]
+ dispatcher = [ "*" ]
+ }
+
+ simple-metric-key-generator {
+ # Application prefix for all metrics pushed to StatsD. The default namespacing scheme for metrics follows
+ # this pattern:
+ # application.host.entity.entity-name.metric-name
+ application = "kamon"
+ }
+ }
+
+ play {
+ include-trace-token-header = true
+ trace-token-header-name = "X-Trace-Token"
+ }
+}
+
+# This is the main configuration file for the application.
+# ~~~~~
+
+# Secret key
+# ~~~~~
+# The secret key is used to secure cryptographics functions.
+# If you deploy your application to several instances be sure to use the same key!
+application.secret="3BLM`<aD^5r/L[MinNdw8Tp@915n0djY[g66OSOLi@?k`>AZE9EOphrmf;;6JsAN"
+
+# The application languages
+# ~~~~~
+application.langs="en"
+
+# Global object class
+# ~~~~~
+# Define the Global object class for this application.
+# Default to Global in the root package.
+# application.global=Global
+
diff --git a/kamon-examples/kamon-play-newrelic-example/conf/logger.xml b/kamon-examples/kamon-play-example/conf/logger.xml
index 84126e9d..84126e9d 100644
--- a/kamon-examples/kamon-play-newrelic-example/conf/logger.xml
+++ b/kamon-examples/kamon-play-example/conf/logger.xml
diff --git a/kamon-examples/kamon-play-example/conf/routes b/kamon-examples/kamon-play-example/conf/routes
new file mode 100644
index 00000000..122c355a
--- /dev/null
+++ b/kamon-examples/kamon-play-example/conf/routes
@@ -0,0 +1,3 @@
+# Routes
+GET /helloKamon controllers.KamonPlayExample.sayHello
+GET /helloKamonWithTraceName controllers.KamonPlayExample.sayHelloWithTraceName \ No newline at end of file
diff --git a/kamon-examples/kamon-play-example/project/Build.scala b/kamon-examples/kamon-play-example/project/Build.scala
new file mode 100644
index 00000000..c348862a
--- /dev/null
+++ b/kamon-examples/kamon-play-example/project/Build.scala
@@ -0,0 +1,48 @@
+import java.io.File
+import sbt._
+import Keys._
+import play.Play.autoImport._
+import sbt.Keys._
+import sbt._
+import com.typesafe.sbt.web.SbtWeb
+
+
+object ApplicationBuild extends Build {
+
+ val appName = "Kamon-Play-Example"
+ val appVersion = "1.0-SNAPSHOT"
+
+ val resolutionRepos = Seq(
+ "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/",
+ "Sonatype Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots",
+ "Sonatype Releases" at "https://oss.sonatype.org/content/repositories/releases",
+ "Kamon Repository Snapshots" at "http://snapshots.kamon.io"
+ )
+
+ val defaultSettings = Seq(
+ scalaVersion := "2.11.0",
+ resolvers ++= resolutionRepos,
+ scalacOptions := Seq(
+ "-encoding",
+ "utf8",
+ "-g:vars",
+ "-feature",
+ "-unchecked",
+ "-deprecation",
+ "-target:jvm-1.6",
+ "-language:postfixOps",
+ "-language:implicitConversions",
+ "-Xlog-reflective-calls"
+ ))
+
+ val dependencies = Seq(
+ "io.kamon" %% "kamon-core" % "0.3.1",
+ "io.kamon" %% "kamon-play" % "0.3.1",
+ "io.kamon" %% "kamon-statsd" % "0.3.1",
+ "org.aspectj" % "aspectjweaver" % "1.8.1"
+ )
+
+ val main = Project(appName, file(".")).enablePlugins(play.PlayScala, SbtWeb)
+ .settings(libraryDependencies ++= dependencies)
+ .settings(defaultSettings: _*)
+}
diff --git a/kamon-examples/kamon-play-example/project/build.properties b/kamon-examples/kamon-play-example/project/build.properties
new file mode 100644
index 00000000..be6c454f
--- /dev/null
+++ b/kamon-examples/kamon-play-example/project/build.properties
@@ -0,0 +1 @@
+sbt.version=0.13.5
diff --git a/kamon-examples/kamon-play-newrelic-example/project/plugins.sbt b/kamon-examples/kamon-play-example/project/plugins.sbt
index 065c342e..adc5b325 100644
--- a/kamon-examples/kamon-play-newrelic-example/project/plugins.sbt
+++ b/kamon-examples/kamon-play-example/project/plugins.sbt
@@ -5,5 +5,5 @@ logLevel := Level.Warn
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
// Use the Play sbt plugin for Play projects
-addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.2.1")
+addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.3.1")
diff --git a/kamon-examples/kamon-play-newrelic-example/public/images/favicon.png b/kamon-examples/kamon-play-example/public/images/favicon.png
index c7d92d2a..c7d92d2a 100644
--- a/kamon-examples/kamon-play-newrelic-example/public/images/favicon.png
+++ b/kamon-examples/kamon-play-example/public/images/favicon.png
Binary files differ
diff --git a/kamon-examples/kamon-play-newrelic-example/public/images/glyphicons-halflings-white.png b/kamon-examples/kamon-play-example/public/images/glyphicons-halflings-white.png
index 3bf6484a..3bf6484a 100644
--- a/kamon-examples/kamon-play-newrelic-example/public/images/glyphicons-halflings-white.png
+++ b/kamon-examples/kamon-play-example/public/images/glyphicons-halflings-white.png
Binary files differ
diff --git a/kamon-examples/kamon-play-newrelic-example/public/images/glyphicons-halflings.png b/kamon-examples/kamon-play-example/public/images/glyphicons-halflings.png
index a9969993..a9969993 100644
--- a/kamon-examples/kamon-play-newrelic-example/public/images/glyphicons-halflings.png
+++ b/kamon-examples/kamon-play-example/public/images/glyphicons-halflings.png
Binary files differ
diff --git a/kamon-examples/kamon-play-newrelic-example/public/javascripts/jquery-1.9.0.min.js b/kamon-examples/kamon-play-example/public/javascripts/jquery-1.9.0.min.js
index 50d1b22f..50d1b22f 100644
--- a/kamon-examples/kamon-play-newrelic-example/public/javascripts/jquery-1.9.0.min.js
+++ b/kamon-examples/kamon-play-example/public/javascripts/jquery-1.9.0.min.js
diff --git a/kamon-examples/kamon-play-newrelic-example/public/stylesheets/bootstrap.css b/kamon-examples/kamon-play-example/public/stylesheets/bootstrap.css
index 2f56af33..2f56af33 100644
--- a/kamon-examples/kamon-play-newrelic-example/public/stylesheets/bootstrap.css
+++ b/kamon-examples/kamon-play-example/public/stylesheets/bootstrap.css
diff --git a/kamon-examples/kamon-play-newrelic-example/public/stylesheets/main.css b/kamon-examples/kamon-play-example/public/stylesheets/main.css
index e69de29b..e69de29b 100644
--- a/kamon-examples/kamon-play-newrelic-example/public/stylesheets/main.css
+++ b/kamon-examples/kamon-play-example/public/stylesheets/main.css
diff --git a/kamon-examples/kamon-play-newrelic-example/conf/application.conf b/kamon-examples/kamon-play-newrelic-example/conf/application.conf
deleted file mode 100644
index 0156d3b4..00000000
--- a/kamon-examples/kamon-play-newrelic-example/conf/application.conf
+++ /dev/null
@@ -1,51 +0,0 @@
-#kamon related configuration
-
-akka {
- extensions = ["kamon.newrelic.NewRelic"]
-}
-
-kamon {
-
- newrelic {
- app-name = "KamonNewRelicExample[Development]"
- license-key = <<Your Key>>
- }
-
- play {
- include-trace-token-header = true
- trace-token-header-name = "X-Trace-Token"
- }
-}
-
-# This is the main configuration file for the application.
-# ~~~~~
-
-# Secret key
-# ~~~~~
-# The secret key is used to secure cryptographics functions.
-# If you deploy your application to several instances be sure to use the same key!
-application.secret="3BLM`<aD^5r/L[MinNdw8Tp@915n0djY[g66OSOLi@?k`>AZE9EOphrmf;;6JsAN"
-
-# The application languages
-# ~~~~~
-application.langs="en"
-
-# Global object class
-# ~~~~~
-# Define the Global object class for this application.
-# Default to Global in the root package.
-# application.global=Global
-
-# Logger
-# ~~~~~
-# You can also configure logback (http://logback.qos.ch/), by providing a logger.xml file in the conf directory .
-
-# Root logger:
-logger.root=ERROR
-
-# Logger used by the framework:
-logger.play=INFO
-
-# Logger provided to your application:
-logger.application=DEBUG
-
diff --git a/kamon-examples/kamon-play-newrelic-example/conf/routes b/kamon-examples/kamon-play-newrelic-example/conf/routes
deleted file mode 100644
index c1c9d64b..00000000
--- a/kamon-examples/kamon-play-newrelic-example/conf/routes
+++ /dev/null
@@ -1,3 +0,0 @@
-# Routes
-GET /helloKamon controllers.NewRelicExample.sayHelloKamon
-GET /helloNewRelic controllers.NewRelicExample.sayHelloNewRelic
diff --git a/kamon-examples/kamon-play-newrelic-example/project/Build.scala b/kamon-examples/kamon-play-newrelic-example/project/Build.scala
deleted file mode 100644
index fa240c39..00000000
--- a/kamon-examples/kamon-play-newrelic-example/project/Build.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-import java.io.File
-import sbt._
-import Keys._
-import play.Project._
-
-object ApplicationBuild extends Build {
-
- val appName = "Kamon-Play-NewRelic-Example"
- val appVersion = "1.0-SNAPSHOT"
-
- val appDependencies = Seq(
- "kamon" % "kamon-core" % "0.0.14-SNAPSHOT",
- "kamon" % "kamon-play" % "0.0.14-SNAPSHOT",
- "kamon" % "kamon-newrelic" % "0.0.14-SNAPSHOT"
- )
-
-
- val main = play.Project(appName, appVersion, appDependencies).settings(
- // For additionally resolving from the conventional ivy local home.
- resolvers += Resolver.file("LocalIvy", file(Path.userHome +
- File.separator + ".ivy2" + File.separator + "local"))(Resolver.ivyStylePatterns))
-}
diff --git a/kamon-examples/kamon-play-newrelic-example/project/build.properties b/kamon-examples/kamon-play-newrelic-example/project/build.properties
deleted file mode 100644
index 0974fce4..00000000
--- a/kamon-examples/kamon-play-newrelic-example/project/build.properties
+++ /dev/null
@@ -1 +0,0 @@
-sbt.version=0.13.0
diff --git a/kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala b/kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala
new file mode 100644
index 00000000..b0cc2551
--- /dev/null
+++ b/kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala
@@ -0,0 +1,227 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.logreporter
+
+import akka.actor._
+import kamon.Kamon
+import kamon.metric.ActorMetrics.ActorMetricSnapshot
+import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.TraceMetrics.TraceMetricsSnapshot
+import kamon.metric.UserMetrics.{ UserCounter, UserMetricsSnapshot }
+import kamon.metric.instrument.{ Counter, Histogram }
+import kamon.metric._
+
+object LogReporter extends ExtensionId[LogReporterExtension] with ExtensionIdProvider {
+ override def lookup(): ExtensionId[_ <: Extension] = LogReporter
+ override def createExtension(system: ExtendedActorSystem): LogReporterExtension = new LogReporterExtension(system)
+
+ trait MetricKeyGenerator {
+ def localhostName: String
+ def normalizedLocalhostName: String
+ def generateKey(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String
+ }
+}
+
+class LogReporterExtension(system: ExtendedActorSystem) extends Kamon.Extension {
+ val subscriber = system.actorOf(Props[LogReporterSubscriber], "kamon-log-reporter")
+ Kamon(Metrics)(system).subscribe(TraceMetrics, "*", subscriber, permanently = true)
+ Kamon(Metrics)(system).subscribe(ActorMetrics, "*", subscriber, permanently = true)
+ Kamon(Metrics)(system).subscribe(UserMetrics.category, "*", subscriber, permanently = true)
+
+}
+
+class LogReporterSubscriber extends Actor with ActorLogging {
+ import LogReporterSubscriber.RichHistogramSnapshot
+
+ def receive = {
+ case tick: TickMetricSnapshot ⇒ printMetricSnapshot(tick)
+ }
+
+ def printMetricSnapshot(tick: TickMetricSnapshot): Unit = tick.metrics foreach {
+ case (identity, ams: ActorMetricSnapshot) ⇒ logActorMetrics(identity.name, ams)
+ case (identity, tms: TraceMetricsSnapshot) ⇒ logTraceMetrics(identity.name, tms)
+ case (_, ums: UserMetricsSnapshot) ⇒ logUserMetrics(ums)
+ }
+
+ def logActorMetrics(name: String, ams: ActorMetricSnapshot): Unit = {
+ log.info(
+ """
+ |+--------------------------------------------------------------------------------------------------+
+ || |
+ || Actor: %-83s |
+ || |
+ || Processing Time (nanoseconds) Time in Mailbox (nanoseconds) Mailbox Size |
+ || Msg Count: %-12s Msg Count: %-12s Min: %-8s |
+ || Min: %-12s Min: %-12s Avg.: %-8s |
+ || 50th Perc: %-12s 50th Perc: %-12s Max: %-8s |
+ || 90th Perc: %-12s 90th Perc: %-12s |
+ || 95th Perc: %-12s 95th Perc: %-12s |
+ || 99th Perc: %-12s 99th Perc: %-12s Error Count: %-6s |
+ || 99.9th Perc: %-12s 99.9th Perc: %-12s |
+ || Max: %-12s Max: %-12s |
+ || |
+ |+--------------------------------------------------------------------------------------------------+"""
+ .stripMargin.format(
+ name,
+ ams.processingTime.numberOfMeasurements, ams.timeInMailbox.numberOfMeasurements, ams.mailboxSize.min,
+ ams.processingTime.min, ams.timeInMailbox.min, ams.mailboxSize.average,
+ ams.processingTime.percentile(0.50F), ams.timeInMailbox.percentile(0.50F), ams.mailboxSize.max,
+ ams.processingTime.percentile(0.90F), ams.timeInMailbox.percentile(0.90F),
+ ams.processingTime.percentile(0.95F), ams.timeInMailbox.percentile(0.95F),
+ ams.processingTime.percentile(0.99F), ams.timeInMailbox.percentile(0.99F), ams.errors.count,
+ ams.processingTime.percentile(0.999F), ams.timeInMailbox.percentile(0.999F),
+ ams.processingTime.max, ams.timeInMailbox.max))
+ }
+
+ def logTraceMetrics(name: String, tms: TraceMetricsSnapshot): Unit = {
+ val traceMetricsData = StringBuilder.newBuilder
+
+ traceMetricsData.append(
+ """
+ |+--------------------------------------------------------------------------------------------------+
+ || |
+ || Trace: %-83s |
+ || Count: %-8s |
+ || |
+ || Elapsed Time (nanoseconds): |
+ |"""
+ .stripMargin.format(
+ name, tms.elapsedTime.numberOfMeasurements))
+
+ traceMetricsData.append(compactHistogramView(tms.elapsedTime))
+ traceMetricsData.append(
+ """
+ || |
+ |+--------------------------------------------------------------------------------------------------+"""
+ .stripMargin)
+
+ log.info(traceMetricsData.toString())
+ }
+
+ def logUserMetrics(ums: UserMetricsSnapshot): Unit = {
+ val userMetricsData = StringBuilder.newBuilder
+
+ userMetricsData.append(
+ """
+ |+--------------------------------------------------------------------------------------------------+
+ || |
+ || User Counters |
+ || ------------- |
+ |""".stripMargin)
+
+ ums.counters.toList.sortBy(_._1.name.toLowerCase).foreach {
+ case (counter, snapshot) ⇒ userMetricsData.append(userCounterString(counter, snapshot))
+ }
+
+ userMetricsData.append(
+ """|| |
+ || |
+ || User Histograms |
+ || --------------- |
+ |""".stripMargin)
+
+ ums.histograms.foreach {
+ case (histogram, snapshot) ⇒
+ userMetricsData.append("| %-40s |\n".format(histogram.name))
+ userMetricsData.append(compactHistogramView(snapshot))
+ userMetricsData.append("\n| |\n")
+ }
+
+ userMetricsData.append(
+ """|| |
+ || User MinMaxCounters |
+ || ------------------- |
+ |""".stripMargin)
+
+ ums.minMaxCounters.foreach {
+ case (minMaxCounter, snapshot) ⇒
+ userMetricsData.append("| %-40s |\n".format(minMaxCounter.name))
+ userMetricsData.append(simpleHistogramView(snapshot))
+ userMetricsData.append("\n| |\n")
+ }
+
+ userMetricsData.append(
+ """|| |
+ || User Gauges |
+ || ----------- |
+ |"""
+ .stripMargin)
+
+ ums.gauges.foreach {
+ case (gauge, snapshot) ⇒
+ userMetricsData.append("| %-40s |\n".format(gauge.name))
+ userMetricsData.append(simpleHistogramView(snapshot))
+ userMetricsData.append("\n| |\n")
+ }
+
+ userMetricsData.append(
+ """|| |
+ |+--------------------------------------------------------------------------------------------------+"""
+ .stripMargin)
+
+ log.info(userMetricsData.toString())
+ }
+
+ def userCounterString(counter: UserCounter, snapshot: Counter.Snapshot): String = {
+ "| %30s => %-12s |\n"
+ .format(counter.name, snapshot.count)
+ }
+
+ def compactHistogramView(histogram: Histogram.Snapshot): String = {
+ val sb = StringBuilder.newBuilder
+
+ sb.append("| Min: %-11s 50th Perc: %-12s 90th Perc: %-12s 95th Perc: %-12s |\n".format(
+ histogram.min, histogram.percentile(0.50F), histogram.percentile(0.90F), histogram.percentile(0.95F)))
+ sb.append("| 99th Perc: %-12s 99.9th Perc: %-12s Max: %-12s |".format(
+ histogram.percentile(0.99F), histogram.percentile(0.999F), histogram.max))
+
+ sb.toString()
+ }
+
+ def simpleHistogramView(histogram: Histogram.Snapshot): String =
+ "| Min: %-12s Average: %-12s Max: %-12s |"
+ .format(histogram.min, histogram.average, histogram.max)
+}
+
+object LogReporterSubscriber {
+
+ implicit class RichHistogramSnapshot(histogram: Histogram.Snapshot) {
+ def percentile(q: Float): Long = {
+ val records = histogram.recordsIterator
+ val qThreshold = histogram.numberOfMeasurements * q
+ var countToCurrentLevel = 0L
+ var qLevel = 0L
+
+ while (countToCurrentLevel < qThreshold && records.hasNext) {
+ val record = records.next()
+ countToCurrentLevel += record.count
+ qLevel = record.level
+ }
+
+ qLevel
+ }
+
+ def average: Double = {
+ var acc = 0L
+ for (record ← histogram.recordsIterator) {
+ acc += record.count * record.level
+ }
+
+ return acc / histogram.numberOfMeasurements
+ }
+ }
+} \ No newline at end of file
diff --git a/kamon-macros/src/main/scala/kamon/macros/InlineTraceContextMacro.scala b/kamon-macros/src/main/scala/kamon/macros/InlineTraceContextMacro.scala
new file mode 100644
index 00000000..ea8cc7b8
--- /dev/null
+++ b/kamon-macros/src/main/scala/kamon/macros/InlineTraceContextMacro.scala
@@ -0,0 +1,46 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.macros
+
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+object InlineTraceContextMacro {
+
+ // Macro to generate an inline version of kamon.trace.TraceRecorder.withTraceContext
+ def withInlineTraceContextImpl[T: c.WeakTypeTag, TC: c.WeakTypeTag](c: Context)(traceCtx: c.Expr[TC])(thunk: c.Expr[T]) = {
+ import c.universe._
+
+ val inlineThunk =
+ Block(
+ List(
+ ValDef(
+ Modifiers(), newTermName("oldContext"), TypeTree(),
+ Select(Ident(newTermName("TraceRecorder")), newTermName("currentContext"))),
+ Apply(
+ Select(Ident(newTermName("TraceRecorder")), newTermName("setContext")),
+ List(traceCtx.tree))),
+ Try(
+ thunk.tree,
+ List(),
+ Apply(
+ Select(Ident(newTermName("TraceRecorder")), newTermName("setContext")),
+ List(Ident(newTermName("oldContext"))))))
+
+ c.Expr[T](inlineThunk)
+ }
+}
diff --git a/kamon-newrelic/src/main/resources/reference.conf b/kamon-newrelic/src/main/resources/reference.conf
index 24522d63..13aaca2f 100644
--- a/kamon-newrelic/src/main/resources/reference.conf
+++ b/kamon-newrelic/src/main/resources/reference.conf
@@ -7,7 +7,7 @@ kamon {
apdexT = 1 second
app-name = "Kamon[Development]"
- license-key = 2e24765acb032cb9e7207013b5ba3e2ab7d2d75c
+ license-key = e7d350b14228f3d28f35bc3140df2c3e565ea5d5
}
}
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetrics.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetrics.scala
index 08e0add3..44d9c605 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetrics.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetrics.scala
@@ -17,16 +17,16 @@
package kamon.newrelic
import akka.actor.Actor
-import kamon.metrics._
+import kamon.metric._
trait CustomMetrics {
self: Actor ⇒
def collectCustomMetrics(metrics: Map[MetricGroupIdentity, MetricGroupSnapshot]): Seq[NewRelic.Metric] = {
metrics.collect {
- case (CustomMetric(name), groupSnapshot) ⇒
+ case (UserMetrics, groupSnapshot) ⇒
groupSnapshot.metrics collect {
- case (_, snapshot) ⇒ toNewRelicMetric(Scale.Unit)(s"Custom/$name", None, snapshot)
+ case (name, snapshot) ⇒ toNewRelicMetric(Scale.Unit)(s"Custom/$name", None, snapshot)
}
}.flatten.toSeq
}
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/MetricTranslator.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/MetricTranslator.scala
index 46e22571..a3bb6311 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/MetricTranslator.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/MetricTranslator.scala
@@ -17,7 +17,7 @@
package kamon.newrelic
import akka.actor.{ Props, ActorRef, Actor }
-import kamon.metrics.Subscriptions.TickMetricSnapshot
+import kamon.metric.Subscriptions.TickMetricSnapshot
import kamon.newrelic.MetricTranslator.TimeSliceMetrics
class MetricTranslator(receiver: ActorRef) extends Actor
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/NewRelic.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/NewRelic.scala
index c195ed12..d83e0335 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/NewRelic.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/NewRelic.scala
@@ -18,19 +18,20 @@ package kamon.newrelic
import akka.actor._
import scala.concurrent.duration._
import kamon.Kamon
-import kamon.metrics.{ CustomMetric, TickMetricSnapshotBuffer, TraceMetrics, Metrics }
-import kamon.metrics.Subscriptions.TickMetricSnapshot
+import kamon.metric.{ UserMetrics, TickMetricSnapshotBuffer, TraceMetrics, Metrics }
+import kamon.metric.Subscriptions.TickMetricSnapshot
import akka.actor
import java.util.concurrent.TimeUnit.MILLISECONDS
class NewRelicExtension(system: ExtendedActorSystem) extends Kamon.Extension {
val config = system.settings.config.getConfig("kamon.newrelic")
+ val collectionContext = Kamon(Metrics)(system).buildDefaultCollectionContext
val metricsListener = system.actorOf(Props[NewRelicMetricsListener], "kamon-newrelic")
val apdexT: Double = config.getDuration("apdexT", MILLISECONDS) / 1E3 // scale to seconds.
Kamon(Metrics)(system).subscribe(TraceMetrics, "*", metricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(CustomMetric, "*", metricsListener, permanently = true)
+ Kamon(Metrics)(system).subscribe(UserMetrics.category, "*", metricsListener, permanently = true)
}
class NewRelicMetricsListener extends Actor with ActorLogging {
@@ -50,7 +51,7 @@ object NewRelic extends ExtensionId[NewRelicExtension] with ExtensionIdProvider
def createExtension(system: ExtendedActorSystem): NewRelicExtension = new NewRelicExtension(system)
case class Metric(name: String, scope: Option[String], callCount: Long, total: Double, totalExclusive: Double,
- min: Double, max: Double, sumOfSquares: Double) {
+ min: Double, max: Double, sumOfSquares: Double) {
def merge(that: Metric): Metric = {
Metric(name, scope,
@@ -61,6 +62,5 @@ object NewRelic extends ExtensionId[NewRelicExtension] with ExtensionIdProvider
math.max(max, that.max),
sumOfSquares + that.sumOfSquares)
}
-
}
} \ No newline at end of file
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetrics.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetrics.scala
index 90f1e8a5..a8c54684 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetrics.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetrics.scala
@@ -16,28 +16,32 @@
package kamon.newrelic
-import kamon.metrics._
-import kamon.metrics.TraceMetrics.ElapsedTime
+import kamon.metric._
+import kamon.metric.TraceMetrics.ElapsedTime
import akka.actor.Actor
import kamon.Kamon
+import kamon.metric.instrument.Histogram
trait WebTransactionMetrics {
self: Actor ⇒
def collectWebTransactionMetrics(metrics: Map[MetricGroupIdentity, MetricGroupSnapshot]): Seq[NewRelic.Metric] = {
- val apdexBuilder = new ApdexBuilder("Apdex", None, Kamon(NewRelic)(context.system).apdexT)
+ val newRelicExtension = Kamon(NewRelic)(context.system)
+ val apdexBuilder = new ApdexBuilder("Apdex", None, newRelicExtension.apdexT)
+ val collectionContext = newRelicExtension.collectionContext
// Trace metrics are recorded in nanoseconds.
- var accumulatedHttpDispatcher: MetricSnapshotLike = MetricSnapshot(InstrumentTypes.Histogram, 0, Scale.Nano, Vector.empty)
+ var accumulatedHttpDispatcher: Histogram.Snapshot = Histogram.Snapshot.empty(Scale.Nano)
val webTransactionMetrics = metrics.collect {
case (TraceMetrics(name), groupSnapshot) ⇒
groupSnapshot.metrics collect {
- case (ElapsedTime, snapshot) ⇒
- accumulatedHttpDispatcher = accumulatedHttpDispatcher.merge(snapshot)
- snapshot.measurements.foreach(level ⇒
- apdexBuilder.record(Scale.convert(snapshot.scale, Scale.Unit, level.value), level.count))
+ case (ElapsedTime, snapshot: Histogram.Snapshot) ⇒
+ accumulatedHttpDispatcher = accumulatedHttpDispatcher.merge(snapshot, collectionContext)
+ snapshot.recordsIterator.foreach { record ⇒
+ apdexBuilder.record(Scale.convert(snapshot.scale, Scale.Unit, record.level), record.count)
+ }
toNewRelicMetric(Scale.Unit)(s"WebTransaction/Custom/$name", None, snapshot)
}
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/package.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/package.scala
index f6e377c7..89a8b15b 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/package.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/package.scala
@@ -16,26 +16,30 @@
package kamon
-import kamon.metrics.{ Scale, MetricSnapshotLike }
+import kamon.metric.instrument.{ Counter, Histogram }
+import kamon.metric.{ MetricSnapshot, Scale }
package object newrelic {
- def toNewRelicMetric(scale: Scale)(name: String, scope: Option[String], snapshot: MetricSnapshotLike): NewRelic.Metric = {
- var total: Double = 0D
- var sumOfSquares: Double = 0D
+ def toNewRelicMetric(scale: Scale)(name: String, scope: Option[String], snapshot: MetricSnapshot): NewRelic.Metric = {
+ snapshot match {
+ case hs: Histogram.Snapshot ⇒
+ var total: Double = 0D
+ var sumOfSquares: Double = 0D
+ val scaledMin = Scale.convert(hs.scale, scale, hs.min)
+ val scaledMax = Scale.convert(hs.scale, scale, hs.max)
- val measurementLevels = snapshot.measurements.iterator
- while (measurementLevels.hasNext) {
- val level = measurementLevels.next()
- val scaledValue = Scale.convert(snapshot.scale, scale, level.value)
+ hs.recordsIterator.foreach { record ⇒
+ val scaledValue = Scale.convert(hs.scale, scale, record.level)
- total += scaledValue * level.count
- sumOfSquares += (scaledValue * scaledValue) * level.count
- }
+ total += scaledValue * record.count
+ sumOfSquares += (scaledValue * scaledValue) * record.count
+ }
- val scaledMin = Scale.convert(snapshot.scale, scale, snapshot.min)
- val scaledMax = Scale.convert(snapshot.scale, scale, snapshot.max)
+ NewRelic.Metric(name, scope, hs.numberOfMeasurements, total, total, scaledMin, scaledMax, sumOfSquares)
- NewRelic.Metric(name, scope, snapshot.numberOfMeasurements, total, total, scaledMin, scaledMax, sumOfSquares)
+ case cs: Counter.Snapshot ⇒
+ NewRelic.Metric(name, scope, cs.count, cs.count, cs.count, 0, cs.count, cs.count * cs.count)
+ }
}
}
diff --git a/kamon-play/src/main/resources/META-INF/aop.xml b/kamon-play/src/main/resources/META-INF/aop.xml
index ca499a33..e24d48d5 100644
--- a/kamon-play/src/main/resources/META-INF/aop.xml
+++ b/kamon-play/src/main/resources/META-INF/aop.xml
@@ -4,6 +4,7 @@
<aspects>
<aspect name="kamon.play.instrumentation.RequestInstrumentation"/>
<aspect name="kamon.play.instrumentation.WSInstrumentation"/>
+ <aspect name="kamon.play.instrumentation.LoggerLikeInstrumentation"/>
</aspects>
<weaver>
diff --git a/kamon-play/src/main/resources/reference.conf b/kamon-play/src/main/resources/reference.conf
index 47a31ef4..72266a0c 100644
--- a/kamon-play/src/main/resources/reference.conf
+++ b/kamon-play/src/main/resources/reference.conf
@@ -3,6 +3,10 @@
# ================================== #
kamon {
+ metrics {
+ tick-interval = 1 hour
+ }
+
play {
include-trace-token-header = true
trace-token-header-name = "X-Trace-Token"
diff --git a/kamon-play/src/main/scala/kamon/play/Play.scala b/kamon-play/src/main/scala/kamon/play/Play.scala
index ca9c10e5..03436458 100644
--- a/kamon-play/src/main/scala/kamon/play/Play.scala
+++ b/kamon-play/src/main/scala/kamon/play/Play.scala
@@ -18,6 +18,8 @@ package kamon.play
import akka.actor.{ ExtendedActorSystem, Extension, ExtensionIdProvider, ExtensionId }
import kamon.Kamon
+import kamon.http.HttpServerMetrics
+import kamon.metric.Metrics
object Play extends ExtensionId[PlayExtension] with ExtensionIdProvider {
override def lookup(): ExtensionId[_ <: Extension] = Play
@@ -29,6 +31,7 @@ class PlayExtension(private val system: ExtendedActorSystem) extends Kamon.Exten
private val config = system.settings.config.getConfig("kamon.play")
+ val httpServerMetrics = Kamon(Metrics)(system).register(HttpServerMetrics, HttpServerMetrics.Factory).get
val defaultDispatcher = system.dispatchers.lookup(config.getString("dispatcher"))
val includeTraceToken: Boolean = config.getBoolean("include-trace-token-header")
val traceTokenHeaderName: String = config.getString("trace-token-header-name")
diff --git a/kamon-play/src/main/scala/kamon/play/instrumentation/LoggerLikeInstrumentation.scala b/kamon-play/src/main/scala/kamon/play/instrumentation/LoggerLikeInstrumentation.scala
new file mode 100644
index 00000000..b7afeb76
--- /dev/null
+++ b/kamon-play/src/main/scala/kamon/play/instrumentation/LoggerLikeInstrumentation.scala
@@ -0,0 +1,70 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.play.instrumentation
+
+import kamon.trace.{ TraceContext, TraceContextAware }
+import org.aspectj.lang.ProceedingJoinPoint
+import org.aspectj.lang.annotation._
+import org.slf4j.MDC
+
+@Aspect
+class LoggerLikeInstrumentation {
+
+ import LoggerLikeInstrumentation._
+
+ @DeclareMixin("play.api.LoggerLike+")
+ def mixinContextAwareToLoggerLike: TraceContextAware = TraceContextAware.default
+
+ @Pointcut("execution(* play.api.LoggerLike+.info(..))")
+ def infoPointcut(): Unit = {}
+
+ @Pointcut("execution(* play.api.LoggerLike+.warn(..))")
+ def warnPointcut(): Unit = {}
+
+ @Pointcut("execution(* play.api.LoggerLike+.error(..))")
+ def errorPointcut(): Unit = {}
+
+ @Pointcut("execution(* play.api.LoggerLike+.trace(..))")
+ def tracePointcut(): Unit = {}
+
+ @Around("(infoPointcut() || warnPointcut() || errorPointcut() || tracePointcut()) && this(logger)")
+ def aroundLog(pjp: ProceedingJoinPoint, logger: TraceContextAware): Any = {
+ withMDC(logger.traceContext) {
+ pjp.proceed()
+ }
+ }
+}
+
+object LoggerLikeInstrumentation {
+ def withMDC[A](currentContext: Option[TraceContext])(block: ⇒ A): A = {
+ val keys = currentContext.map(extractProperties).map(putAndExtractKeys)
+
+ try block finally keys.map(k ⇒ k.foreach(MDC.remove(_)))
+ }
+
+ def putAndExtractKeys(values: Iterable[Map[String, Any]]): Iterable[String] = values.map {
+ value ⇒ value.map { case (key, value) ⇒ MDC.put(key, value.toString); key }
+ }.flatten
+
+ def extractProperties(ctx: TraceContext): Iterable[Map[String, Any]] = ctx.traceLocalStorage.underlyingStorage.values.map {
+ case traceLocalValue @ (p: Product) ⇒ {
+ val properties = p.productIterator
+ traceLocalValue.getClass.getDeclaredFields.filter(field ⇒ field.getName != "$outer").map(_.getName -> properties.next).toMap
+ }
+ case anything ⇒ Map.empty[String, Any]
+ }
+}
+
diff --git a/kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala b/kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala
index 00170b1b..975510e9 100644
--- a/kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala
+++ b/kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala
@@ -1,23 +1,22 @@
-/* ===================================================
+/* =========================================================================================
* Copyright © 2013-2014 the kamon project <http://kamon.io/>
*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ========================================================== */
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
package kamon.play.instrumentation
import kamon.Kamon
-import kamon.play.Play
+import kamon.play.{ PlayExtension, Play }
import kamon.trace.{ TraceContextAware, TraceRecorder }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
@@ -49,15 +48,21 @@ class RequestInstrumentation {
}
@Around("execution(* play.api.GlobalSettings+.doFilter(*)) && args(next)")
- def afterDoFilter(pjp: ProceedingJoinPoint, next: EssentialAction): Any = {
+ def aroundDoFilter(pjp: ProceedingJoinPoint, next: EssentialAction): Any = {
val essentialAction = (requestHeader: RequestHeader) ⇒ {
val incomingContext = TraceRecorder.currentContext
- val executor = Kamon(Play)(Akka.system()).defaultDispatcher
+ val playExtension = Kamon(Play)(Akka.system())
+ val executor = playExtension.defaultDispatcher
next(requestHeader).map {
result ⇒
+ TraceRecorder.currentContext.map { ctx ⇒
+ recordHttpServerMetrics(result, ctx.name, playExtension)
+ }
+
TraceRecorder.finish()
+
incomingContext match {
case None ⇒ result
case Some(traceContext) ⇒
@@ -71,6 +76,9 @@ class RequestInstrumentation {
pjp.proceed(Array(EssentialAction(essentialAction)))
}
+ def recordHttpServerMetrics(result: Result, traceName: String, playExtension: PlayExtension): Unit =
+ playExtension.httpServerMetrics.recordResponse(traceName, result.header.status.toString, 1L)
+
@Around("execution(* play.api.GlobalSettings+.onError(..)) && args(request, ex)")
def aroundOnError(pjp: ProceedingJoinPoint, request: TraceContextAware, ex: Throwable): Any = request.traceContext match {
case None ⇒ pjp.proceed()
diff --git a/kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala b/kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala
index 0951d2c9..2862ba19 100644
--- a/kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala
+++ b/kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala
@@ -19,7 +19,7 @@ package kamon.play.instrumentation
import org.aspectj.lang.annotation.{ Around, Pointcut, Aspect }
import org.aspectj.lang.ProceedingJoinPoint
import kamon.trace.TraceRecorder
-import kamon.metrics.TraceMetrics.HttpClientRequest
+import kamon.metric.TraceMetrics.HttpClientRequest
import play.api.libs.ws.WSRequest
import scala.concurrent.Future
import play.api.libs.ws.WSResponse
@@ -36,7 +36,7 @@ class WSInstrumentation {
def aroundExecuteRequest(pjp: ProceedingJoinPoint, request: WSRequest): Any = {
import WSInstrumentation._
- val completionHandle = TraceRecorder.startSegment(HttpClientRequest(request.url, UserTime), basicRequestAttributes(request))
+ val completionHandle = TraceRecorder.startSegment(HttpClientRequest(request.url), basicRequestAttributes(request))
val response = pjp.proceed().asInstanceOf[Future[WSResponse]]
@@ -50,7 +50,6 @@ class WSInstrumentation {
}
object WSInstrumentation {
- val UserTime = "UserTime"
def basicRequestAttributes(request: WSRequest): Map[String, String] = {
Map[String, String](
diff --git a/kamon-play/src/test/scala/kamon/play/LoggerLikeInstrumentationSpec.scala b/kamon-play/src/test/scala/kamon/play/LoggerLikeInstrumentationSpec.scala
new file mode 100644
index 00000000..c41f7004
--- /dev/null
+++ b/kamon-play/src/test/scala/kamon/play/LoggerLikeInstrumentationSpec.scala
@@ -0,0 +1,122 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.play
+
+import ch.qos.logback.classic.spi.ILoggingEvent
+import ch.qos.logback.classic.{ AsyncAppender, LoggerContext }
+import ch.qos.logback.core.read.ListAppender
+import ch.qos.logback.core.status.NopStatusListener
+import kamon.trace.TraceLocal
+import org.scalatest.BeforeAndAfter
+import org.scalatestplus.play._
+import org.slf4j
+import play.api.LoggerLike
+import play.api.mvc.Results.Ok
+import play.api.mvc._
+import play.api.test.Helpers._
+import play.api.test._
+
+import scala.concurrent.Future
+
+class LoggerLikeInstrumentationSpec extends PlaySpec with OneServerPerSuite with BeforeAndAfter {
+
+ System.setProperty("config.file", "./kamon-play/src/test/resources/conf/application.conf")
+
+ val executor = scala.concurrent.ExecutionContext.Implicits.global
+
+ val infoMessage = "Info Message"
+ val headerValue = "My header value"
+ val otherValue = "My other value"
+
+ case class LocalStorageValue(header: String, other: String)
+
+ object TraceLocalKey extends TraceLocal.TraceLocalKey {
+ type ValueType = LocalStorageValue
+ }
+
+ before {
+ LoggingHandler.startLogging()
+ }
+
+ after {
+ LoggingHandler.stopLogging()
+ }
+
+ implicit override lazy val app = FakeApplication(withRoutes = {
+
+ case ("GET", "/logging") ⇒
+ Action.async {
+ Future {
+ TraceLocal.store(TraceLocalKey)(LocalStorageValue(headerValue, otherValue))
+ LoggingHandler.info(infoMessage)
+ Ok("OK")
+ }(executor)
+ }
+ })
+
+ "the LoggerLike instrumentation" should {
+ "be put the properties of TraceLocal into the MDC as key -> value in a request" in {
+ LoggingHandler.appenderStart()
+
+ val Some(result) = route(FakeRequest(GET, "/logging"))
+ Thread.sleep(500) // wait to complete the future
+ TraceLocal.retrieve(TraceLocalKey) must be(Some(LocalStorageValue(headerValue, otherValue)))
+
+ LoggingHandler.appenderStop()
+
+ headerValue must be(LoggingHandler.getValueFromMDC("header"))
+ otherValue must be(LoggingHandler.getValueFromMDC("other"))
+ }
+ }
+}
+
+object LoggingHandler extends LoggerLike {
+
+ val loggerContext = new LoggerContext()
+ val rootLogger = loggerContext.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME)
+ val asyncAppender = new AsyncAppender()
+ val listAppender = new ListAppender[ILoggingEvent]()
+ val nopStatusListener = new NopStatusListener()
+
+ override val logger: slf4j.Logger = rootLogger
+
+ def startLogging(): Unit = {
+ loggerContext.getStatusManager().add(nopStatusListener)
+ asyncAppender.setContext(loggerContext)
+ listAppender.setContext(loggerContext)
+ listAppender.setName("list")
+ listAppender.start()
+ }
+
+ def stopLogging(): Unit = {
+ listAppender.stop()
+ }
+
+ def appenderStart(): Unit = {
+ asyncAppender.addAppender(listAppender)
+ asyncAppender.start()
+ rootLogger.addAppender(asyncAppender)
+ }
+
+ def appenderStop(): Unit = {
+ asyncAppender.stop()
+ }
+
+ def getValueFromMDC(key: String): String = {
+ listAppender.list.get(0).getMDCPropertyMap.get(key)
+ }
+}
+
diff --git a/kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala b/kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala
index 710c6ed5..fc195580 100644
--- a/kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala
+++ b/kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala
@@ -1,21 +1,24 @@
-/* ===================================================
+/* =========================================================================================
* Copyright © 2013-2014 the kamon project <http://kamon.io/>
*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ========================================================== */
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
package kamon.play
+import scala.concurrent.duration._
+import kamon.Kamon
+import kamon.http.HttpServerMetrics
+import kamon.metric.{ CollectionContext, Metrics }
import kamon.play.action.TraceName
import kamon.trace.{ TraceLocal, TraceRecorder }
import org.scalatestplus.play._
@@ -24,8 +27,9 @@ import play.api.mvc.Results.Ok
import play.api.mvc._
import play.api.test.Helpers._
import play.api.test._
+import play.libs.Akka
-import scala.concurrent.Future
+import scala.concurrent.{ Await, Future }
class RequestInstrumentationSpec extends PlaySpec with OneServerPerSuite {
@@ -108,6 +112,25 @@ class RequestInstrumentationSpec extends PlaySpec with OneServerPerSuite {
val Some(result) = route(FakeRequest(GET, "/retrieve").withHeaders(traceTokenHeader, traceLocalStorageHeader))
TraceLocal.retrieve(TraceLocalKey).get must be(traceLocalStorageValue)
}
+
+ "record http server metrics for all processed requests" in {
+ val collectionContext = CollectionContext(100)
+ Kamon(Metrics)(Akka.system()).register(HttpServerMetrics, HttpServerMetrics.Factory).get.collect(collectionContext)
+
+ for (repetition ← 1 to 10) {
+ Await.result(route(FakeRequest(GET, "/default").withHeaders(traceTokenHeader)).get, 10 seconds)
+ }
+
+ for (repetition ← 1 to 5) {
+ Await.result(route(FakeRequest(GET, "/notFound").withHeaders(traceTokenHeader)).get, 10 seconds)
+ }
+
+ val snapshot = Kamon(Metrics)(Akka.system()).register(HttpServerMetrics, HttpServerMetrics.Factory).get.collect(collectionContext)
+ snapshot.countsPerTraceAndStatusCode("GET: /default")("200").count must be(10)
+ snapshot.countsPerTraceAndStatusCode("GET: /notFound")("404").count must be(5)
+ snapshot.countsPerStatusCode("200").count must be(10)
+ snapshot.countsPerStatusCode("404").count must be(5)
+ }
}
object MockGlobalTest extends WithFilters(TraceLocalFilter)
diff --git a/kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala b/kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala
index 0c3783bb..775d3e26 100644
--- a/kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala
+++ b/kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala
@@ -28,9 +28,9 @@ import akka.testkit.{ TestKitBase, TestProbe }
import com.typesafe.config.ConfigFactory
import org.scalatest.{ Matchers, WordSpecLike }
import kamon.Kamon
-import kamon.metrics.{ TraceMetrics, Metrics }
-import kamon.metrics.Subscriptions.TickMetricSnapshot
-import kamon.metrics.TraceMetrics.ElapsedTime
+import kamon.metric.{ TraceMetrics, Metrics }
+import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.TraceMetrics.ElapsedTime
class WSInstrumentationSpec extends TestKitBase with WordSpecLike with Matchers with OneServerPerSuite {
diff --git a/kamon-playground/src/main/resources/application.conf b/kamon-playground/src/main/resources/application.conf
index 3d57141a..bac3c97e 100644
--- a/kamon-playground/src/main/resources/application.conf
+++ b/kamon-playground/src/main/resources/application.conf
@@ -1,6 +1,7 @@
akka {
loglevel = INFO
+ extensions = ["kamon.logreporter.LogReporter"]
actor {
debug {
@@ -18,13 +19,13 @@ spray.can {
kamon {
newrelic {
app-name = "SimpleRequestProcessor"
- license-key = 2e24765acb032cb9e7207013b5ba3e2ab7d2d75c
+ license-key = e7d350b14228f3d28f35bc3140df2c3e565ea5d5
}
}
kamon {
metrics {
- tick-interval = 1 second
+ tick-interval = 10 second
filters = [
{
diff --git a/kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala b/kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala
index 664bd4f9..301a9bbd 100644
--- a/kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala
+++ b/kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala
@@ -26,9 +26,9 @@ import scala.util.Random
import akka.routing.RoundRobinPool
import kamon.trace.TraceRecorder
import kamon.Kamon
-import kamon.metrics._
+import kamon.metric._
import spray.http.{ StatusCodes, Uri }
-import kamon.metrics.Subscriptions.TickMetricSnapshot
+import kamon.metric.Subscriptions.TickMetricSnapshot
object SimpleRequestProcessor extends App with SimpleRoutingApp with RequestBuilding with KamonTraceDirectives {
import scala.concurrent.duration._
@@ -51,12 +51,23 @@ object SimpleRequestProcessor extends App with SimpleRoutingApp with RequestBuil
implicit val timeout = Timeout(30 seconds)
+ val counter = Kamon(UserMetrics).registerCounter("requests")
+ Kamon(UserMetrics).registerCounter("requests-2")
+ Kamon(UserMetrics).registerCounter("requests-3")
+
+ Kamon(UserMetrics).registerHistogram("histogram-1")
+ Kamon(UserMetrics).registerHistogram("histogram-2")
+
+ Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-1")
+ Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-2")
+ Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-3")
+
+ //Kamon(UserMetrics).registerGauge("test-gauge")(() => 10L)
+
val pipeline = sendReceive
val replier = system.actorOf(Props[Replier].withRouter(RoundRobinPool(nrOfInstances = 2)), "replier")
val random = new Random()
- val requestCountRecorder = Kamon(Metrics).register(CustomMetric("GetCount"), CustomMetric.histogram(10, 3, Scale.Unit))
-
startServer(interface = "localhost", port = 9090) {
get {
path("test") {
@@ -87,7 +98,6 @@ object SimpleRequestProcessor extends App with SimpleRoutingApp with RequestBuil
path("ok") {
traceName("OK") {
complete {
- requestCountRecorder.map(_.record(1))
"ok"
}
}
@@ -95,6 +105,7 @@ object SimpleRequestProcessor extends App with SimpleRoutingApp with RequestBuil
path("future") {
traceName("OK-Future") {
dynamic {
+ counter.increment()
complete(Future { "OK" })
}
}
diff --git a/kamon-spray/src/main/scala/kamon/spray/Spray.scala b/kamon-spray/src/main/scala/kamon/spray/Spray.scala
index 9de1882a..76adb214 100644
--- a/kamon-spray/src/main/scala/kamon/spray/Spray.scala
+++ b/kamon-spray/src/main/scala/kamon/spray/Spray.scala
@@ -19,6 +19,8 @@ package kamon.spray
import akka.actor.{ ExtendedActorSystem, ExtensionIdProvider, ExtensionId }
import akka.actor
import kamon.Kamon
+import kamon.http.HttpServerMetrics
+import kamon.metric.Metrics
import spray.http.HttpRequest
object Spray extends ExtensionId[SprayExtension] with ExtensionIdProvider {
@@ -38,6 +40,8 @@ class SprayExtension(private val system: ExtendedActorSystem) extends Kamon.Exte
val includeTraceToken: Boolean = config.getBoolean("automatic-trace-token-propagation")
val traceTokenHeaderName: String = config.getString("trace-token-header-name")
+ val httpServerMetrics = Kamon(Metrics)(system).register(HttpServerMetrics, HttpServerMetrics.Factory).get
+ // It's safe to assume that HttpServerMetrics will always exist because there is no particular filter for it.
val clientSegmentCollectionStrategy: ClientSegmentCollectionStrategy.Strategy =
config.getString("client.segment-collection-strategy") match {
diff --git a/kamon-spray/src/main/scala/spray/can/client/ClientRequestInstrumentation.scala b/kamon-spray/src/main/scala/spray/can/client/ClientRequestInstrumentation.scala
index d7d9cf09..d787bda4 100644
--- a/kamon-spray/src/main/scala/spray/can/client/ClientRequestInstrumentation.scala
+++ b/kamon-spray/src/main/scala/spray/can/client/ClientRequestInstrumentation.scala
@@ -21,7 +21,7 @@ import org.aspectj.lang.ProceedingJoinPoint
import spray.http.{ HttpHeader, HttpResponse, HttpMessageEnd, HttpRequest }
import spray.http.HttpHeaders.{ RawHeader, Host }
import kamon.trace.{ TraceRecorder, SegmentCompletionHandleAware }
-import kamon.metrics.TraceMetrics.HttpClientRequest
+import kamon.metric.TraceMetrics.HttpClientRequest
import kamon.Kamon
import kamon.spray.{ ClientSegmentCollectionStrategy, Spray }
import akka.actor.ActorRef
@@ -30,7 +30,6 @@ import akka.util.Timeout
@Aspect
class ClientRequestInstrumentation {
- import ClientRequestInstrumentation._
@DeclareMixin("spray.can.client.HttpHostConnector.RequestContext")
def mixin: SegmentCompletionHandleAware = SegmentCompletionHandleAware.default
@@ -51,7 +50,7 @@ class ClientRequestInstrumentation {
if (sprayExtension.clientSegmentCollectionStrategy == ClientSegmentCollectionStrategy.Internal) {
val requestAttributes = basicRequestAttributes(request)
val clientRequestName = sprayExtension.assignHttpClientRequestName(request)
- val completionHandle = traceContext.startSegment(HttpClientRequest(clientRequestName, SprayTime), requestAttributes)
+ val completionHandle = traceContext.startSegment(HttpClientRequest(clientRequestName), requestAttributes)
ctx.segmentCompletionHandle = Some(completionHandle)
}
@@ -102,7 +101,7 @@ class ClientRequestInstrumentation {
if (sprayExtension.clientSegmentCollectionStrategy == ClientSegmentCollectionStrategy.Pipelining) {
val requestAttributes = basicRequestAttributes(request)
val clientRequestName = sprayExtension.assignHttpClientRequestName(request)
- val completionHandle = traceContext.startSegment(HttpClientRequest(clientRequestName, UserTime), requestAttributes)
+ val completionHandle = traceContext.startSegment(HttpClientRequest(clientRequestName), requestAttributes)
responseFuture.onComplete { result ⇒
completionHandle.finish(Map.empty)
@@ -139,8 +138,3 @@ class ClientRequestInstrumentation {
pjp.proceed(Array(modifiedHeaders))
}
}
-
-object ClientRequestInstrumentation {
- val SprayTime = "SprayTime"
- val UserTime = "UserTime"
-}
diff --git a/kamon-spray/src/main/scala/spray/can/server/ServerRequestInstrumentation.scala b/kamon-spray/src/main/scala/spray/can/server/ServerRequestInstrumentation.scala
index 85782e22..69b0160e 100644
--- a/kamon-spray/src/main/scala/spray/can/server/ServerRequestInstrumentation.scala
+++ b/kamon-spray/src/main/scala/spray/can/server/ServerRequestInstrumentation.scala
@@ -22,7 +22,7 @@ import spray.http.{ HttpResponse, HttpMessagePartWrapper, HttpRequest }
import akka.event.Logging.Warning
import scala.Some
import kamon.Kamon
-import kamon.spray.Spray
+import kamon.spray.{ SprayExtension, Spray }
import org.aspectj.lang.ProceedingJoinPoint
import spray.http.HttpHeaders.RawHeader
@@ -68,20 +68,21 @@ class ServerRequestInstrumentation {
val storedContext = openRequest.traceContext
verifyTraceContextConsistency(incomingContext, storedContext)
- val proceedResult = incomingContext match {
+ incomingContext match {
case None ⇒ pjp.proceed()
case Some(traceContext) ⇒
val sprayExtension = Kamon(Spray)(traceContext.system)
- if (sprayExtension.includeTraceToken) {
+ val proceedResult = if (sprayExtension.includeTraceToken) {
val responseWithHeader = includeTraceTokenIfPossible(response, sprayExtension.traceTokenHeaderName, traceContext.token)
pjp.proceed(Array(openRequest, responseWithHeader))
} else pjp.proceed
- }
- TraceRecorder.finish()
- proceedResult
+ TraceRecorder.finish()
+ recordHttpServerMetrics(response, traceContext.name, sprayExtension)
+ proceedResult
+ }
}
def verifyTraceContextConsistency(incomingTraceContext: Option[TraceContext], storedTraceContext: Option[TraceContext]): Unit = {
@@ -102,6 +103,12 @@ class ServerRequestInstrumentation {
}
+ def recordHttpServerMetrics(response: HttpMessagePartWrapper, traceName: String, sprayExtension: SprayExtension): Unit =
+ response match {
+ case httpResponse: HttpResponse ⇒ sprayExtension.httpServerMetrics.recordResponse(traceName, httpResponse.status.intValue.toString)
+ case other ⇒ // Nothing to do then.
+ }
+
def includeTraceTokenIfPossible(response: HttpMessagePartWrapper, traceTokenHeaderName: String, token: String): HttpMessagePartWrapper =
response match {
case response: HttpResponse ⇒ response.withHeaders(response.headers ::: RawHeader(traceTokenHeaderName, token) :: Nil)
diff --git a/kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala b/kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala
index 9469924a..54329645 100644
--- a/kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala
+++ b/kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala
@@ -26,13 +26,12 @@ import com.typesafe.config.ConfigFactory
import spray.can.Http
import spray.http.HttpHeaders.RawHeader
import kamon.Kamon
-import kamon.metrics.{ TraceMetrics, Metrics }
+import kamon.metric.{ TraceMetrics, Metrics }
import spray.client.pipelining
-import kamon.metrics.Subscriptions.TickMetricSnapshot
-import spray.can.client.ClientRequestInstrumentation
+import kamon.metric.Subscriptions.TickMetricSnapshot
import scala.concurrent.duration._
import akka.pattern.pipe
-import kamon.metrics.TraceMetrics.TraceMetricSnapshot
+import kamon.metric.TraceMetrics.{ HttpClientRequest, TraceMetricsSnapshot }
class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike with Matchers with RequestBuilding with TestServer {
implicit lazy val system: ActorSystem = ActorSystem("client-request-instrumentation-spec", ConfigFactory.parseString(
@@ -149,7 +148,7 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val traceMetrics = expectTraceMetrics("pipelining-strategy-client-request", metricListener, 3 seconds)
traceMetrics.elapsedTime.numberOfMeasurements should be(1L)
traceMetrics.segments should not be empty
- val recordedSegment = traceMetrics.segments.find { case (k, v) ⇒ k.tag == ClientRequestInstrumentation.UserTime } map (_._2)
+ val recordedSegment = traceMetrics.segments.find { case (k, v) ⇒ k.isInstanceOf[HttpClientRequest] } map (_._2)
recordedSegment should not be empty
recordedSegment map { segmentMetrics ⇒
segmentMetrics.numberOfMeasurements should be(1L)
@@ -190,7 +189,7 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val traceMetrics = expectTraceMetrics("internal-strategy-client-request", metricListener, 3 seconds)
traceMetrics.elapsedTime.numberOfMeasurements should be(1L)
traceMetrics.segments should not be empty
- val recordedSegment = traceMetrics.segments.find { case (k, v) ⇒ k.tag == ClientRequestInstrumentation.SprayTime } map (_._2)
+ val recordedSegment = traceMetrics.segments.find { case (k, v) ⇒ k.isInstanceOf[HttpClientRequest] } map (_._2)
recordedSegment should not be empty
recordedSegment map { segmentMetrics ⇒
segmentMetrics.numberOfMeasurements should be(1L)
@@ -199,14 +198,14 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
}
}
- def expectTraceMetrics(traceName: String, listener: TestProbe, timeout: FiniteDuration): TraceMetricSnapshot = {
+ def expectTraceMetrics(traceName: String, listener: TestProbe, timeout: FiniteDuration): TraceMetricsSnapshot = {
val tickSnapshot = within(timeout) {
listener.expectMsgType[TickMetricSnapshot]
}
val metricsOption = tickSnapshot.metrics.get(TraceMetrics(traceName))
metricsOption should not be empty
- metricsOption.get.asInstanceOf[TraceMetricSnapshot]
+ metricsOption.get.asInstanceOf[TraceMetricsSnapshot]
}
def enableInternalSegmentCollectionStrategy(): Unit = setSegmentCollectionStrategy(ClientSegmentCollectionStrategy.Internal)
diff --git a/kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala b/kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala
new file mode 100644
index 00000000..c4b370d7
--- /dev/null
+++ b/kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala
@@ -0,0 +1,91 @@
+package kamon.spray
+
+import akka.actor.ActorSystem
+import akka.testkit.{ TestProbe, TestKitBase }
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.http.HttpServerMetrics
+import kamon.metric._
+import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
+import org.scalatest.{ Matchers, WordSpecLike }
+import spray.http.{ StatusCodes, HttpResponse, HttpRequest }
+import spray.httpx.RequestBuilding
+
+class SprayServerMetricsSpec extends TestKitBase with WordSpecLike with Matchers with RequestBuilding
+ with ScalaFutures with PatienceConfiguration with TestServer {
+
+ val collectionContext = CollectionContext(100)
+
+ implicit lazy val system: ActorSystem = ActorSystem("spray-server-metrics-spec", ConfigFactory.parseString(
+ """
+ |akka {
+ | loglevel = ERROR
+ |}
+ |
+ |kamon {
+ | metrics {
+ | tick-interval = 1 hour
+ |
+ | filters = [
+ | {
+ | trace {
+ | includes = [ "*" ]
+ | excludes = []
+ | }
+ | }
+ | ]
+ | }
+ |}
+ """.stripMargin))
+
+ "the Spray Server metrics instrumentation" should {
+ "record trace metrics for requests received" in {
+ Kamon(Metrics)(system).register(TraceMetrics("GET: /record-trace-metrics"), TraceMetrics.Factory).get.collect(collectionContext)
+ val (connection, server) = buildClientConnectionAndServer
+ val client = TestProbe()
+
+ for (repetition ← 1 to 10) {
+ client.send(connection, Get("/record-trace-metrics"))
+ server.expectMsgType[HttpRequest]
+ server.reply(HttpResponse(entity = "ok"))
+ client.expectMsgType[HttpResponse]
+ }
+
+ for (repetition ← 1 to 5) {
+ client.send(connection, Get("/record-trace-metrics"))
+ server.expectMsgType[HttpRequest]
+ server.reply(HttpResponse(entity = "bad-request", status = StatusCodes.BadRequest))
+ client.expectMsgType[HttpResponse]
+ }
+
+ val snapshot = Kamon(Metrics)(system).register(TraceMetrics("GET: /record-trace-metrics"), TraceMetrics.Factory).get.collect(collectionContext)
+ snapshot.elapsedTime.numberOfMeasurements should be(15)
+ }
+
+ "record http serve metrics for all the requests" in {
+ Kamon(Metrics)(system).register(HttpServerMetrics, HttpServerMetrics.Factory).get.collect(collectionContext)
+ val (connection, server) = buildClientConnectionAndServer
+ val client = TestProbe()
+
+ for (repetition ← 1 to 10) {
+ client.send(connection, Get("/record-http-metrics"))
+ server.expectMsgType[HttpRequest]
+ server.reply(HttpResponse(entity = "ok"))
+ client.expectMsgType[HttpResponse]
+ }
+
+ for (repetition ← 1 to 5) {
+ client.send(connection, Get("/record-http-metrics"))
+ server.expectMsgType[HttpRequest]
+ server.reply(HttpResponse(entity = "bad-request", status = StatusCodes.BadRequest))
+ client.expectMsgType[HttpResponse]
+ }
+
+ val snapshot = Kamon(Metrics)(system).register(HttpServerMetrics, HttpServerMetrics.Factory).get.collect(collectionContext)
+ snapshot.countsPerTraceAndStatusCode("GET: /record-http-metrics")("200").count should be(10)
+ snapshot.countsPerTraceAndStatusCode("GET: /record-http-metrics")("400").count should be(5)
+ snapshot.countsPerStatusCode("200").count should be(10)
+ snapshot.countsPerStatusCode("400").count should be(5)
+ }
+ }
+}
diff --git a/kamon-spray/src/test/scala/kamon/spray/ServerRequestInstrumentationSpec.scala b/kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala
index 7edbbe11..48253b1d 100644
--- a/kamon-spray/src/test/scala/kamon/spray/ServerRequestInstrumentationSpec.scala
+++ b/kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala
@@ -24,15 +24,16 @@ import kamon.Kamon
import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
import spray.http.HttpHeaders.RawHeader
import spray.http.{ HttpResponse, HttpRequest }
-import kamon.metrics.{ TraceMetrics, Metrics }
-import kamon.metrics.Subscriptions.TickMetricSnapshot
+import kamon.metric.{ TraceMetrics, Metrics }
+import kamon.metric.Subscriptions.TickMetricSnapshot
import com.typesafe.config.ConfigFactory
-import kamon.metrics.TraceMetrics.ElapsedTime
+import kamon.metric.TraceMetrics.ElapsedTime
+import kamon.metric.instrument.Histogram
-class ServerRequestInstrumentationSpec extends TestKitBase with WordSpecLike with Matchers with RequestBuilding
+class SprayServerTracingSpec extends TestKitBase with WordSpecLike with Matchers with RequestBuilding
with ScalaFutures with PatienceConfiguration with TestServer {
- implicit lazy val system: ActorSystem = ActorSystem("client-pipelining-segment-strategy-instrumentation-spec", ConfigFactory.parseString(
+ implicit lazy val system: ActorSystem = ActorSystem("spray-server-tracing-spec", ConfigFactory.parseString(
"""
|akka {
| loglevel = ERROR
@@ -51,12 +52,6 @@ class ServerRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
| }
| ]
| }
- |
- | spray {
- | client {
- | segment-collection-strategy = internal
- | }
- | }
|}
""".stripMargin))
@@ -103,29 +98,6 @@ class ServerRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
response.headers should not contain (RawHeader(Kamon(Spray).traceTokenHeaderName, "propagation-disabled"))
}
-
- "open and finish a trace during the lifetime of a request" in {
- val (connection, server) = buildClientConnectionAndServer
- val client = TestProbe()
-
- val metricListener = TestProbe()
- Kamon(Metrics)(system).subscribe(TraceMetrics, "*", metricListener.ref, permanently = true)
- metricListener.expectMsgType[TickMetricSnapshot]
-
- client.send(connection, Get("/open-and-finish"))
- server.expectMsgType[HttpRequest]
- server.reply(HttpResponse(entity = "ok"))
- client.expectMsgType[HttpResponse]
-
- val tickSnapshot = metricListener.expectMsgType[TickMetricSnapshot]
- val traceMetrics = tickSnapshot.metrics.find { case (k, v) ⇒ k.name.contains("open-and-finish") } map (_._2.metrics)
- traceMetrics should not be empty
-
- traceMetrics map { metrics ⇒
- metrics(ElapsedTime).numberOfMeasurements should be(1L)
- }
- }
-
}
def enableAutomaticTraceTokenPropagation(): Unit = setIncludeTraceToken(true)
diff --git a/kamon-spray/src/test/scala/kamon/spray/TestServer.scala b/kamon-spray/src/test/scala/kamon/spray/TestServer.scala
index 81242133..65506770 100644
--- a/kamon-spray/src/test/scala/kamon/spray/TestServer.scala
+++ b/kamon-spray/src/test/scala/kamon/spray/TestServer.scala
@@ -29,11 +29,9 @@ trait TestServer {
def buildClientConnectionAndServer: (ActorRef, TestProbe) = {
val serverHandler = TestProbe()
IO(Http).tell(Http.Bind(listener = serverHandler.ref, interface = "127.0.0.1", port = 0), serverHandler.ref)
- val bound = within(10 seconds) {
- serverHandler.expectMsgType[Bound]
- }
-
+ val bound = serverHandler.expectMsgType[Bound](10 seconds)
val client = clientConnection(bound)
+
serverHandler.expectMsgType[Http.Connected]
serverHandler.reply(Http.Register(serverHandler.ref))
@@ -50,10 +48,7 @@ trait TestServer {
def buildSHostConnectorAndServer: (ActorRef, TestProbe) = {
val serverHandler = TestProbe()
IO(Http).tell(Http.Bind(listener = serverHandler.ref, interface = "127.0.0.1", port = 0), serverHandler.ref)
- val bound = within(10 seconds) {
- serverHandler.expectMsgType[Bound]
- }
-
+ val bound = serverHandler.expectMsgType[Bound](10 seconds)
val client = httpHostConnector(bound)
(client, serverHandler)
diff --git a/kamon-statsd/src/main/resources/reference.conf b/kamon-statsd/src/main/resources/reference.conf
index fd6293d9..eac5eade 100644
--- a/kamon-statsd/src/main/resources/reference.conf
+++ b/kamon-statsd/src/main/resources/reference.conf
@@ -24,6 +24,10 @@ kamon {
dispatcher = [ "*" ]
}
+ # Enable system metrics
+# In order to not get a ClassNotFoundException, we must register the kamon-system-metrics module
+ report-system-metrics = false
+
simple-metric-key-generator {
# Application prefix for all metrics pushed to StatsD. The default namespacing scheme for metrics follows
# this pattern:
diff --git a/kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala b/kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala
index 1b3daa97..e9350fae 100644
--- a/kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala
+++ b/kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala
@@ -18,6 +18,7 @@ package kamon.statsd
import akka.actor._
import kamon.Kamon
+import kamon.metric._
import kamon.metrics._
import scala.concurrent.duration._
import scala.collection.JavaConverters._
@@ -32,6 +33,8 @@ object StatsD extends ExtensionId[StatsDExtension] with ExtensionIdProvider {
override def createExtension(system: ExtendedActorSystem): StatsDExtension = new StatsDExtension(system)
trait MetricKeyGenerator {
+ def localhostName: String
+ def normalizedLocalhostName: String
def generateKey(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String
}
}
@@ -67,24 +70,44 @@ class StatsDExtension(system: ExtendedActorSystem) extends Kamon.Extension {
Kamon(Metrics)(system).subscribe(DispatcherMetrics, dispatcherPathPattern, statsDMetricsListener, permanently = true)
}
+ // Subscribe to SystemMetrics
+ val includeSystemMetrics = statsDConfig.getBoolean("report-system-metrics")
+ if (includeSystemMetrics) {
+ List(CPUMetrics, ProcessCPUMetrics, MemoryMetrics, NetworkMetrics, GCMetrics, HeapMetrics) foreach { metric ⇒
+ Kamon(Metrics)(system).subscribe(metric, "*", statsDMetricsListener, permanently = true)
+ }
+ }
+
def buildMetricsListener(tickInterval: Long, flushInterval: Long): ActorRef = {
assert(flushInterval >= tickInterval, "StatsD flush-interval needs to be equal or greater to the tick-interval")
+ val defaultMetricKeyGenerator = new SimpleMetricKeyGenerator(system.settings.config)
+
+ val metricsSender = system.actorOf(StatsDMetricsSender.props(
+ statsDHost,
+ maxPacketSizeInBytes,
+ defaultMetricKeyGenerator), "statsd-metrics-sender")
- val metricsTranslator = system.actorOf(StatsDMetricsSender.props(statsDHost, maxPacketSizeInBytes), "statsd-metrics-sender")
if (flushInterval == tickInterval) {
// No need to buffer the metrics, let's go straight to the metrics sender.
- metricsTranslator
+ metricsSender
} else {
- system.actorOf(TickMetricSnapshotBuffer.props(flushInterval.toInt.millis, metricsTranslator), "statsd-metrics-buffer")
+ system.actorOf(TickMetricSnapshotBuffer.props(flushInterval.toInt.millis, metricsSender), "statsd-metrics-buffer")
}
}
}
class SimpleMetricKeyGenerator(config: Config) extends StatsD.MetricKeyGenerator {
val application = config.getString("kamon.statsd.simple-metric-key-generator.application")
- val localhostName = ManagementFactory.getRuntimeMXBean.getName.split('@')(1)
+ val _localhostName = ManagementFactory.getRuntimeMXBean.getName.split('@')(1)
+ val _normalizedLocalhostName = _localhostName.replace('.', '_')
- def generateKey(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String =
- s"${application}.${localhostName}.${groupIdentity.category.name}.${groupIdentity.name}.${metricIdentity.name}"
+ def localhostName: String = _localhostName
+
+ def normalizedLocalhostName: String = _normalizedLocalhostName
+
+ def generateKey(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String = {
+ val normalizedGroupName = groupIdentity.name.replace(": ", "-").replace(" ", "_").replace("/", "_")
+ s"${application}.${normalizedLocalhostName}.${groupIdentity.category.name}.${normalizedGroupName}.${metricIdentity.name}"
+ }
}
diff --git a/kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala b/kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala
index adda18cc..8fbf4fee 100644
--- a/kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala
+++ b/kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala
@@ -20,16 +20,16 @@ import akka.actor.{ ActorSystem, Props, ActorRef, Actor }
import akka.io.{ Udp, IO }
import java.net.InetSocketAddress
import akka.util.ByteString
-import kamon.metrics.Subscriptions.TickMetricSnapshot
-import kamon.metrics.MetricSnapshot.Measurement
-import kamon.metrics.InstrumentTypes.{ Counter, Gauge, Histogram, InstrumentType }
+import kamon.metric.Subscriptions.TickMetricSnapshot
import java.text.{ DecimalFormatSymbols, DecimalFormat }
import java.util.Locale
-class StatsDMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long) extends Actor with UdpExtensionProvider {
+import kamon.metric.instrument.{ Counter, Histogram }
+
+class StatsDMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long, metricKeyGenerator: StatsD.MetricKeyGenerator)
+ extends Actor with UdpExtensionProvider {
import context.system
- val metricKeyGenerator = new SimpleMetricKeyGenerator(context.system.settings.config)
val symbols = DecimalFormatSymbols.getInstance(Locale.US)
symbols.setDecimalSeparator('.') // Just in case there is some weird locale config we are not aware of.
@@ -48,7 +48,7 @@ class StatsDMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long)
}
def writeMetricsToRemote(tick: TickMetricSnapshot, udpSender: ActorRef): Unit = {
- val dataBuilder = new MetricDataPacketBuilder(maxPacketSizeInBytes, udpSender, remote)
+ val packetBuilder = new MetricDataPacketBuilder(maxPacketSizeInBytes, udpSender, remote)
for (
(groupIdentity, groupSnapshot) ← tick.metrics;
@@ -57,29 +57,31 @@ class StatsDMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long)
val key = metricKeyGenerator.generateKey(groupIdentity, metricIdentity)
- for (measurement ← metricSnapshot.measurements) {
- val measurementData = encodeMeasurement(measurement, metricSnapshot.instrumentType)
- dataBuilder.appendMeasurement(key, measurementData)
+ metricSnapshot match {
+ case hs: Histogram.Snapshot ⇒
+ hs.recordsIterator.foreach { record ⇒
+ packetBuilder.appendMeasurement(key, encodeStatsDTimer(record.level, record.count))
+ }
+
+ case cs: Counter.Snapshot ⇒
+ packetBuilder.appendMeasurement(key, encodeStatsDCounter(cs.count))
}
}
- dataBuilder.flush()
+ packetBuilder.flush()
}
- def encodeMeasurement(measurement: Measurement, instrumentType: InstrumentType): String = {
- def statsDMetricFormat(value: String, metricType: String, samplingRate: Double = 1D): String =
- value + "|" + metricType + (if (samplingRate != 1D) "|@" + samplingRateFormat.format(samplingRate) else "")
-
- instrumentType match {
- case Histogram ⇒ statsDMetricFormat(measurement.value.toString, "ms", (1D / measurement.count))
- case Gauge ⇒ statsDMetricFormat(measurement.value.toString, "g")
- case Counter ⇒ statsDMetricFormat(measurement.count.toString, "c")
- }
+ def encodeStatsDTimer(level: Long, count: Long): String = {
+ val samplingRate: Double = 1D / count
+ level.toString + "|ms" + (if (samplingRate != 1D) "|@" + samplingRateFormat.format(samplingRate) else "")
}
+
+ def encodeStatsDCounter(count: Long): String = count.toString + "|c"
}
object StatsDMetricsSender {
- def props(remote: InetSocketAddress, maxPacketSize: Long): Props = Props(new StatsDMetricsSender(remote, maxPacketSize))
+ def props(remote: InetSocketAddress, maxPacketSize: Long, metricKeyGenerator: StatsD.MetricKeyGenerator): Props =
+ Props(new StatsDMetricsSender(remote, maxPacketSize, metricKeyGenerator))
}
trait UdpExtensionProvider {
diff --git a/kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala b/kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala
index 9dfd05f7..3bc1364c 100644
--- a/kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala
+++ b/kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala
@@ -18,63 +18,83 @@ package kamon.statsd
import akka.testkit.{ TestKitBase, TestProbe }
import akka.actor.{ ActorRef, Props, ActorSystem }
+import kamon.Kamon
+import kamon.metric.instrument.Histogram.Precision
+import kamon.metric.instrument.Histogram
import org.scalatest.{ Matchers, WordSpecLike }
-import kamon.metrics._
+import kamon.metric._
import akka.io.Udp
-import org.HdrHistogram.HdrRecorder
-import kamon.metrics.Subscriptions.TickMetricSnapshot
+import kamon.metric.Subscriptions.TickMetricSnapshot
import java.lang.management.ManagementFactory
import java.net.InetSocketAddress
import com.typesafe.config.ConfigFactory
class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers {
- implicit lazy val system = ActorSystem("statsd-metric-sender-spec",
- ConfigFactory.parseString("kamon.statsd.max-packet-size = 256 bytes"))
+ implicit lazy val system: ActorSystem = ActorSystem("statsd-metric-sender-spec", ConfigFactory.parseString(
+ """
+ |kamon {
+ | metrics {
+ | disable-aspectj-weaver-missing-error = true
+ | }
+ |
+ | statsd {
+ | max-packet-size = 256 bytes
+ | }
+ |}
+ |
+ """.stripMargin))
+
+ val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
"the StatsDMetricSender" should {
+ "normalize the group entity name to remove spaces, colons and replace '/' with '_'" in new UdpListenerFixture {
+ val testMetricKey = buildMetricKey("trace", "POST: /kamon/example", "elapsed-time")
+ testMetricKey should be(s"kamon.localhost_local.trace.POST-_kamon_example.elapsed-time")
+ }
+
"flush the metrics data after processing the tick, even if the max-packet-size is not reached" in new UdpListenerFixture {
- val testMetricName = "test-metric"
- val testMetricKey = buildMetricKey(testMetricName)
- val testRecorder = HdrRecorder(1000L, 2, Scale.Unit)
+ val testMetricName = "processing-time"
+ val testMetricKey = buildMetricKey("actor", "/user/kamon", testMetricName)
+ val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
testRecorder.record(10L)
- val udp = setup(Map(testMetricName -> testRecorder.collect()))
+ val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"$testMetricKey:10|ms")
}
"render several measurements of the same key under a single (key + multiple measurements) packet" in new UdpListenerFixture {
- val testMetricName = "test-metric"
- val testMetricKey = buildMetricKey(testMetricName)
- val testRecorder = HdrRecorder(1000L, 2, Scale.Unit)
+ val testMetricName = "processing-time"
+ val testMetricKey = buildMetricKey("actor", "/user/kamon", testMetricName)
+ val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
testRecorder.record(10L)
testRecorder.record(11L)
testRecorder.record(12L)
- val udp = setup(Map(testMetricName -> testRecorder.collect()))
+ val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"$testMetricKey:10|ms:11|ms:12|ms")
}
"include the correspondent sampling rate when rendering multiple occurrences of the same value" in new UdpListenerFixture {
- val testMetricName = "test-metric"
- val testMetricKey = buildMetricKey(testMetricName)
- val testRecorder = HdrRecorder(1000L, 2, Scale.Unit)
+ val testMetricName = "processing-time"
+ val testMetricKey = buildMetricKey("actor", "/user/kamon", testMetricName)
+ val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
testRecorder.record(10L)
testRecorder.record(10L)
- val udp = setup(Map(testMetricName -> testRecorder.collect()))
+ val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"$testMetricKey:10|ms|@0.5")
}
"flush the packet when the max-packet-size is reached" in new UdpListenerFixture {
- val testMetricName = "test-metric"
- val testMetricKey = buildMetricKey(testMetricName)
- val testRecorder = HdrRecorder(testMaxPacketSize, 3, Scale.Unit)
+ val testMetricName = "processing-time"
+ val testMetricKey = buildMetricKey("actor", "/user/kamon", testMetricName)
+ val testRecorder = Histogram(10000L, Precision.Normal, Scale.Unit)
var bytes = testMetricKey.length
var level = 0
@@ -84,7 +104,7 @@ class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers
bytes += s":$level|ms".length
}
- val udp = setup(Map(testMetricName -> testRecorder.collect()))
+ val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
udp.expectMsgType[Udp.Send] // let the first flush pass
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
@@ -93,12 +113,12 @@ class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers
"render multiple keys in the same packet using newline as separator" in new UdpListenerFixture {
val firstTestMetricName = "first-test-metric"
- val firstTestMetricKey = buildMetricKey(firstTestMetricName)
+ val firstTestMetricKey = buildMetricKey("actor", "/user/kamon", firstTestMetricName)
val secondTestMetricName = "second-test-metric"
- val secondTestMetricKey = buildMetricKey(secondTestMetricName)
+ val secondTestMetricKey = buildMetricKey("actor", "/user/kamon", secondTestMetricName)
- val firstTestRecorder = HdrRecorder(1000L, 2, Scale.Unit)
- val secondTestRecorder = HdrRecorder(1000L, 2, Scale.Unit)
+ val firstTestRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
+ val secondTestRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
firstTestRecorder.record(10L)
firstTestRecorder.record(10L)
@@ -108,8 +128,8 @@ class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers
secondTestRecorder.record(21L)
val udp = setup(Map(
- firstTestMetricName -> firstTestRecorder.collect(),
- secondTestMetricName -> secondTestRecorder.collect()))
+ firstTestMetricName -> firstTestRecorder.collect(collectionContext),
+ secondTestMetricName -> secondTestRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"$firstTestMetricKey:10|ms|@0.5:11|ms\n$secondTestMetricKey:20|ms:21|ms")
@@ -117,14 +137,32 @@ class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers
}
trait UdpListenerFixture {
- val localhostName = ManagementFactory.getRuntimeMXBean.getName.split('@')(1)
val testMaxPacketSize = system.settings.config.getBytes("kamon.statsd.max-packet-size")
+ val metricKeyGenerator = new SimpleMetricKeyGenerator(system.settings.config) {
+ override def normalizedLocalhostName: String = "localhost_local"
+ }
- def buildMetricKey(metricName: String): String = s"kamon.$localhostName.test-metric-category.test-group.$metricName"
+ val testGroupIdentity = new MetricGroupIdentity {
+ val name: String = "/user/kamon"
+ val category: MetricGroupCategory = new MetricGroupCategory {
+ val name: String = "actor"
+ }
+ }
- def setup(metrics: Map[String, MetricSnapshotLike]): TestProbe = {
+ def buildMetricKey(categoryName: String, entityName: String, metricName: String): String = {
+ val metricIdentity = new MetricIdentity { val name: String = metricName }
+ val groupIdentity = new MetricGroupIdentity {
+ val name: String = entityName
+ val category: MetricGroupCategory = new MetricGroupCategory {
+ val name: String = categoryName
+ }
+ }
+ metricKeyGenerator.generateKey(groupIdentity, metricIdentity)
+ }
+
+ def setup(metrics: Map[String, MetricSnapshot]): TestProbe = {
val udp = TestProbe()
- val metricsSender = system.actorOf(Props(new StatsDMetricsSender(new InetSocketAddress(localhostName, 0), testMaxPacketSize) {
+ val metricsSender = system.actorOf(Props(new StatsDMetricsSender(new InetSocketAddress("127.0.0.1", 0), testMaxPacketSize, metricKeyGenerator) {
override def udpExtension(implicit system: ActorSystem): ActorRef = udp.ref
}))
@@ -132,24 +170,19 @@ class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers
udp.expectMsgType[Udp.SimpleSender]
udp.reply(Udp.SimpleSenderReady)
- val testGroupIdentity = new MetricGroupIdentity {
- val name: String = "test-group"
- val category: MetricGroupCategory = new MetricGroupCategory {
- val name: String = "test-metric-category"
- }
- }
-
val testMetrics = for ((metricName, snapshot) ← metrics) yield {
val testMetricIdentity = new MetricIdentity {
val name: String = metricName
- val tag: String = ""
}
(testMetricIdentity, snapshot)
}
metricsSender ! TickMetricSnapshot(0, 0, Map(testGroupIdentity -> new MetricGroupSnapshot {
- val metrics: Map[MetricIdentity, MetricSnapshotLike] = testMetrics.toMap
+ type GroupSnapshotType = Histogram.Snapshot
+ def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = ???
+
+ val metrics: Map[MetricIdentity, MetricSnapshot] = testMetrics.toMap
}))
udp
diff --git a/kamon-system-metrics/src/main/resources/reference.conf b/kamon-system-metrics/src/main/resources/reference.conf
new file mode 100644
index 00000000..0600388d
--- /dev/null
+++ b/kamon-system-metrics/src/main/resources/reference.conf
@@ -0,0 +1,76 @@
+# ============================================ #
+# Kamon-System-Metrics Reference Configuration #
+# ============================================ #
+
+kamon {
+ metrics {
+ precision {
+ system {
+ process-cpu {
+ user = {
+ refresh-interval = 100 milliseconds
+ highest-trackable-value = 999999999
+ significant-value-digits = 2
+ }
+ system = {
+ refresh-interval = 100 milliseconds
+ highest-trackable-value = 999999999
+ significant-value-digits = 2
+ }
+ }
+
+ cpu {
+ user = {
+ refresh-interval = 100 milliseconds
+ highest-trackable-value = 999
+ significant-value-digits = 2
+ }
+ system = {
+ refresh-interval = 100 milliseconds
+ highest-trackable-value = 999
+ significant-value-digits = 2
+ }
+ wait = {
+ refresh-interval = 100 milliseconds
+ highest-trackable-value = 999
+ significant-value-digits = 2
+ }
+ idle = {
+ refresh-interval = 100 milliseconds
+ highest-trackable-value = 999
+ significant-value-digits = 2
+ }
+ }
+
+ network {
+ rx-bytes = ${kamon.metrics.precision.default-histogram-precision}
+ tx-bytes = ${kamon.metrics.precision.default-histogram-precision}
+ rx-errors = ${kamon.metrics.precision.default-histogram-precision}
+ tx-errors = ${kamon.metrics.precision.default-histogram-precision}
+ }
+
+ memory {
+ used = ${kamon.metrics.precision.default-histogram-precision}
+ free = ${kamon.metrics.precision.default-histogram-precision}
+ buffer = ${kamon.metrics.precision.default-histogram-precision}
+ cache = ${kamon.metrics.precision.default-histogram-precision}
+ swap-used = ${kamon.metrics.precision.default-histogram-precision}
+ swap-free = ${kamon.metrics.precision.default-histogram-precision}
+ }
+ }
+
+ jvm {
+ heap {
+ used = ${kamon.metrics.precision.default-gauge-precision}
+ max = ${kamon.metrics.precision.default-gauge-precision}
+ committed = ${kamon.metrics.precision.default-gauge-precision}
+ }
+
+ gc {
+ count = ${kamon.metrics.precision.default-gauge-precision}
+ time = ${kamon.metrics.precision.default-gauge-precision}
+ }
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/CPUMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/CPUMetrics.scala
new file mode 100644
index 00000000..99288f94
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/metrics/CPUMetrics.scala
@@ -0,0 +1,81 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+package kamon.metrics
+
+import akka.actor.ActorSystem
+import com.typesafe.config.Config
+import kamon.metric._
+import kamon.metric.instrument.Histogram
+
/** Identity of the "cpu" metric group; every instance belongs to the [[CPUMetrics]] category. */
case class CPUMetrics(name: String) extends MetricGroupIdentity {
  val category = CPUMetrics
}

object CPUMetrics extends MetricGroupCategory {
  val name = "cpu"

  // Identities for each tracked slice of total CPU time.
  case object User extends MetricIdentity { val name = "user" }
  case object System extends MetricIdentity { val name = "system" }
  case object Wait extends MetricIdentity { val name = "wait" }
  case object Idle extends MetricIdentity { val name = "idle" }

  /** Live histograms into which CPU usage samples are recorded. */
  case class CPUMetricRecorder(user: Histogram, system: Histogram, cpuWait: Histogram, idle: Histogram)
      extends MetricGroupRecorder {

    def collect(context: CollectionContext): MetricGroupSnapshot =
      CPUMetricSnapshot(
        user.collect(context),
        system.collect(context),
        cpuWait.collect(context),
        idle.collect(context))

    def cleanup: Unit = {}
  }

  /** Immutable view of the recorded CPU histograms; mergeable across collection ticks. */
  case class CPUMetricSnapshot(user: Histogram.Snapshot, system: Histogram.Snapshot, cpuWait: Histogram.Snapshot, idle: Histogram.Snapshot)
      extends MetricGroupSnapshot {

    type GroupSnapshotType = CPUMetricSnapshot

    def merge(that: CPUMetricSnapshot, context: CollectionContext): GroupSnapshotType =
      CPUMetricSnapshot(
        user.merge(that.user, context),
        system.merge(that.system, context),
        cpuWait.merge(that.cpuWait, context),
        idle.merge(that.idle, context))

    lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
      User -> user,
      System -> system,
      Wait -> cpuWait,
      Idle -> idle)
  }

  /** Builds a [[CPUMetricRecorder]] from the `precision.system.cpu` configuration section. */
  val Factory = new MetricGroupFactory {

    type GroupRecorder = CPUMetricRecorder

    def create(config: Config, system: ActorSystem): GroupRecorder = {
      val cpuSettings = config.getConfig("precision.system.cpu")

      new CPUMetricRecorder(
        Histogram.fromConfig(cpuSettings.getConfig("user")),
        Histogram.fromConfig(cpuSettings.getConfig("system")),
        Histogram.fromConfig(cpuSettings.getConfig("wait")),
        Histogram.fromConfig(cpuSettings.getConfig("idle")))
    }
  }
}
+
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/GCMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/GCMetrics.scala
new file mode 100644
index 00000000..b5da600e
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/metrics/GCMetrics.scala
@@ -0,0 +1,75 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+package kamon.metrics
+
+import java.lang.management.GarbageCollectorMXBean
+
+import akka.actor.ActorSystem
+import com.typesafe.config.Config
+import kamon.metric._
+import kamon.metric.instrument.{ Gauge, Histogram }
+
/** Identity of a garbage-collector metric group; one instance per GC bean name. */
case class GCMetrics(name: String) extends MetricGroupIdentity {
  val category = GCMetrics
}

object GCMetrics extends MetricGroupCategory {
  val name = "gc"

  case object CollectionCount extends MetricIdentity { val name = "collection-count" }
  case object CollectionTime extends MetricIdentity { val name = "collection-time" }

  /** Gauges that sample the collector's accumulated collection count and time. */
  case class GCMetricRecorder(count: Gauge, time: Gauge)
      extends MetricGroupRecorder {

    def collect(context: CollectionContext): MetricGroupSnapshot =
      GCMetricSnapshot(count.collect(context), time.collect(context))

    def cleanup: Unit = {}
  }

  /** Immutable view of the recorded GC gauges; mergeable across collection ticks. */
  case class GCMetricSnapshot(count: Histogram.Snapshot, time: Histogram.Snapshot)
      extends MetricGroupSnapshot {

    type GroupSnapshotType = GCMetricSnapshot

    def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType =
      GCMetricSnapshot(count.merge(that.count, context), time.merge(that.time, context))

    lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
      CollectionCount -> count,
      CollectionTime -> time)
  }

  /** Creates a factory bound to one [[GarbageCollectorMXBean]], reading `precision.jvm.gc`. */
  def Factory(gc: GarbageCollectorMXBean) = new MetricGroupFactory {

    type GroupRecorder = GCMetricRecorder

    def create(config: Config, system: ActorSystem): GroupRecorder = {
      val gcSettings = config.getConfig("precision.jvm.gc")

      new GCMetricRecorder(
        Gauge.fromConfig(gcSettings.getConfig("count"), system)(() ⇒ gc.getCollectionCount),
        Gauge.fromConfig(gcSettings.getConfig("time"), system, Scale.Milli)(() ⇒ gc.getCollectionTime))
    }
  }
}
+
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/HeapMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/HeapMetrics.scala
new file mode 100644
index 00000000..c51b458c
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/metrics/HeapMetrics.scala
@@ -0,0 +1,83 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+package kamon.metrics
+
+import java.lang.management.ManagementFactory
+
+import akka.actor.ActorSystem
+import com.typesafe.config.Config
+import kamon.metric._
+import kamon.metric.instrument.{ Gauge, Histogram }
+
/** Identity of the JVM heap metric group. */
case class HeapMetrics(name: String) extends MetricGroupIdentity {
  val category = HeapMetrics
}

object HeapMetrics extends MetricGroupCategory {
  val name = "heap"

  case object Used extends MetricIdentity { val name = "used-heap" }
  case object Max extends MetricIdentity { val name = "max-heap" }
  case object Committed extends MetricIdentity { val name = "committed-heap" }

  /** Gauges sampling the used/max/committed heap sizes. */
  case class HeapMetricRecorder(used: Gauge, max: Gauge, committed: Gauge)
      extends MetricGroupRecorder {

    def collect(context: CollectionContext): MetricGroupSnapshot =
      HeapMetricSnapshot(used.collect(context), max.collect(context), committed.collect(context))

    def cleanup: Unit = {}
  }

  /** Immutable view of the recorded heap gauges; mergeable across collection ticks. */
  case class HeapMetricSnapshot(used: Histogram.Snapshot, max: Histogram.Snapshot, committed: Histogram.Snapshot)
      extends MetricGroupSnapshot {

    type GroupSnapshotType = HeapMetricSnapshot

    def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType =
      HeapMetricSnapshot(
        used.merge(that.used, context),
        max.merge(that.max, context),
        committed.merge(that.committed, context))

    lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
      Used -> used,
      Max -> max,
      Committed -> committed)
  }

  /** Gauges sample heap usage (scaled to MB) through the platform MemoryMXBean. */
  val Factory = new MetricGroupFactory {
    import kamon.system.SystemMetricsExtension._

    val memory = ManagementFactory.getMemoryMXBean
    def heap = memory.getHeapMemoryUsage // def, not val: re-read on every gauge sample

    type GroupRecorder = HeapMetricRecorder

    def create(config: Config, system: ActorSystem): GroupRecorder = {
      val heapSettings = config.getConfig("precision.jvm.heap")

      new HeapMetricRecorder(
        Gauge.fromConfig(heapSettings.getConfig("used"), system, Scale.Mega)(() ⇒ toMB(heap.getUsed)),
        Gauge.fromConfig(heapSettings.getConfig("max"), system, Scale.Mega)(() ⇒ toMB(heap.getMax)),
        Gauge.fromConfig(heapSettings.getConfig("committed"), system, Scale.Mega)(() ⇒ toMB(heap.getCommitted)))
    }
  }
}
+
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/MemoryMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/MemoryMetrics.scala
new file mode 100644
index 00000000..6f3eb6df
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/metrics/MemoryMetrics.scala
@@ -0,0 +1,88 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+package kamon.metrics
+
+import akka.actor.ActorSystem
+import com.typesafe.config.Config
+import kamon.metric._
+import kamon.metric.instrument.Histogram
+
/** Identity of the system memory metric group. */
case class MemoryMetrics(name: String) extends MetricGroupIdentity {
  val category = MemoryMetrics
}

object MemoryMetrics extends MetricGroupCategory {
  val name = "memory"

  case object Used extends MetricIdentity { val name = "used" }
  case object Free extends MetricIdentity { val name = "free" }
  case object Buffer extends MetricIdentity { val name = "buffer" }
  case object Cache extends MetricIdentity { val name = "cache" }
  case object SwapUsed extends MetricIdentity { val name = "swap-used" }
  case object SwapFree extends MetricIdentity { val name = "swap-free" }

  /** Live histograms for the six tracked memory quantities. */
  case class MemoryMetricRecorder(used: Histogram, free: Histogram, buffer: Histogram, cache: Histogram, swapUsed: Histogram, swapFree: Histogram)
    extends MetricGroupRecorder {

    def collect(context: CollectionContext): MetricGroupSnapshot = {
      MemoryMetricSnapshot(used.collect(context), free.collect(context), buffer.collect(context), cache.collect(context), swapUsed.collect(context), swapFree.collect(context))
    }

    def cleanup: Unit = {}
  }

  /** Immutable view of the recorded memory histograms; mergeable across collection ticks. */
  case class MemoryMetricSnapshot(used: Histogram.Snapshot, free: Histogram.Snapshot, buffer: Histogram.Snapshot, cache: Histogram.Snapshot, swapUsed: Histogram.Snapshot, swapFree: Histogram.Snapshot)
    extends MetricGroupSnapshot {

    type GroupSnapshotType = MemoryMetricSnapshot

    def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
      MemoryMetricSnapshot(used.merge(that.used, context), free.merge(that.free, context), buffer.merge(that.buffer, context), cache.merge(that.cache, context), swapUsed.merge(that.swapUsed, context), swapFree.merge(that.swapFree, context))
    }

    lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
      Used -> used,
      Free -> free,
      Buffer -> buffer,
      Cache -> cache,
      SwapUsed -> swapUsed,
      SwapFree -> swapFree)
  }

  /** Builds a [[MemoryMetricRecorder]] from the `precision.system.memory` configuration section. */
  val Factory = new MetricGroupFactory {

    type GroupRecorder = MemoryMetricRecorder

    def create(config: Config, system: ActorSystem): GroupRecorder = {
      val settings = config.getConfig("precision.system.memory")

      val usedConfig = settings.getConfig("used")
      val freeConfig = settings.getConfig("free")
      val bufferConfig = settings.getConfig("buffer")
      val cacheConfig = settings.getConfig("cache")
      val swapUsedConfig = settings.getConfig("swap-used")
      val swapFreeConfig = settings.getConfig("swap-free")

      // FIX: the arguments were previously passed as (used, free, swapUsed, swapFree,
      // buffer, cache), so the buffer/cache histograms were built from the swap
      // settings and vice versa. The recorder's constructor order is
      // (used, free, buffer, cache, swapUsed, swapFree) — match it positionally.
      new MemoryMetricRecorder(
        Histogram.fromConfig(usedConfig, Scale.Mega),
        Histogram.fromConfig(freeConfig, Scale.Mega),
        Histogram.fromConfig(bufferConfig, Scale.Mega),
        Histogram.fromConfig(cacheConfig, Scale.Mega),
        Histogram.fromConfig(swapUsedConfig, Scale.Mega),
        Histogram.fromConfig(swapFreeConfig, Scale.Mega))
    }
  }
}
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/NetworkMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/NetworkMetrics.scala
new file mode 100644
index 00000000..831a06e3
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/metrics/NetworkMetrics.scala
@@ -0,0 +1,80 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+package kamon.metrics
+
+import akka.actor.ActorSystem
+import com.typesafe.config.Config
+import kamon.metric._
+import kamon.metric.instrument.Histogram
+
/** Identity of the network-interface metric group. */
case class NetworkMetrics(name: String) extends MetricGroupIdentity {
  val category = NetworkMetrics
}

object NetworkMetrics extends MetricGroupCategory {
  val name = "network"

  case object RxBytes extends MetricIdentity { val name = "rx-bytes" }
  case object TxBytes extends MetricIdentity { val name = "tx-bytes" }
  case object RxErrors extends MetricIdentity { val name = "rx-errors" }
  case object TxErrors extends MetricIdentity { val name = "tx-errors" }

  /** Live histograms for received/transmitted traffic and error counts. */
  case class NetworkMetricRecorder(rxBytes: Histogram, txBytes: Histogram, rxErrors: Histogram, txErrors: Histogram)
      extends MetricGroupRecorder {

    def collect(context: CollectionContext): MetricGroupSnapshot =
      NetworkMetricSnapshot(
        rxBytes.collect(context),
        txBytes.collect(context),
        rxErrors.collect(context),
        txErrors.collect(context))

    def cleanup: Unit = {}
  }

  /** Immutable view of the recorded network histograms; mergeable across collection ticks. */
  case class NetworkMetricSnapshot(rxBytes: Histogram.Snapshot, txBytes: Histogram.Snapshot, rxErrors: Histogram.Snapshot, txErrors: Histogram.Snapshot)
      extends MetricGroupSnapshot {

    type GroupSnapshotType = NetworkMetricSnapshot

    def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType =
      NetworkMetricSnapshot(
        rxBytes.merge(that.rxBytes, context),
        txBytes.merge(that.txBytes, context),
        rxErrors.merge(that.rxErrors, context),
        txErrors.merge(that.txErrors, context))

    val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
      RxBytes -> rxBytes,
      TxBytes -> txBytes,
      RxErrors -> rxErrors,
      TxErrors -> txErrors)
  }

  /** Reads `precision.system.network`; the byte histograms are scaled to kilobytes. */
  val Factory = new MetricGroupFactory {

    type GroupRecorder = NetworkMetricRecorder

    def create(config: Config, system: ActorSystem): GroupRecorder = {
      val netSettings = config.getConfig("precision.system.network")

      new NetworkMetricRecorder(
        Histogram.fromConfig(netSettings.getConfig("rx-bytes"), Scale.Kilo),
        Histogram.fromConfig(netSettings.getConfig("tx-bytes"), Scale.Kilo),
        Histogram.fromConfig(netSettings.getConfig("rx-errors")),
        Histogram.fromConfig(netSettings.getConfig("tx-errors")))
    }
  }
}
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/ProcessCPUMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/ProcessCPUMetrics.scala
new file mode 100644
index 00000000..61f7ddb2
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/metrics/ProcessCPUMetrics.scala
@@ -0,0 +1,73 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+package kamon.metrics
+
+import akka.actor.ActorSystem
+import com.typesafe.config.Config
+import kamon.metric._
+import kamon.metric.instrument.Histogram
+
/** Identity of the per-process CPU metric group. */
case class ProcessCPUMetrics(name: String) extends MetricGroupIdentity {
  val category = ProcessCPUMetrics
}

object ProcessCPUMetrics extends MetricGroupCategory {
  val name = "proc-cpu"

  case object User extends MetricIdentity { val name = "user" }
  case object System extends MetricIdentity { val name = "system" }

  /** Live histograms for the process' user and system CPU time samples. */
  case class ProcessCPUMetricsRecorder(user: Histogram, system: Histogram)
      extends MetricGroupRecorder {

    def collect(context: CollectionContext): MetricGroupSnapshot =
      ProcessCPUMetricsSnapshot(user.collect(context), system.collect(context))

    def cleanup: Unit = {}
  }

  /** Immutable view of the recorded histograms; mergeable across collection ticks. */
  case class ProcessCPUMetricsSnapshot(user: Histogram.Snapshot, system: Histogram.Snapshot)
      extends MetricGroupSnapshot {

    type GroupSnapshotType = ProcessCPUMetricsSnapshot

    def merge(that: ProcessCPUMetricsSnapshot, context: CollectionContext): GroupSnapshotType =
      ProcessCPUMetricsSnapshot(user.merge(that.user, context), system.merge(that.system, context))

    lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
      User -> user,
      System -> system)
  }

  /** Builds a recorder from the `precision.system.process-cpu` configuration section. */
  val Factory = new MetricGroupFactory {

    type GroupRecorder = ProcessCPUMetricsRecorder

    def create(config: Config, system: ActorSystem): GroupRecorder = {
      val procSettings = config.getConfig("precision.system.process-cpu")

      new ProcessCPUMetricsRecorder(
        Histogram.fromConfig(procSettings.getConfig("user")),
        Histogram.fromConfig(procSettings.getConfig("system")))
    }
  }
}
+
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetrics.scala
new file mode 100644
index 00000000..29048915
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetrics.scala
@@ -0,0 +1,63 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+package kamon.system
+
+import java.lang.management.ManagementFactory
+
+import akka.actor._
+import akka.event.Logging
+import kamon.Kamon
+import kamon.metric.Metrics
+import kamon.metrics._
+
+import scala.collection.JavaConverters._
+import scala.concurrent.duration._
+
/** Akka ExtensionId through which the system-metrics extension is looked up and created. */
object SystemMetrics extends ExtensionId[SystemMetricsExtension] with ExtensionIdProvider {
  override def lookup(): ExtensionId[_ <: Extension] = SystemMetrics

  override def createExtension(system: ExtendedActorSystem): SystemMetricsExtension =
    new SystemMetricsExtension(system)
}
+
/**
 * Kamon extension entry point: registers the JVM metric groups (heap plus one
 * GC group per valid collector bean) and starts the actor that periodically
 * samples OS-level metrics.
 */
class SystemMetricsExtension(private val system: ExtendedActorSystem) extends Kamon.Extension {
  import kamon.system.SystemMetricsExtension._

  val log = Logging(system, classOf[SystemMetricsExtension])
  log.info(s"Starting the Kamon(SystemMetrics) extension")

  val systemMetricsExtension = Kamon(Metrics)(system)

  // JVM metrics: heap first, then one group per garbage collector.
  systemMetricsExtension.register(HeapMetrics(Heap), HeapMetrics.Factory)
  for (gc ← garbageCollectors)
    systemMetricsExtension.register(GCMetrics(gc.getName), GCMetrics.Factory(gc))

  // OS-level metrics are sampled once per second by a dedicated collector actor.
  system.actorOf(SystemMetricsCollector.props(1 second), "system-metrics-collector")
}
+
object SystemMetricsExtension {
  // Entity names under which each system metric group is registered.
  val CPU = "cpu"
  val ProcessCPU = "process-cpu"
  val Network = "network"
  val Memory = "memory"
  val Heap = "heap"

  /** Converts bytes to whole kilobytes (truncating division). */
  def toKB(value: Long): Long = value / 1024

  /** Converts bytes to whole megabytes (truncating division). */
  def toMB(value: Long): Long = value / 1024 / 1024

  /** Scales a fractional value (e.g. a CPU usage in [0, 1]) by 100 and rounds to a Long. */
  def toLong(value: Double): Long = math.round(value * 100L)

  // Every currently valid garbage collector MXBean exposed by this JVM.
  val garbageCollectors = ManagementFactory.getGarbageCollectorMXBeans.asScala.filter(_.isValid)
}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsCollector.scala b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsCollector.scala
new file mode 100644
index 00000000..a5a2f411
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsCollector.scala
@@ -0,0 +1,115 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+package kamon.system
+
+import akka.actor.{ Actor, Props }
+import kamon.Kamon
+import kamon.metric.Metrics
+import kamon.metrics.CPUMetrics.CPUMetricRecorder
+import kamon.metrics.MemoryMetrics.MemoryMetricRecorder
+import kamon.metrics.NetworkMetrics.NetworkMetricRecorder
+import kamon.metrics.ProcessCPUMetrics.ProcessCPUMetricsRecorder
+import kamon.metrics.{ CPUMetrics, MemoryMetrics, NetworkMetrics, ProcessCPUMetrics }
+import kamon.system.sigar.SigarHolder
+import org.hyperic.sigar.{ Mem, NetInterfaceStat, SigarProxy }
+
+import scala.concurrent.duration.FiniteDuration
+
/**
 * Actor that samples operating-system metrics (CPU, per-process CPU, memory and
 * network) through Sigar on a fixed interval and records each sample into the
 * corresponding Kamon recorder.
 *
 * NOTE(review): Sigar calls are native and may block; confirm whether this actor
 * should run on a dedicated dispatcher.
 */
class SystemMetricsCollector(collectInterval: FiniteDuration) extends Actor with SigarExtensionProvider {
  import kamon.system.SystemMetricsCollector._
  import kamon.system.SystemMetricsExtension._

  // Repeating self-tick driving the sampling loop; cancelled in postStop.
  val collectSchedule = context.system.scheduler.schedule(collectInterval, collectInterval, self, Collect)(context.dispatcher)

  val systemMetricsExtension = Kamon(Metrics)(context.system)

  // Recorders for each metric group. register(...) returns a container that is
  // mapped over in collectMetrics — presumably an Option; confirm with the Metrics API.
  val cpuRecorder = systemMetricsExtension.register(CPUMetrics(CPU), CPUMetrics.Factory)
  val processCpuRecorder = systemMetricsExtension.register(ProcessCPUMetrics(ProcessCPU), ProcessCPUMetrics.Factory)
  val memoryRecorder = systemMetricsExtension.register(MemoryMetrics(Memory), MemoryMetrics.Factory)
  val networkRecorder = systemMetricsExtension.register(NetworkMetrics(Network), NetworkMetrics.Factory)

  def receive: Receive = {
    case Collect ⇒ collectMetrics()
    case anything ⇒ // ignore anything else
  }

  override def postStop() = collectSchedule.cancel()

  // One full sampling round: each recorder is updated only if registration succeeded.
  def collectMetrics() = {
    cpuRecorder.map(recordCpu)
    processCpuRecorder.map(recordProcessCpu)
    memoryRecorder.map(recordMemory)
    networkRecorder.map(recordNetwork)
  }

  // System-wide CPU percentages; toLong scales the fractional values by 100.
  private def recordCpu(cpur: CPUMetricRecorder) = {
    cpur.user.record(toLong(cpu.getUser))
    cpur.system.record(toLong(cpu.getSys))
    cpur.cpuWait.record(toLong(cpu.getWait()))
    cpur.idle.record(toLong(cpu.getIdle))
  }

  // Per-process CPU times are recorded as-is (no scaling, unlike recordCpu).
  private def recordProcessCpu(pcpur: ProcessCPUMetricsRecorder) = {
    pcpur.user.record(procCpu.getUser)
    pcpur.system.record(procCpu.getSys)
  }

  private def recordMemory(mr: MemoryMetricRecorder) = {
    mr.used.record(toMB(mem.getUsed))
    mr.free.record(toMB(mem.getFree))
    mr.swapUsed.record(toMB(swap.getUsed))
    mr.swapFree.record(toMB(swap.getFree))
    mr.buffer.record(toMB(collectBuffer(mem)))
    mr.cache.record(toMB(collectCache(mem)))

    // Buffer/cache are derived from the difference between "used/free" and the
    // "actual" figures; when they are equal, 0 is recorded.
    // NOTE(review): recording getActualUsed/getActualFree (not the difference)
    // looks intentional but is worth confirming against Sigar's Mem semantics.
    def collectBuffer(mem: Mem): Long = if (mem.getUsed() != mem.getActualUsed()) mem.getActualUsed() else 0L
    def collectCache(mem: Mem): Long = if (mem.getFree() != mem.getActualFree()) mem.getActualFree() else 0L
  }

  // Each value is the sum over all interfaces captured in `interfaces`.
  private def recordNetwork(nr: NetworkMetricRecorder) = {
    nr.rxBytes.record(collect(sigar, interfaces)(net ⇒ toKB(net.getRxBytes)))
    nr.txBytes.record(collect(sigar, interfaces)(net ⇒ toKB(net.getTxBytes)))
    nr.rxErrors.record(collect(sigar, interfaces)(net ⇒ net.getRxErrors))
    nr.txErrors.record(collect(sigar, interfaces)(net ⇒ net.getTxErrors))

    // Folds `block` over the per-interface stats, summing the results.
    def collect(sigar: SigarProxy, interfaces: Set[String])(block: NetInterfaceStat ⇒ Long): Long = {
      interfaces.foldLeft(0L) { (totalBytes, interface) ⇒
        {
          val net = sigar.getNetInterfaceStat(interface)
          totalBytes + block(net)
        }
      }
    }
  }
}
+
object SystemMetricsCollector {
  /** Tick message that triggers one round of metric sampling. */
  case object Collect

  def props(collectInterval: FiniteDuration): Props =
    Props[SystemMetricsCollector](new SystemMetricsCollector(collectInterval))
}
+
/**
 * Convenience accessors over the process-wide shared Sigar instance.
 * The `def` members re-query Sigar on every call; `interfaces` is captured once.
 */
trait SigarExtensionProvider {
  // Shared native Sigar proxy, created lazily on first use.
  lazy val sigar = SigarHolder.instance()

  def pid = sigar.getPid
  def procCpu = sigar.getProcCpu(pid)
  def cpu = sigar.getCpuPerc
  def mem = sigar.getMem
  def swap = sigar.getSwap

  // Interface names snapshot taken at mix-in construction time — interfaces
  // added later will not be sampled; confirm this is acceptable.
  val interfaces: Set[String] = sigar.getNetInterfaceList.toSet
}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarLoader.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarLoader.scala
new file mode 100644
index 00000000..d138ec8f
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarLoader.scala
@@ -0,0 +1,173 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.system.sigar
+
+import java.io._
+import java.util
+import java.util.logging.Logger
+import java.util.{ ArrayList, Date, List }
+
+import org.hyperic.sigar.{ OperatingSystem, Sigar, SigarProxy }
+
+import scala.annotation.tailrec
+import scala.collection.JavaConversions._
+import scala.io.Source
+
+object SigarHolder {
+ private lazy val sigarProxy = SigarLoader.sigarProxy
+ def instance() = sigarProxy
+}
+
+object SigarLoader {
+
+ val Version = "1.6.4"
+ val JavaLibraryPath = "java.library.path"
+ val TmpDir = "java.io.tmpdir"
+ val IndexFile = "/kamon/system/sigar/index"
+ val UsrPathField = "usr_paths"
+
+ private val log = Logger.getLogger("SigarLoader")
+
+ def sigarProxy = init(new File(System.getProperty(TmpDir)))
+
+ private[sigar] def init(baseTmp: File): SigarProxy = {
+ val tmpDir = createTmpDir(baseTmp)
+ for (lib ← loadIndex) copy(lib, tmpDir)
+
+ attachToLibraryPath(tmpDir)
+
+ try {
+ val sigar = new Sigar()
+ printBanner(sigar)
+ sigar
+ } catch {
+ case t: Throwable ⇒ {
+ log.severe("Failed to load sigar")
+ throw new RuntimeException(t)
+ }
+ }
+ }
+
+ private[sigar] val usrPathField = {
+ val usrPathField = classOf[ClassLoader].getDeclaredField(UsrPathField)
+ usrPathField.setAccessible(true)
+ usrPathField
+ }
+
+ private[sigar] def attachToLibraryPath(dir: File): Unit = {
+ val dirAbsolute = dir.getAbsolutePath
+ System.setProperty(JavaLibraryPath, newLibraryPath(dirAbsolute))
+ var paths = usrPathField.get(null).asInstanceOf[Array[String]]
+ if (paths == null) paths = new Array[String](0)
+ for (path ← paths) if (path == dirAbsolute) return
+ val newPaths = util.Arrays.copyOf(paths, paths.length + 1)
+ newPaths(newPaths.length - 1) = dirAbsolute
+ usrPathField.set(null, newPaths)
+ }
+
+ private[sigar] def newLibraryPath(dirAbsolutePath: String): String = {
+ Option(System.getProperty(JavaLibraryPath)).fold(dirAbsolutePath)(oldValue ⇒ s"$dirAbsolutePath${File.pathSeparator}$oldValue")
+ }
+
+ private[sigar] def copy(lib: String, tmpDir: File) {
+ val target = new File(tmpDir, lib)
+ if (target.exists()) return
+ write(classOf[Loader].getResourceAsStream(lib), target)
+ }
+
+ private[sigar] def createTmpDir(baseTmp: File): File = {
+ val tmpDir = new File(baseTmp, s"sigar-$Version")
+ if (!tmpDir.exists()) {
+ if (!tmpDir.mkdirs()) throw new RuntimeException(s"Could not create temp sigar directory: ${tmpDir.getAbsolutePath}")
+ }
+ if (!tmpDir.isDirectory) throw new RuntimeException(s"sigar temp directory path is not a directory: ${tmpDir.getAbsolutePath}")
+ if (!tmpDir.canWrite()) throw new RuntimeException(s"sigar temp directory not writeable: ${tmpDir.getAbsolutePath}")
+ tmpDir
+ }
+
+ private[sigar] def loadIndex(): List[String] = {
+ val libs = new ArrayList[String]()
+ val is = classOf[Loader].getResourceAsStream(IndexFile)
+
+ for (line ← Source.fromInputStream(is).getLines()) {
+ val currentLine = line.trim()
+ libs add currentLine
+ }
+ libs
+ }
+
+ private[sigar] def write(input: InputStream, to: File) {
+ val out = new FileOutputStream(to)
+ try {
+ transfer(input, out)
+ } finally {
+ out.close()
+ }
+ }
+
+ private[sigar] def transfer(input: InputStream, out: OutputStream) {
+ val buffer = new Array[Byte](8192)
+
+ @tailrec def transfer() {
+ val read = input.read(buffer)
+ if (read >= 0) {
+ out.write(buffer, 0, read)
+ transfer()
+ }
+ }
+ transfer()
+ }
+
+ private[sigar] def printBanner(sigar: Sigar) = {
+ def loadAverage(sigar: Sigar) = {
+ val average = sigar.getLoadAverage
+ (average(0), average(1), average(2))
+ }
+
+ def uptime(sigar: Sigar) = {
+ val uptime = sigar.getUptime
+ val now = System.currentTimeMillis()
+ new Date(now - (uptime.getUptime() * 1000).toLong)
+ }
+
+ def osInfo() = {
+ val NewLine = "\n"
+ val os = OperatingSystem.getInstance
+ val osInfo = new StringBuilder("------ OS Information ------").append(NewLine)
+ osInfo.append("Description: ").append(os.getDescription).append(NewLine)
+ .append("Name: ").append(os.getName).append(NewLine)
+ .append("Version: ").append(os.getVersion).append(NewLine)
+ .append("Arch: ").append(os.getArch).append(NewLine)
+ .toString()
+ }
+
+ val message =
+ """
+ |
+ | _____ _ __ __ _ _ _ _ _
+ | / ____| | | | \/ | | | (_) | | | | | |
+ || (___ _ _ ___| |_ ___ _ __ ___ | \ / | ___| |_ _ __ _ ___ ___| | ___ __ _ __| | ___ __| |
+ | \___ \| | | / __| __/ _ \ '_ ` _ \| |\/| |/ _ \ __| '__| |/ __/ __| | / _ \ / _` |/ _` |/ _ \/ _` |
+ | ____) | |_| \__ \ || __/ | | | | | | | | __/ |_| | | | (__\__ \ |___| (_) | (_| | (_| | __/ (_| |
+ ||_____/ \__, |___/\__\___|_| |_| |_|_| |_|\___|\__|_| |_|\___|___/______\___/ \__,_|\__,_|\___|\__,_|
+ | __/ |
+ | |___/
+ """.stripMargin + s"\nBoot Time: ${uptime(sigar)} \nLoad Average: ${loadAverage(sigar)} \n${osInfo()}"
+ log.info(message)
+ }
+ class Loader private[sigar]
+}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/index b/kamon-system-metrics/src/main/scala/kamon/system/sigar/index
new file mode 100644
index 00000000..cad1f326
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/index
@@ -0,0 +1,21 @@
+libsigar-amd64-freebsd-6.so
+libsigar-amd64-linux.so
+libsigar-amd64-solaris.so
+libsigar-ia64-hpux-11.sl
+libsigar-ia64-linux.so
+libsigar-pa-hpux-11.sl
+libsigar-ppc64-aix-5.so
+libsigar-ppc64-linux.so
+libsigar-ppc-aix-5.so
+libsigar-ppc-linux.so
+libsigar-s390x-linux.so
+libsigar-sparc64-solaris.so
+libsigar-sparc-solaris.so
+libsigar-universal64-macosx.dylib
+libsigar-universal-macosx.dylib
+libsigar-x86-freebsd-5.so
+libsigar-x86-freebsd-6.so
+libsigar-x86-linux.so
+libsigar-x86-solaris.so
+sigar-amd64-winnt.dll
+sigar-x86-winnt.dll \ No newline at end of file
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-amd64-freebsd-6.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-amd64-freebsd-6.so
new file mode 100644
index 00000000..3e94f0d2
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-amd64-freebsd-6.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-amd64-linux.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-amd64-linux.so
new file mode 100644
index 00000000..5a2e4c24
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-amd64-linux.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-amd64-solaris.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-amd64-solaris.so
new file mode 100644
index 00000000..6396482a
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-amd64-solaris.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ia64-hpux-11.sl b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ia64-hpux-11.sl
new file mode 100644
index 00000000..d92ea4a9
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ia64-hpux-11.sl
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ia64-linux.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ia64-linux.so
new file mode 100644
index 00000000..2bd2fc8e
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ia64-linux.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-pa-hpux-11.sl b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-pa-hpux-11.sl
new file mode 100644
index 00000000..0dfd8a11
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-pa-hpux-11.sl
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc-aix-5.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc-aix-5.so
new file mode 100644
index 00000000..7d4b5199
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc-aix-5.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc-linux.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc-linux.so
new file mode 100644
index 00000000..4394b1b0
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc-linux.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc64-aix-5.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc64-aix-5.so
new file mode 100644
index 00000000..35fd8288
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc64-aix-5.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc64-linux.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc64-linux.so
new file mode 100644
index 00000000..a1ba2529
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-ppc64-linux.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-s390x-linux.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-s390x-linux.so
new file mode 100644
index 00000000..c275f4ac
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-s390x-linux.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-sparc-solaris.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-sparc-solaris.so
new file mode 100644
index 00000000..aa847d2b
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-sparc-solaris.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-sparc64-solaris.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-sparc64-solaris.so
new file mode 100644
index 00000000..6c4fe809
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-sparc64-solaris.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-universal-macosx.dylib b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-universal-macosx.dylib
new file mode 100644
index 00000000..27ab1071
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-universal-macosx.dylib
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-universal64-macosx.dylib b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-universal64-macosx.dylib
new file mode 100644
index 00000000..0c721fec
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-universal64-macosx.dylib
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-freebsd-5.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-freebsd-5.so
new file mode 100644
index 00000000..8c50c611
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-freebsd-5.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-freebsd-6.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-freebsd-6.so
new file mode 100644
index 00000000..f0800274
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-freebsd-6.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-linux.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-linux.so
new file mode 100644
index 00000000..a0b64edd
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-linux.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-solaris.so b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-solaris.so
new file mode 100644
index 00000000..c6452e56
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/libsigar-x86-solaris.so
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/sigar-amd64-winnt.dll b/kamon-system-metrics/src/main/scala/kamon/system/sigar/sigar-amd64-winnt.dll
new file mode 100644
index 00000000..1ec8a035
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/sigar-amd64-winnt.dll
Binary files differ
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/sigar-x86-winnt.dll b/kamon-system-metrics/src/main/scala/kamon/system/sigar/sigar-x86-winnt.dll
new file mode 100644
index 00000000..6afdc016
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/sigar-x86-winnt.dll
Binary files differ
diff --git a/kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala b/kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala
new file mode 100644
index 00000000..09904a5f
--- /dev/null
+++ b/kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala
@@ -0,0 +1,332 @@
+/* =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import akka.actor.ActorSystem
+import akka.testkit.{ TestKitBase, TestProbe }
+import com.typesafe.config.ConfigFactory
+import kamon.Kamon
+import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metrics.CPUMetrics.CPUMetricSnapshot
+import kamon.metrics.GCMetrics.GCMetricSnapshot
+import kamon.metrics.HeapMetrics.HeapMetricSnapshot
+import kamon.metrics.MemoryMetrics.MemoryMetricSnapshot
+import kamon.metrics.NetworkMetrics.NetworkMetricSnapshot
+import kamon.metrics.ProcessCPUMetrics.ProcessCPUMetricsSnapshot
+import kamon.metrics._
+import kamon.system.SystemMetricsExtension
+import org.scalatest.{ Ignore, Matchers, WordSpecLike }
+
+import scala.concurrent.duration._
+
+@Ignore
+class SystemMetricsSpec extends TestKitBase with WordSpecLike with Matchers {
+ implicit lazy val system: ActorSystem = ActorSystem("system-metrics-spec", ConfigFactory.parseString(
+ """
+ |akka {
+ | extensions = ["kamon.system.SystemMetrics"]
+ |}
+ |
+ |kamon.metrics {
+ |
+ | disable-aspectj-weaver-missing-error = true
+ |
+ | tick-interval = 1 second
+ |
+ | system {
+ | cpu {
+ | user {
+ | highest-trackable-value = 999999999
+ | significant-value-digits = 2
+ | }
+ | system {
+ | highest-trackable-value = 999999999
+ | significant-value-digits = 2
+ | }
+ | wait {
+ | highest-trackable-value = 999999999
+ | significant-value-digits = 2
+ | }
+ | idle {
+ | highest-trackable-value = 999999999
+ | significant-value-digits = 2
+ | }
+ | }
+ | process-cpu {
+ | user {
+ | highest-trackable-value = 999999999
+ | significant-value-digits = 2
+ | }
+ | system {
+ | highest-trackable-value = 999999999
+ | significant-value-digits = 2
+ | }
+ | }
+ | memory {
+ | used {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | free {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | buffer {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | cache {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | swap-used {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | swap-free {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | }
+ | network {
+ | rx-bytes {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | tx-bytes {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | rx-errors {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | tx-errors {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | }
+ | heap {
+ | used {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | max {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | committed {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | }
+ | gc {
+ | count {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | time {
+ | highest-trackable-value = 3600000000000
+ | significant-value-digits = 2
+ | }
+ | }
+ | }
+ |}
+ """.stripMargin))
+
+ "the Kamon CPU Metrics" should {
+ "record user, system, wait, idle metrics" in new CPUMetricsListenerFixture {
+ val metricsListener = subscribeToMetrics()
+
+ val CPUMetrics = expectCPUMetrics(metricsListener, 3 seconds)
+ CPUMetrics.user.max should be >= 0L
+ CPUMetrics.system.max should be >= 0L
+ CPUMetrics.cpuWait.max should be >= 0L
+ CPUMetrics.idle.max should be >= 0L
+ }
+ }
+ "the Kamon GC Metrics" should {
+ "record count, time metrics" in new GCMetricsListenerFixture {
+ val metricsListener = subscribeToMetrics()
+
+ val GCMetrics = expectGCMetrics(metricsListener, 3 seconds)
+ GCMetrics.count.max should be > 0L
+ GCMetrics.time.max should be > 0L
+ }
+ }
+
+ "the Kamon Heap Metrics" should {
+    "record used, max, committed metrics" in new HeapMetricsListenerFixture {
+ val metricsListener = subscribeToMetrics()
+
+ val HeapMetrics = expectHeapMetrics(metricsListener, 3 seconds)
+ HeapMetrics.used.max should be >= 0L
+ HeapMetrics.max.max should be >= 0L
+ HeapMetrics.committed.max should be >= 0L
+ }
+ }
+
+ "the Kamon Memory Metrics" should {
+ "record used, free, buffer, cache, swap used, swap free metrics" in new MemoryMetricsListenerFixture {
+ val metricsListener = subscribeToMetrics()
+
+ val MemoryMetrics = expectMemoryMetrics(metricsListener, 3 seconds)
+ MemoryMetrics.used.max should be >= 0L
+ MemoryMetrics.free.max should be >= 0L
+ MemoryMetrics.buffer.max should be >= 0L
+ MemoryMetrics.cache.max should be >= 0L
+ MemoryMetrics.swapUsed.max should be >= 0L
+ MemoryMetrics.swapFree.max should be >= 0L
+ }
+ }
+
+ "the Kamon Network Metrics" should {
+ "record rxBytes, txBytes, rxErrors, txErrors metrics" in new NetworkMetricsListenerFixture {
+ val metricsListener = subscribeToMetrics()
+
+ val NetworkMetrics = expectNetworkMetrics(metricsListener, 3 seconds)
+ NetworkMetrics.rxBytes.max should be >= 0L
+ NetworkMetrics.txBytes.max should be >= 0L
+ NetworkMetrics.rxErrors.max should be >= 0L
+ NetworkMetrics.txErrors.max should be >= 0L
+ }
+ }
+
+ "the Kamon Process CPU Metrics" should {
+ "record user, system metrics" in new ProcessCPUMetricsListenerFixture {
+ val metricsListener = subscribeToMetrics()
+
+ val ProcessCPUMetrics = expectProcessCPUMetrics(metricsListener, 3 seconds)
+ ProcessCPUMetrics.user.max should be > 0L
+ ProcessCPUMetrics.system.max should be > 0L
+ }
+ }
+
+ def expectCPUMetrics(listener: TestProbe, waitTime: FiniteDuration): CPUMetricSnapshot = {
+ val tickSnapshot = within(waitTime) {
+ listener.expectMsgType[TickMetricSnapshot]
+ }
+ val cpuMetricsOption = tickSnapshot.metrics.get(CPUMetrics(SystemMetricsExtension.CPU))
+ cpuMetricsOption should not be empty
+ cpuMetricsOption.get.asInstanceOf[CPUMetricSnapshot]
+ }
+
+ trait CPUMetricsListenerFixture {
+ def subscribeToMetrics(): TestProbe = {
+ val metricsListener = TestProbe()
+ Kamon(Metrics).subscribe(CPUMetrics, "*", metricsListener.ref, permanently = true)
+ // Wait for one empty snapshot before proceeding to the test.
+ metricsListener.expectMsgType[TickMetricSnapshot]
+ metricsListener
+ }
+ }
+
+ def expectGCMetrics(listener: TestProbe, waitTime: FiniteDuration): GCMetricSnapshot = {
+ val tickSnapshot = within(waitTime) {
+ listener.expectMsgType[TickMetricSnapshot]
+ }
+
+ val gcMetricsOption = tickSnapshot.metrics.get(GCMetrics(SystemMetricsExtension.garbageCollectors(0).getName))
+ gcMetricsOption should not be empty
+ gcMetricsOption.get.asInstanceOf[GCMetricSnapshot]
+ }
+
+ trait GCMetricsListenerFixture {
+ def subscribeToMetrics(): TestProbe = {
+ val metricsListener = TestProbe()
+ Kamon(Metrics).subscribe(GCMetrics, "*", metricsListener.ref, permanently = true)
+ // Wait for one empty snapshot before proceeding to the test.
+ metricsListener.expectMsgType[TickMetricSnapshot]
+ metricsListener
+ }
+ }
+
+ def expectHeapMetrics(listener: TestProbe, waitTime: FiniteDuration): HeapMetricSnapshot = {
+ val tickSnapshot = within(waitTime) {
+ listener.expectMsgType[TickMetricSnapshot]
+ }
+ val heapMetricsOption = tickSnapshot.metrics.get(HeapMetrics(SystemMetricsExtension.Heap))
+ heapMetricsOption should not be empty
+ heapMetricsOption.get.asInstanceOf[HeapMetricSnapshot]
+ }
+
+ trait HeapMetricsListenerFixture {
+ def subscribeToMetrics(): TestProbe = {
+ val metricsListener = TestProbe()
+ Kamon(Metrics).subscribe(HeapMetrics, "*", metricsListener.ref, permanently = true)
+ // Wait for one empty snapshot before proceeding to the test.
+ metricsListener.expectMsgType[TickMetricSnapshot]
+ metricsListener
+ }
+ }
+
+ def expectMemoryMetrics(listener: TestProbe, waitTime: FiniteDuration): MemoryMetricSnapshot = {
+ val tickSnapshot = within(waitTime) {
+ listener.expectMsgType[TickMetricSnapshot]
+ }
+ val memoryMetricsOption = tickSnapshot.metrics.get(MemoryMetrics(SystemMetricsExtension.Memory))
+ memoryMetricsOption should not be empty
+ memoryMetricsOption.get.asInstanceOf[MemoryMetricSnapshot]
+ }
+
+ trait MemoryMetricsListenerFixture {
+ def subscribeToMetrics(): TestProbe = {
+ val metricsListener = TestProbe()
+ Kamon(Metrics).subscribe(MemoryMetrics, "*", metricsListener.ref, permanently = true)
+ // Wait for one empty snapshot before proceeding to the test.
+ metricsListener.expectMsgType[TickMetricSnapshot]
+ metricsListener
+ }
+ }
+
+ def expectNetworkMetrics(listener: TestProbe, waitTime: FiniteDuration): NetworkMetricSnapshot = {
+ val tickSnapshot = within(waitTime) {
+ listener.expectMsgType[TickMetricSnapshot]
+ }
+ val networkMetricsOption = tickSnapshot.metrics.get(NetworkMetrics(SystemMetricsExtension.Network))
+ networkMetricsOption should not be empty
+ networkMetricsOption.get.asInstanceOf[NetworkMetricSnapshot]
+ }
+
+ trait NetworkMetricsListenerFixture {
+ def subscribeToMetrics(): TestProbe = {
+ val metricsListener = TestProbe()
+ Kamon(Metrics).subscribe(NetworkMetrics, "*", metricsListener.ref, permanently = true)
+ // Wait for one empty snapshot before proceeding to the test.
+ metricsListener.expectMsgType[TickMetricSnapshot]
+ metricsListener
+ }
+ }
+
+ def expectProcessCPUMetrics(listener: TestProbe, waitTime: FiniteDuration): ProcessCPUMetricsSnapshot = {
+ val tickSnapshot = within(waitTime) {
+ listener.expectMsgType[TickMetricSnapshot]
+ }
+ val processCPUMetricsOption = tickSnapshot.metrics.get(ProcessCPUMetrics(SystemMetricsExtension.ProcessCPU))
+ processCPUMetricsOption should not be empty
+ processCPUMetricsOption.get.asInstanceOf[ProcessCPUMetricsSnapshot]
+ }
+
+ trait ProcessCPUMetricsListenerFixture {
+ def subscribeToMetrics(): TestProbe = {
+ val metricsListener = TestProbe()
+ Kamon(Metrics).subscribe(ProcessCPUMetrics, "*", metricsListener.ref, permanently = true)
+ // Wait for one empty snapshot before proceeding to the test.
+ metricsListener.expectMsgType[TickMetricSnapshot]
+ metricsListener
+ }
+ }
+}
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index 3c2c6d0f..ce78497d 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -35,7 +35,8 @@ object Dependencies {
val slf4Api = "org.slf4j" % "slf4j-api" % slf4jVersion
val slf4nop = "org.slf4j" % "slf4j-nop" % slf4jVersion
val jsr166 = "io.gatling" % "jsr166e" % "1.0"
-
+ val scalaCompiler = "org.scala-lang" % "scala-compiler" % Settings.ScalaVersion
+ val sigar = "org.fusesource" % "sigar" % "1.6.4"
def compile (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "compile")
def provided (deps: ModuleID*): Seq[ModuleID] = deps map (_ % "provided")
diff --git a/project/Projects.scala b/project/Projects.scala
index e8675186..8259f2aa 100644
--- a/project/Projects.scala
+++ b/project/Projects.scala
@@ -7,17 +7,21 @@ object Projects extends Build {
import Dependencies._
lazy val root = Project("root", file("."))
- .aggregate(kamonCore, kamonSpray, kamonNewrelic, kamonPlayground, kamonDashboard, kamonTestkit, kamonPlay, kamonStatsd)
+ .aggregate(kamonCore, kamonSpray, kamonNewrelic, kamonPlayground, kamonDashboard, kamonTestkit, kamonPlay, kamonStatsD,
+ kamonDatadog, kamonSystemMetrics, kamonLogReporter)
.settings(basicSettings: _*)
.settings(formatSettings: _*)
.settings(noPublishing: _*)
lazy val kamonCore = Project("kamon-core", file("kamon-core"))
+ .dependsOn(kamonMacros % "compile-internal, test-internal")
.settings(basicSettings: _*)
.settings(formatSettings: _*)
.settings(aspectJSettings: _*)
.settings(
+ mappings in (Compile, packageBin) ++= mappings.in(kamonMacros, Compile, packageBin).value,
+ mappings in (Compile, packageSrc) ++= mappings.in(kamonMacros, Compile, packageSrc).value,
libraryDependencies ++=
compile(akkaActor, aspectJ, aspectjWeaver, hdrHistogram, jsr166) ++
provided(logback) ++
@@ -32,7 +36,8 @@ object Projects extends Build {
libraryDependencies ++=
compile(akkaActor, aspectJ, sprayCan, sprayClient, sprayRouting) ++
test(scalatest, akkaTestKit, sprayTestkit, slf4Api, slf4nop))
- .dependsOn(kamonCore, kamonTestkit)
+ .dependsOn(kamonCore)
+ .dependsOn(kamonTestkit % "test")
lazy val kamonNewrelic = Project("kamon-newrelic", file("kamon-newrelic"))
@@ -53,7 +58,7 @@ object Projects extends Build {
.settings(
libraryDependencies ++=
compile(akkaActor, akkaSlf4j, sprayCan, sprayClient, sprayRouting, logback))
- .dependsOn(kamonSpray, kamonNewrelic, kamonStatsd, kamonDatadog)
+ .dependsOn(kamonSpray, kamonNewrelic, kamonStatsD, kamonDatadog, kamonLogReporter, kamonSystemMetrics)
lazy val kamonDashboard = Project("kamon-dashboard", file("kamon-dashboard"))
@@ -76,18 +81,39 @@ object Projects extends Build {
.settings(libraryDependencies ++= compile(play, playWS, aspectJ) ++ test(playTest, akkaTestKit, slf4Api))
.dependsOn(kamonCore)
- lazy val kamonStatsd = Project("kamon-statsd", file("kamon-statsd"))
+ lazy val kamonStatsD = Project("kamon-statsd", file("kamon-statsd"))
.settings(basicSettings: _*)
.settings(formatSettings: _*)
.settings(libraryDependencies ++= compile(akkaActor) ++ test(scalatest, akkaTestKit, slf4Api, slf4nop))
.dependsOn(kamonCore)
+ .dependsOn(kamonSystemMetrics % "provided")
lazy val kamonDatadog = Project("kamon-datadog", file("kamon-datadog"))
.settings(basicSettings: _*)
.settings(formatSettings: _*)
+ .settings(libraryDependencies ++= compile(akkaActor) ++ test(scalatest, akkaTestKit, slf4Api, slf4nop))
+ .dependsOn(kamonCore)
+ .dependsOn(kamonSystemMetrics % "provided")
+
+ lazy val kamonLogReporter = Project("kamon-log-reporter", file("kamon-log-reporter"))
+ .settings(basicSettings: _*)
+ .settings(formatSettings: _*)
.settings(libraryDependencies ++= compile(akkaActor) ++ test(scalatest, akkaTestKit, slf4Api, slf4nop))
.dependsOn(kamonCore)
+ .dependsOn(kamonSystemMetrics % "provided")
+ lazy val kamonMacros = Project("kamon-macros", file("kamon-macros"))
+ .settings(basicSettings: _*)
+ .settings(formatSettings: _*)
+ .settings(noPublishing: _*)
+ .settings(libraryDependencies ++= compile(scalaCompiler))
+
+ lazy val kamonSystemMetrics = Project("kamon-system-metrics", file("kamon-system-metrics"))
+ .settings(basicSettings: _*)
+ .settings(formatSettings: _*)
+ .settings(libraryDependencies ++= compile(sigar) ++ test(scalatest, akkaTestKit, slf4Api, slf4nop))
+ .settings(fork in Test := true)
+ .dependsOn(kamonCore)
val noPublishing = Seq(publish := (), publishLocal := (), publishArtifact := false)
}
diff --git a/project/Settings.scala b/project/Settings.scala
index 3f5f669e..5f489239 100644
--- a/project/Settings.scala
+++ b/project/Settings.scala
@@ -8,10 +8,15 @@ import scalariform.formatter.preferences._
object Settings {
- lazy val basicSettings = seq(
- scalaVersion := "2.11.1",
+ val ScalaVersion = "2.11.1"
+
+ lazy val basicSettings = Seq(
+ scalaVersion := ScalaVersion,
resolvers ++= Dependencies.resolutionRepos,
fork in run := true,
+ javacOptions := Seq(
+ "-source", "1.6", "-target", "1.6"
+ ),
scalacOptions := Seq(
"-encoding",
"utf8",
@@ -23,7 +28,7 @@ object Settings {
"-language:postfixOps",
"-language:implicitConversions",
"-Xlog-reflective-calls"
- )) ++ publishSettings ++ releaseSettings ++ net.virtualvoid.sbt.graph.Plugin.graphSettings
+ )) ++ publishSettings ++ releaseSettings
lazy val formatSettings = SbtScalariform.scalariformSettings ++ Seq(
ScalariformKeys.preferences in Compile := formattingPreferences,
@@ -33,7 +38,7 @@ object Settings {
def formattingPreferences =
FormattingPreferences()
.setPreference(RewriteArrowSymbols, true)
- .setPreference(AlignParameters, true)
+ .setPreference(AlignParameters, false)
.setPreference(AlignSingleLineCaseStatements, true)
.setPreference(DoubleIndentClassDeclaration, true)
} \ No newline at end of file