-rw-r--r--  kamon-akka-remote/src/main/resources/META-INF/aop.xml | 2
-rw-r--r--  kamon-akka-remote/src/main/scala/kamon/akka/instrumentation/RemotingInstrumentation.scala (renamed from kamon-akka-remote/src/main/scala/kamon/instrumentation/akka/RemotingInstrumentation.scala) | 30
-rw-r--r--  kamon-akka-remote/src/test/resources/logback.xml (renamed from kamon-akka/src/test/resources/logback.xml) | 7
-rw-r--r--  kamon-akka-remote/src/test/scala/kamon/akka/instrumentation/RemotingInstrumentationSpec.scala (renamed from kamon-akka-remote/src/test/scala/kamon/instrumentation/akka/RemotingInstrumentationSpec.scala) | 26
-rw-r--r--  kamon-akka/src/main/resources/reference.conf | 35
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/ActorMetrics.scala | 93
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/AkkaExtension.scala | 8
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/DispatcherMetrics.scala | 104
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/RouterMetrics.scala | 87
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorCellInstrumentation.scala (renamed from kamon-akka/src/main/scala/kamon/instrumentation/akka/ActorCellInstrumentation.scala) | 62
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorLoggingInstrumentation.scala (renamed from kamon-akka/src/main/scala/kamon/instrumentation/akka/ActorLoggingInstrumentation.scala) | 4
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorSystemMessageInstrumentation.scala (renamed from kamon-akka/src/main/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentation.scala) | 6
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/instrumentation/AskPatternInstrumentation.scala (renamed from kamon-akka/src/main/scala/kamon/instrumentation/akka/AskPatternInstrumentation.scala) | 43
-rw-r--r--  kamon-akka/src/main/scala/kamon/akka/instrumentation/DispatcherInstrumentation.scala | 168
-rw-r--r--  kamon-akka/src/main/scala/kamon/instrumentation/akka/DispatcherInstrumentation.scala | 164
-rw-r--r--  kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorCellInstrumentationSpec.scala (renamed from kamon-akka/src/test/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala) | 39
-rw-r--r--  kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorLoggingInstrumentationSpec.scala (renamed from kamon-akka/src/test/scala/kamon/instrumentation/akka/ActorLoggingInstrumentationSpec.scala) | 27
-rw-r--r--  kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorSystemMessageInstrumentationSpec.scala (renamed from kamon-akka/src/test/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentationSpec.scala) | 64
-rw-r--r--  kamon-akka/src/test/scala/kamon/akka/instrumentation/AskPatternInstrumentationSpec.scala (renamed from kamon-akka/src/test/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala) | 31
-rw-r--r--  kamon-akka/src/test/scala/kamon/metric/ActorMetricsSpec.scala | 77
-rw-r--r--  kamon-akka/src/test/scala/kamon/metric/DispatcherMetricsSpec.scala | 213
-rw-r--r--  kamon-akka/src/test/scala/kamon/metric/RouterMetricsSpec.scala | 84
-rw-r--r--  kamon-core/src/main/resources/META-INF/aop.xml | 8
-rw-r--r--  kamon-core/src/main/resources/reference.conf | 120
-rw-r--r--  kamon-core/src/main/scala/kamon/Kamon.scala | 19
-rw-r--r--  kamon-core/src/main/scala/kamon/ModuleSupervisor.scala | 48
-rw-r--r--  kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala | 114
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/Entity.scala | 52
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/EntityMetrics.scala | 75
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/EntityRecorder.scala | 157
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/EntitySnapshot.scala | 47
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/MetricKey.scala | 153
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala | 166
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/MetricsExtensionSettings.scala | 100
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/Scale.scala | 31
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/Subscriptions.scala | 173
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/SubscriptionsDispatcher.scala | 115
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/TickMetricSnapshotBuffer.scala | 49
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala | 74
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/UserMetrics.scala | 278
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/AtomicHistogramFieldsAccessor.scala (renamed from kamon-core/src/main/scala/kamon/instrumentation/hdrhistogram/AtomicHistogramFieldsAccessor.scala) | 0
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala | 15
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala | 108
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala | 164
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/Instrument.scala | 56
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/InstrumentFactory.scala | 35
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/InstrumentSettings.scala | 67
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala | 29
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/instrument/UnitOfMeasurement.scala | 55
-rw-r--r--  kamon-core/src/main/scala/kamon/metric/package.scala | 34
-rw-r--r--  kamon-core/src/main/scala/kamon/standalone/KamonStandalone.scala | 61
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/Incubator.scala | 2
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/MetricsOnlyContext.scala | 37
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/Sampler.scala | 3
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TraceContext.scala | 66
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TraceExtension.scala | 91
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TraceLocal.scala | 4
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala | 79
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TracerExtension.scala | 94
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TracerExtensionSettings.scala | 30
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/TracingContext.scala | 13
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/logging/LogbackTraceTokenConverter.scala | 4
-rw-r--r--  kamon-core/src/main/scala/kamon/trace/logging/MdcKeysSupport.scala | 4
-rw-r--r--  kamon-core/src/main/scala/kamon/util/ConfigTools.scala | 26
-rw-r--r--  kamon-core/src/main/scala/kamon/util/FastDispatch.scala | 22
-rw-r--r--  kamon-core/src/main/scala/kamon/util/MapMerge.scala | 27
-rw-r--r--  kamon-core/src/main/scala/kamon/util/Timestamp.scala (renamed from kamon-core/src/main/scala/kamon/TimeUnits.scala) | 12
-rw-r--r--  kamon-core/src/main/scala/kamon/util/TriemapAtomicGetOrElseUpdate.scala | 18
-rw-r--r--  kamon-core/src/test/resources/logback.xml | 24
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala | 112
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala | 65
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala | 107
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala | 296
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala | 1
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala | 66
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala | 34
-rw-r--r--  kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala | 22
-rw-r--r--  kamon-core/src/test/scala/kamon/testkit/BaseKamonSpec.scala | 34
-rw-r--r--  kamon-core/src/test/scala/kamon/trace/SimpleTraceSpec.scala | 76
-rw-r--r--  kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala | 94
-rw-r--r--  kamon-core/src/test/scala/kamon/trace/TraceLocalSpec.scala | 23
-rw-r--r--  kamon-core/src/test/scala/kamon/util/GlobPathFilterSpec.scala | 9
-rw-r--r--  kamon-datadog/src/main/resources/reference.conf | 11
-rw-r--r--  kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala | 66
-rw-r--r--  kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala | 36
-rw-r--r--  kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala | 106
-rw-r--r--  kamon-jdbc/src/main/resources/reference.conf | 11
-rw-r--r--  kamon-jdbc/src/main/scala/kamon/jdbc/instrumentation/StatementInstrumentation.scala | 44
-rw-r--r--  kamon-jdbc/src/main/scala/kamon/jdbc/metric/StatementsMetrics.scala | 75
-rw-r--r--  kamon-jdbc/src/test/scala/kamon/jdbc/instrumentation/StatementInstrumentationSpec.scala | 195
-rw-r--r--  kamon-log-reporter/src/main/resources/reference.conf | 3
-rw-r--r--  kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala | 237
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetricExtractor.scala | 18
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/JsonProtocol.scala | 2
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/Metric.scala | 25
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/MetricReporter.scala | 18
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/NewRelicErrorLogger.scala | 6
-rw-r--r--  kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetricExtractor.scala | 71
-rw-r--r--  kamon-newrelic/src/test/scala/kamon/newrelic/AgentSpec.scala | 2
-rw-r--r--  kamon-newrelic/src/test/scala/kamon/newrelic/MetricReporterSpec.scala | 24
-rw-r--r--  kamon-play/src/main/resources/reference.conf | 2
-rw-r--r--  kamon-play/src/main/scala/kamon/play/Play.scala | 10
-rw-r--r--  kamon-play/src/main/scala/kamon/play/action/KamonTraceActions.scala | 4
-rw-r--r--  kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala | 25
-rw-r--r--  kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala | 7
-rw-r--r--  kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala | 31
-rw-r--r--  kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala | 32
-rw-r--r--  kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala | 28
-rw-r--r--  kamon-scala/src/main/resources/META-INF/aop.xml | 17
-rw-r--r--  kamon-scala/src/main/scala/kamon/scala/instrumentation/FutureInstrumentation.scala (renamed from kamon-core/src/main/scala/kamon/instrumentation/scala/FutureInstrumentation.scala) | 6
-rw-r--r--  kamon-scala/src/main/scala/kamon/scalaz/instrumentation/FutureInstrumentation.scala (renamed from kamon-core/src/main/scala/kamon/instrumentation/scalaz/FutureInstrumentation.scala) | 6
-rw-r--r--  kamon-scala/src/test/scala/kamon/scala/instrumentation/FutureInstrumentationSpec.scala (renamed from kamon-core/src/test/scala/kamon/instrumentation/scala/FutureInstrumentationSpec.scala) | 25
-rw-r--r--  kamon-scala/src/test/scala/kamon/scalaz/instrumentation/FutureInstrumentationSpec.scala (renamed from kamon-core/src/test/scala/kamon/instrumentation/scalaz/FutureInstrumentationSpec.scala) | 29
-rw-r--r--  kamon-spray/src/main/resources/META-INF/aop.xml | 6
-rw-r--r--  kamon-spray/src/main/resources/reference.conf | 7
-rw-r--r--  kamon-spray/src/main/scala/kamon/spray/KamonTraceDirectives.scala | 4
-rw-r--r--  kamon-spray/src/main/scala/kamon/spray/SprayExtension.scala (renamed from kamon-spray/src/main/scala/kamon/spray/Spray.scala) | 61
-rw-r--r--  kamon-spray/src/main/scala/kamon/spray/SprayExtensionSettings.scala | 35
-rw-r--r--  kamon-spray/src/main/scala/kamon/spray/instrumentation/ClientRequestInstrumentation.scala (renamed from kamon-spray/src/main/scala/spray/can/client/ClientRequestInstrumentation.scala) | 29
-rw-r--r--  kamon-spray/src/main/scala/kamon/spray/instrumentation/ServerRequestInstrumentation.scala (renamed from kamon-spray/src/main/scala/spray/can/server/ServerRequestInstrumentation.scala) | 38
-rw-r--r--  kamon-spray/src/test/resources/application.conf | 25
-rw-r--r--  kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala | 147
-rw-r--r--  kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala | 67
-rw-r--r--  kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala | 51
-rw-r--r--  kamon-statsd/src/main/resources/reference.conf | 11
-rw-r--r--  kamon-statsd/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala | 16
-rw-r--r--  kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala | 65
-rw-r--r--  kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala | 8
-rw-r--r--  kamon-statsd/src/test/scala/kamon/statsd/SimpleMetricKeyGeneratorSpec.scala | 14
-rw-r--r--  kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala | 133
-rw-r--r--  kamon-system-metrics/src/main/resources/reference.conf | 277
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/CPUMetrics.scala | 88
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/ClassLoadingMetrics.scala | 85
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/ContextSwitchesMetrics.scala | 81
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/DiskMetrics.scala | 85
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/GCMetrics.scala | 77
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/HeapMetrics.scala | 87
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/LoadAverageMetrics.scala | 80
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/MemoryMetrics.scala | 92
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/NetworkMetrics.scala | 91
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/NonHeapMetrics.scala | 86
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/ProcessCPUMetrics.scala | 76
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/metrics/ThreadMetrics.scala | 85
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/GcMetricsCollector.scala | 77
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/SystemMetrics.scala | 78
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsBanner.scala | 91
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsCollector.scala | 266
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsExtension.scala | 70
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/custom/ContextSwitchesMetrics.scala | 96
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/jmx/ClassLoadingMetrics.scala | 28
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/jmx/GarbageCollectionMetrics.scala | 34
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/jmx/HeapMemoryMetrics.scala | 29
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/jmx/JmxSystemMetricRecorderCompanion.scala | 13
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/jmx/NonHeapMemoryMetrics.scala | 33
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/jmx/ThreadsMetrics.scala | 28
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/CpuMetrics.scala | 29
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/DiffRecordingHistogram.scala | 41
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/FileSystemMetrics.scala | 25
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/LoadAverageMetrics.scala | 25
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/MemoryMetrics.scala | 36
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/NetworkMetrics.scala | 33
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/ProcessCpuMetrics.scala | 39
-rw-r--r--  kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarMetricsUpdater.scala | 59
-rw-r--r--  kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala | 450
-rw-r--r--  kamon-testkit/src/main/scala/testkit/AkkaExtensionSwap.scala (renamed from kamon-core/src/main/scala/kamon/AkkaExtensionSwap.scala) | 5
-rw-r--r--  kamon-testkit/src/main/scala/testkit/TestProbeInstrumentation.scala | 4
-rw-r--r--  project/Dependencies.scala | 2
-rw-r--r--  project/Projects.scala | 40
168 files changed, 4498 insertions, 5584 deletions
diff --git a/kamon-akka-remote/src/main/resources/META-INF/aop.xml b/kamon-akka-remote/src/main/resources/META-INF/aop.xml
index ba1c8e79..e84a6094 100644
--- a/kamon-akka-remote/src/main/resources/META-INF/aop.xml
+++ b/kamon-akka-remote/src/main/resources/META-INF/aop.xml
@@ -3,7 +3,7 @@
<aspectj>
<aspects>
<!-- Remoting and Cluster -->
- <aspect name="akka.remote.instrumentation.RemotingInstrumentation"/>
+ <aspect name="akka.kamon.instrumentation.RemotingInstrumentation"/>
</aspects>
<weaver>
diff --git a/kamon-akka-remote/src/main/scala/kamon/instrumentation/akka/RemotingInstrumentation.scala b/kamon-akka-remote/src/main/scala/kamon/akka/instrumentation/RemotingInstrumentation.scala
index 6bdee063..eb18ed87 100644
--- a/kamon-akka-remote/src/main/scala/kamon/instrumentation/akka/RemotingInstrumentation.scala
+++ b/kamon-akka-remote/src/main/scala/kamon/akka/instrumentation/RemotingInstrumentation.scala
@@ -1,12 +1,12 @@
-package akka.remote.instrumentation
+package akka.kamon.instrumentation
import akka.actor.{ ActorRef, Address }
import akka.remote.instrumentation.TraceContextAwareWireFormats.{ TraceContextAwareRemoteEnvelope, RemoteTraceContext, AckAndTraceContextAwareEnvelopeContainer }
import akka.remote.{ RemoteActorRefProvider, Ack, SeqNo }
import akka.remote.WireFormats._
import akka.util.ByteString
-import kamon.MilliTimestamp
-import kamon.trace.TraceRecorder
+import kamon.trace.{ Tracer, TraceContext }
+import kamon.util.MilliTimestamp
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
@@ -32,15 +32,13 @@ class RemotingInstrumentation {
envelopeBuilder.setMessage(serializedMessage)
// Attach the TraceContext info, if available.
- if (!TraceRecorder.currentContext.isEmpty) {
- val context = TraceRecorder.currentContext
- val relativeStartMilliTime = System.currentTimeMillis - ((System.nanoTime - context.startRelativeTimestamp.nanos) / 1000000)
+ TraceContext.map { context ⇒
envelopeBuilder.setTraceContext(RemoteTraceContext.newBuilder()
.setTraceName(context.name)
.setTraceToken(context.token)
.setIsOpen(context.isOpen)
- .setStartMilliTime(relativeStartMilliTime)
+ .setStartMilliTime(context.startTimestamp.toMilliTimestamp.millis)
.build())
}
@@ -85,14 +83,16 @@ class RemotingInstrumentation {
if (ackAndEnvelope.hasEnvelope && ackAndEnvelope.getEnvelope.hasTraceContext) {
val remoteTraceContext = ackAndEnvelope.getEnvelope.getTraceContext
val system = provider.guardian.underlying.system
- val ctx = TraceRecorder.joinRemoteTraceContext(
- remoteTraceContext.getTraceName(),
- remoteTraceContext.getTraceToken(),
- new MilliTimestamp(remoteTraceContext.getStartMilliTime()),
- remoteTraceContext.getIsOpen(),
- system)
-
- TraceRecorder.setContext(ctx)
+ val tracer = Tracer.get(system)
+
+ val ctx = tracer.newContext(
+ remoteTraceContext.getTraceName,
+ remoteTraceContext.getTraceToken,
+ new MilliTimestamp(remoteTraceContext.getStartMilliTime()).toRelativeNanoTimestamp,
+ remoteTraceContext.getIsOpen,
+ isLocal = false)
+
+ TraceContext.setCurrentContext(ctx)
}
pjp.proceed()
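
The hunks above capture the core of this commit's tracing API migration: the static TraceRecorder helpers give way to combinators on the TraceContext companion object. A minimal sketch of the two sides of that pattern, using only the calls visible in this diff (incomingContext is a hypothetical placeholder for a context obtained elsewhere):

    // Producer side: the block runs only when a non-empty context is current.
    TraceContext.map { context ⇒
      context.token // any context-dependent work goes here
    }

    // Consumer side: make a given context current for the duration of a block.
    TraceContext.withContext(incomingContext) {
      pjp.proceed()
    }
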
diff --git a/kamon-akka/src/test/resources/logback.xml b/kamon-akka-remote/src/test/resources/logback.xml
index 10c9aa35..dd623d61 100644
--- a/kamon-akka/src/test/resources/logback.xml
+++ b/kamon-akka-remote/src/test/resources/logback.xml
@@ -1,4 +1,8 @@
<configuration scan="true">
+ <contextListener class="ch.qos.logback.classic.jul.LevelChangePropagator">
+ <resetJUL>true</resetJUL>
+ </contextListener>
+
<conversionRule conversionWord="traceToken" converterClass="kamon.trace.logging.LogbackTraceTokenConverter"/>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
@@ -8,7 +12,6 @@
</appender>
<root level="error">
- <appender-ref ref="STDOUT" />
+ <appender-ref ref="STDOUT"/>
</root>
-
</configuration>
diff --git a/kamon-akka-remote/src/test/scala/kamon/instrumentation/akka/RemotingInstrumentationSpec.scala b/kamon-akka-remote/src/test/scala/kamon/akka/instrumentation/RemotingInstrumentationSpec.scala
index 8a3973ca..367a7349 100644
--- a/kamon-akka-remote/src/test/scala/kamon/instrumentation/akka/RemotingInstrumentationSpec.scala
+++ b/kamon-akka-remote/src/test/scala/kamon/akka/instrumentation/RemotingInstrumentationSpec.scala
@@ -8,7 +8,8 @@ import akka.routing.RoundRobinGroup
import akka.testkit.{ ImplicitSender, TestKitBase }
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
-import kamon.trace.TraceRecorder
+import kamon.Kamon
+import kamon.trace.TraceContext
import org.scalatest.{ Matchers, WordSpecLike }
import scala.concurrent.duration._
@@ -18,6 +19,8 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
implicit lazy val system: ActorSystem = ActorSystem("remoting-spec-local-system", ConfigFactory.parseString(
"""
|akka {
+ | loggers = ["akka.event.slf4j.Slf4jLogger"]
+ |
| actor {
| provider = "akka.remote.RemoteActorRefProvider"
| }
@@ -34,6 +37,8 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
val remoteSystem: ActorSystem = ActorSystem("remoting-spec-remote-system", ConfigFactory.parseString(
"""
|akka {
+ | loggers = ["akka.event.slf4j.Slf4jLogger"]
+ |
| actor {
| provider = "akka.remote.RemoteActorRefProvider"
| }
@@ -47,11 +52,13 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
|}
""".stripMargin))
+ lazy val kamon = Kamon(system)
val RemoteSystemAddress = AddressFromURIString("akka.tcp://remoting-spec-remote-system@127.0.0.1:2553")
+ import kamon.tracer.newContext
"The Remoting instrumentation" should {
"propagate the TraceContext when creating a new remote actor" in {
- TraceRecorder.withNewTraceContext("deploy-remote-actor", Some("deploy-remote-actor-1")) {
+ TraceContext.withContext(newContext("deploy-remote-actor", "deploy-remote-actor-1")) {
system.actorOf(TraceTokenReplier.remoteProps(Some(testActor), RemoteSystemAddress), "remote-deploy-fixture")
}
@@ -61,7 +68,7 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
"propagate the TraceContext when sending a message to a remotely deployed actor" in {
val remoteRef = system.actorOf(TraceTokenReplier.remoteProps(None, RemoteSystemAddress), "remote-message-fixture")
- TraceRecorder.withNewTraceContext("message-remote-actor", Some("message-remote-actor-1")) {
+ TraceContext.withContext(newContext("message-remote-actor", "message-remote-actor-1")) {
remoteRef ! "reply-trace-token"
}
@@ -73,7 +80,7 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
implicit val askTimeout = Timeout(10 seconds)
val remoteRef = system.actorOf(TraceTokenReplier.remoteProps(None, RemoteSystemAddress), "remote-ask-and-pipe-fixture")
- TraceRecorder.withNewTraceContext("ask-and-pipe-remote-actor", Some("ask-and-pipe-remote-actor-1")) {
+ TraceContext.withContext(newContext("ask-and-pipe-remote-actor", "ask-and-pipe-remote-actor-1")) {
(remoteRef ? "reply-trace-token") pipeTo (testActor)
}
@@ -85,7 +92,7 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
remoteSystem.actorOf(TraceTokenReplier.props(None), "actor-selection-target-b")
val selection = system.actorSelection(RemoteSystemAddress + "/user/actor-selection-target-*")
- TraceRecorder.withNewTraceContext("message-remote-actor-selection", Some("message-remote-actor-selection-1")) {
+ TraceContext.withContext(newContext("message-remote-actor-selection", "message-remote-actor-selection-1")) {
selection ! "reply-trace-token"
}
@@ -97,7 +104,7 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
"propagate the TraceContext a remotely supervised child fails" in {
val supervisor = system.actorOf(Props(new SupervisorOfRemote(testActor, RemoteSystemAddress)))
- TraceRecorder.withNewTraceContext("remote-supervision", Some("remote-supervision-1")) {
+ TraceContext.withContext(newContext("remote-supervision", "remote-supervision-1")) {
supervisor ! "fail"
}
@@ -108,7 +115,7 @@ class RemotingInstrumentationSpec extends TestKitBase with WordSpecLike with Mat
remoteSystem.actorOf(TraceTokenReplier.props(None), "remote-routee")
val router = system.actorOf(RoundRobinGroup(List(RemoteSystemAddress + "/user/actor-selection-target-*")).props(), "router")
- TraceRecorder.withNewTraceContext("remote-routee", Some("remote-routee-1")) {
+ TraceContext.withContext(newContext("remote-routee", "remote-routee-1")) {
router ! "reply-trace-token"
}
@@ -127,12 +134,11 @@ class TraceTokenReplier(creationTraceContextListener: Option[ActorRef]) extends
case "fail" ⇒
throw new ArithmeticException("Division by zero.")
case "reply-trace-token" ⇒
- log.info("Sending back the TT: " + TraceRecorder.currentContext.token)
sender ! currentTraceContextInfo
}
def currentTraceContextInfo: String = {
- val ctx = TraceRecorder.currentContext
+ val ctx = TraceContext.currentContext
s"name=${ctx.name}|token=${ctx.token}|isOpen=${ctx.isOpen}"
}
}
@@ -161,7 +167,7 @@ class SupervisorOfRemote(traceContextListener: ActorRef, remoteAddress: Address)
}
def currentTraceContextInfo: String = {
- val ctx = TraceRecorder.currentContext
+ val ctx = TraceContext.currentContext
s"name=${ctx.name}|token=${ctx.token}|isOpen=${ctx.isOpen}"
}
}
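
For the specs the migration is mechanical. A before/after sketch of the change applied throughout this file, with `kamon = Kamon(system)` and `import kamon.tracer.newContext` as introduced above:

    // Before (TraceRecorder, removed in this commit):
    TraceRecorder.withNewTraceContext("message-remote-actor", Some("message-remote-actor-1")) {
      remoteRef ! "reply-trace-token"
    }

    // After (TraceContext plus the Tracer extension):
    TraceContext.withContext(newContext("message-remote-actor", "message-remote-actor-1")) {
      remoteRef ! "reply-trace-token"
    }
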
diff --git a/kamon-akka/src/main/resources/reference.conf b/kamon-akka/src/main/resources/reference.conf
index 4f742ee6..902a682d 100644
--- a/kamon-akka/src/main/resources/reference.conf
+++ b/kamon-akka/src/main/resources/reference.conf
@@ -13,39 +13,6 @@ kamon {
ask-pattern-timeout-warning = off
# Default dispatcher for all akka module operations
- dispatcher = ${kamon.default-dispatcher}
- }
-
- metrics.precision {
- actor {
- processing-time = ${kamon.metrics.precision.default-histogram-precision}
- time-in-mailbox = ${kamon.metrics.precision.default-histogram-precision}
- mailbox-size = ${kamon.metrics.precision.default-min-max-counter-precision}
- }
-
- router {
- routing-time = ${kamon.metrics.precision.default-histogram-precision}
- processing-time = ${kamon.metrics.precision.default-histogram-precision}
- time-in-mailbox = ${kamon.metrics.precision.default-histogram-precision}
- }
-
- dispatcher {
- maximum-pool-size {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
- running-thread-count {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
- queued-task-count {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
- pool-size {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
- }
+ dispatcher = "akka.actor.default-dispatcher"
}
}
\ No newline at end of file
diff --git a/kamon-akka/src/main/scala/kamon/akka/ActorMetrics.scala b/kamon-akka/src/main/scala/kamon/akka/ActorMetrics.scala
index b22f7fa9..c99df586 100644
--- a/kamon-akka/src/main/scala/kamon/akka/ActorMetrics.scala
+++ b/kamon-akka/src/main/scala/kamon/akka/ActorMetrics.scala
@@ -16,79 +16,26 @@
package kamon.akka
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.{ Counter, Histogram, MinMaxCounter }
+import kamon.metric.{ EntityRecorderFactory, GenericEntityRecorder }
+import kamon.metric.instrument.{ Time, InstrumentFactory }
-case class ActorMetrics(name: String) extends MetricGroupIdentity {
- val category = ActorMetrics
-}
-
-object ActorMetrics extends MetricGroupCategory {
- val name = "actor"
-
- case object ProcessingTime extends MetricIdentity { val name = "processing-time" }
- case object MailboxSize extends MetricIdentity { val name = "mailbox-size" }
- case object TimeInMailbox extends MetricIdentity { val name = "time-in-mailbox" }
- case object Errors extends MetricIdentity { val name = "errors" }
-
- case class ActorMetricsRecorder(processingTime: Histogram, timeInMailbox: Histogram, mailboxSize: MinMaxCounter,
- errors: Counter) extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): ActorMetricSnapshot =
- ActorMetricSnapshot(
- processingTime.collect(context),
- timeInMailbox.collect(context),
- mailboxSize.collect(context),
- errors.collect(context))
-
- def cleanup: Unit = {
- processingTime.cleanup
- mailboxSize.cleanup
- timeInMailbox.cleanup
- errors.cleanup
- }
- }
-
- case class ActorMetricSnapshot(processingTime: Histogram.Snapshot, timeInMailbox: Histogram.Snapshot,
- mailboxSize: Histogram.Snapshot, errors: Counter.Snapshot) extends MetricGroupSnapshot {
-
- type GroupSnapshotType = ActorMetricSnapshot
-
- def merge(that: ActorMetricSnapshot, context: CollectionContext): ActorMetricSnapshot =
- ActorMetricSnapshot(
- processingTime.merge(that.processingTime, context),
- timeInMailbox.merge(that.timeInMailbox, context),
- mailboxSize.merge(that.mailboxSize, context),
- errors.merge(that.errors, context))
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- (ProcessingTime -> processingTime),
- (MailboxSize -> mailboxSize),
- (TimeInMailbox -> timeInMailbox),
- (Errors -> errors))
- }
-
- val Factory = ActorMetricGroupFactory
+/**
+ * Entity recorder for Akka Actors. The metrics being tracked are:
+ *
+ * - time-in-mailbox: Time spent from the instant when a message is enqueued in an actor's mailbox to the instant when
+ * that message is dequeued for processing.
+ * - processing-time: Time taken for the actor to process the receive function.
+ * - mailbox-size: Size of the actor's mailbox.
+ * - errors: Number of errors seen by the actor's supervision mechanism.
+ */
+class ActorMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val timeInMailbox = histogram("time-in-mailbox", Time.Nanoseconds)
+ val processingTime = histogram("processing-time", Time.Nanoseconds)
+ val mailboxSize = minMaxCounter("mailbox-size")
+ val errors = counter("errors")
}
-case object ActorMetricGroupFactory extends MetricGroupFactory {
- import kamon.akka.ActorMetrics._
-
- type GroupRecorder = ActorMetricsRecorder
-
- def create(config: Config, system: ActorSystem): ActorMetricsRecorder = {
- val settings = config.getConfig("precision.actor")
-
- val processingTimeConfig = settings.getConfig("processing-time")
- val timeInMailboxConfig = settings.getConfig("time-in-mailbox")
- val mailboxSizeConfig = settings.getConfig("mailbox-size")
-
- new ActorMetricsRecorder(
- Histogram.fromConfig(processingTimeConfig),
- Histogram.fromConfig(timeInMailboxConfig),
- MinMaxCounter.fromConfig(mailboxSizeConfig, system),
- Counter())
- }
-}
+object ActorMetrics extends EntityRecorderFactory[ActorMetrics] {
+ def category: String = "akka-actor"
+ def createRecorder(instrumentFactory: InstrumentFactory): ActorMetrics = new ActorMetrics(instrumentFactory)
+}
\ No newline at end of file
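
Registration now goes through the Metrics extension instead of the per-category factory objects deleted above. A sketch based on the registration calls that appear later in this diff (elapsedNanos is a placeholder measurement):

    Metrics.get(system).register(ActorMetrics, ref.path.elements.mkString("/")).map { registration ⇒
      registration.recorder.processingTime.record(elapsedNanos) // placeholder value
      registration.recorder.errors.increment()
    }
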
diff --git a/kamon-akka/src/main/scala/kamon/akka/AkkaExtension.scala b/kamon-akka/src/main/scala/kamon/akka/AkkaExtension.scala
index bc013b63..cbca7db6 100644
--- a/kamon-akka/src/main/scala/kamon/akka/AkkaExtension.scala
+++ b/kamon-akka/src/main/scala/kamon/akka/AkkaExtension.scala
@@ -16,8 +16,8 @@
package kamon.akka
-import akka.actor
-import akka.actor._
+import _root_.akka.actor
+import _root_.akka.actor._
import kamon._
class AkkaExtension(system: ExtendedActorSystem) extends Kamon.Extension {
@@ -29,4 +29,6 @@ class AkkaExtension(system: ExtendedActorSystem) extends Kamon.Extension {
object Akka extends ExtensionId[AkkaExtension] with ExtensionIdProvider {
def lookup(): ExtensionId[_ <: actor.Extension] = Akka
def createExtension(system: ExtendedActorSystem): AkkaExtension = new AkkaExtension(system)
-}
\ No newline at end of file
+
+}
+
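
The `_root_` prefix added above matters because these imports sit inside the `kamon.akka` package, where the simple name `akka` resolves to `kamon.akka` itself and shadows the top-level Akka package. A minimal illustration:

    package kamon.akka

    // Without _root_, this import would be resolved against kamon.akka.* and fail to compile.
    import _root_.akka.actor.ExtendedActorSystem
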
diff --git a/kamon-akka/src/main/scala/kamon/akka/DispatcherMetrics.scala b/kamon-akka/src/main/scala/kamon/akka/DispatcherMetrics.scala
index 64e16f96..acf92e70 100644
--- a/kamon-akka/src/main/scala/kamon/akka/DispatcherMetrics.scala
+++ b/kamon-akka/src/main/scala/kamon/akka/DispatcherMetrics.scala
@@ -16,79 +16,71 @@
package kamon.akka
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
+import java.util.concurrent.ThreadPoolExecutor
+
+import _root_.akka.dispatch.ForkJoinExecutorConfigurator.AkkaForkJoinPool
import kamon.metric._
-import kamon.metric.instrument.Histogram
+import kamon.metric.instrument.{ DifferentialValueCollector, InstrumentFactory }
-case class DispatcherMetrics(name: String) extends MetricGroupIdentity {
- val category = DispatcherMetrics
-}
+class ForkJoinPoolDispatcherMetrics(fjp: AkkaForkJoinPool, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val parallelism = minMaxCounter("parallelism")
+ parallelism.increment(fjp.getParallelism) // Steady value.
-object DispatcherMetrics extends MetricGroupCategory {
- val name = "dispatcher"
+ val poolSize = gauge("pool-size", () ⇒ {
+ fjp.getPoolSize.toLong
+ })
- case object MaximumPoolSize extends MetricIdentity { val name = "maximum-pool-size" }
- case object RunningThreadCount extends MetricIdentity { val name = "running-thread-count" }
- case object QueueTaskCount extends MetricIdentity { val name = "queued-task-count" }
- case object PoolSize extends MetricIdentity { val name = "pool-size" }
+ val activeThreads = gauge("active-threads", () ⇒ {
+ fjp.getActiveThreadCount.toLong
+ })
- case class DispatcherMetricRecorder(maximumPoolSize: Histogram, runningThreadCount: Histogram,
- queueTaskCount: Histogram, poolSize: Histogram)
- extends MetricGroupRecorder {
+ val runningThreads = gauge("running-threads", () ⇒ {
+ fjp.getRunningThreadCount.toLong
+ })
- def collect(context: CollectionContext): MetricGroupSnapshot =
- DispatcherMetricSnapshot(
- maximumPoolSize.collect(context),
- runningThreadCount.collect(context),
- queueTaskCount.collect(context),
- poolSize.collect(context))
+ val queuedTaskCount = gauge("queued-task-count", () ⇒ {
+ fjp.getQueuedTaskCount
+ })
+}
- def cleanup: Unit = {}
+object ForkJoinPoolDispatcherMetrics {
+ def factory(fjp: AkkaForkJoinPool) = new EntityRecorderFactory[ForkJoinPoolDispatcherMetrics] {
+ def category: String = AkkaDispatcherMetrics.Category
+ def createRecorder(instrumentFactory: InstrumentFactory) = new ForkJoinPoolDispatcherMetrics(fjp, instrumentFactory)
}
+}
- case class DispatcherMetricSnapshot(maximumPoolSize: Histogram.Snapshot, runningThreadCount: Histogram.Snapshot,
- queueTaskCount: Histogram.Snapshot, poolSize: Histogram.Snapshot) extends MetricGroupSnapshot {
+class ThreadPoolExecutorDispatcherMetrics(tpe: ThreadPoolExecutor, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val corePoolSize = gauge("core-pool-size", () ⇒ {
+ tpe.getCorePoolSize.toLong
+ })
- type GroupSnapshotType = DispatcherMetricSnapshot
+ val maxPoolSize = gauge("max-pool-size", () ⇒ {
+ tpe.getMaximumPoolSize.toLong
+ })
- def merge(that: DispatcherMetricSnapshot, context: CollectionContext): DispatcherMetricSnapshot =
- DispatcherMetricSnapshot(
- maximumPoolSize.merge(that.maximumPoolSize, context),
- runningThreadCount.merge(that.runningThreadCount, context),
- queueTaskCount.merge(that.queueTaskCount, context),
- poolSize.merge(that.poolSize, context))
+ val poolSize = gauge("pool-size", () ⇒ {
+ tpe.getPoolSize.toLong
+ })
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- (MaximumPoolSize -> maximumPoolSize),
- (RunningThreadCount -> runningThreadCount),
- (QueueTaskCount -> queueTaskCount),
- (PoolSize -> poolSize))
- }
+ val activeThreads = gauge("active-threads", () ⇒ {
+ tpe.getActiveCount.toLong
+ })
- val Factory = DispatcherMetricGroupFactory
+ val processedTasks = gauge("processed-tasks", DifferentialValueCollector(() ⇒ {
+ tpe.getTaskCount
+ }))
}
-case object DispatcherMetricGroupFactory extends MetricGroupFactory {
+object ThreadPoolExecutorDispatcherMetrics {
- import kamon.akka.DispatcherMetrics._
-
- type GroupRecorder = DispatcherMetricRecorder
-
- def create(config: Config, system: ActorSystem): DispatcherMetricRecorder = {
- val settings = config.getConfig("precision.dispatcher")
-
- val maximumPoolSizeConfig = settings.getConfig("maximum-pool-size")
- val runningThreadCountConfig = settings.getConfig("running-thread-count")
- val queueTaskCountConfig = settings.getConfig("queued-task-count")
- val poolSizeConfig = settings.getConfig("pool-size")
-
- new DispatcherMetricRecorder(
- Histogram.fromConfig(maximumPoolSizeConfig),
- Histogram.fromConfig(runningThreadCountConfig),
- Histogram.fromConfig(queueTaskCountConfig),
- Histogram.fromConfig(poolSizeConfig))
+ def factory(tpe: ThreadPoolExecutor) = new EntityRecorderFactory[ThreadPoolExecutorDispatcherMetrics] {
+ def category: String = AkkaDispatcherMetrics.Category
+ def createRecorder(instrumentFactory: InstrumentFactory) = new ThreadPoolExecutorDispatcherMetrics(tpe, instrumentFactory)
}
+}
+object AkkaDispatcherMetrics {
+ val Category = "akka-dispatcher"
}
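
Note the asymmetry in the recorders above: pool and thread counts are sampled as plain gauges, while `processed-tasks` wraps the monotonically increasing `getTaskCount` in a `DifferentialValueCollector`, which (as its use here implies) records the growth since the previous sample rather than the raw running total. A sketch of the distinction:

    // Plain gauge: records the instantaneous value at every sample.
    val poolSize = gauge("pool-size", () ⇒ tpe.getPoolSize.toLong)

    // Differential gauge: records how much the cumulative counter grew since the last sample.
    val processedTasks = gauge("processed-tasks", DifferentialValueCollector(() ⇒ tpe.getTaskCount))
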
diff --git a/kamon-akka/src/main/scala/kamon/akka/RouterMetrics.scala b/kamon-akka/src/main/scala/kamon/akka/RouterMetrics.scala
index 2eedf764..5c5bb05a 100644
--- a/kamon-akka/src/main/scala/kamon/akka/RouterMetrics.scala
+++ b/kamon-akka/src/main/scala/kamon/akka/RouterMetrics.scala
@@ -15,75 +15,26 @@
*/
package kamon.akka
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
import kamon.metric._
-import kamon.metric.instrument.{ Counter, Histogram }
+import kamon.metric.instrument.{ Time, InstrumentFactory }
-case class RouterMetrics(name: String) extends MetricGroupIdentity {
- val category = RouterMetrics
-}
-
-object RouterMetrics extends MetricGroupCategory {
- val name = "router"
-
- case object RoutingTime extends MetricIdentity { val name = "routing-time" }
- case object ProcessingTime extends MetricIdentity { val name = "processing-time" }
- case object TimeInMailbox extends MetricIdentity { val name = "time-in-mailbox" }
- case object Errors extends MetricIdentity { val name = "errors" }
-
- case class RouterMetricsRecorder(routingTime: Histogram, processingTime: Histogram, timeInMailbox: Histogram, errors: Counter) extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): RouterMetricSnapshot =
- RouterMetricSnapshot(routingTime.collect(context), processingTime.collect(context), timeInMailbox.collect(context), errors.collect(context))
-
- def cleanup: Unit = {
- routingTime.cleanup
- processingTime.cleanup
- timeInMailbox.cleanup
- errors.cleanup
- }
- }
-
- case class RouterMetricSnapshot(routingTime: Histogram.Snapshot, processingTime: Histogram.Snapshot, timeInMailbox: Histogram.Snapshot, errors: Counter.Snapshot) extends MetricGroupSnapshot {
-
- type GroupSnapshotType = RouterMetricSnapshot
-
- def merge(that: RouterMetricSnapshot, context: CollectionContext): RouterMetricSnapshot =
- RouterMetricSnapshot(
- routingTime.merge(that.routingTime, context),
- processingTime.merge(that.processingTime, context),
- timeInMailbox.merge(that.timeInMailbox, context),
- errors.merge(that.errors, context))
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- RoutingTime -> routingTime,
- ProcessingTime -> processingTime,
- TimeInMailbox -> timeInMailbox,
- Errors -> errors)
- }
-
- val Factory = RouterMetricGroupFactory
-}
-
-case object RouterMetricGroupFactory extends MetricGroupFactory {
-
- import kamon.akka.RouterMetrics._
-
- type GroupRecorder = RouterMetricsRecorder
-
- def create(config: Config, system: ActorSystem): RouterMetricsRecorder = {
- val settings = config.getConfig("precision.router")
-
- val routingTimeConfig = settings.getConfig("routing-time")
- val processingTimeConfig = settings.getConfig("processing-time")
- val timeInMailboxConfig = settings.getConfig("time-in-mailbox")
-
- new RouterMetricsRecorder(
- Histogram.fromConfig(routingTimeConfig),
- Histogram.fromConfig(processingTimeConfig),
- Histogram.fromConfig(timeInMailboxConfig),
- Counter())
- }
+/**
+ * Entity recorder for Akka Routers. The metrics being tracked are:
+ *
+ * - routing-time: Time taken for the router to process the routing logic.
+ * - time-in-mailbox: Time spent from the instant when a message is enqueued in an actor's mailbox to the instant when
+ * that message is dequeued for processing.
+ * - processing-time: Time taken for the actor to process the receive function.
+ * - errors: Number of errors seen by the actor's supervision mechanism.
+ */
+class RouterMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val routingTime = histogram("routing-time", Time.Nanoseconds)
+ val timeInMailbox = histogram("time-in-mailbox", Time.Nanoseconds)
+ val processingTime = histogram("processing-time", Time.Nanoseconds)
+ val errors = counter("errors")
}
+object RouterMetrics extends EntityRecorderFactory[RouterMetrics] {
+ def category: String = "akka-router"
+ def createRecorder(instrumentFactory: InstrumentFactory): RouterMetrics = new RouterMetrics(instrumentFactory)
+}
\ No newline at end of file
diff --git a/kamon-akka/src/main/scala/kamon/instrumentation/akka/ActorCellInstrumentation.scala b/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorCellInstrumentation.scala
index 78d88583..c961737d 100644
--- a/kamon-akka/src/main/scala/kamon/instrumentation/akka/ActorCellInstrumentation.scala
+++ b/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorCellInstrumentation.scala
@@ -19,11 +19,8 @@ package akka.kamon.instrumentation
import akka.actor._
import akka.dispatch.{ Envelope, MessageDispatcher }
import akka.routing.RoutedActorCell
-import kamon.Kamon
import kamon.akka.{ RouterMetrics, ActorMetrics }
-import ActorMetrics.ActorMetricsRecorder
-import RouterMetrics.RouterMetricsRecorder
-import kamon.metric.Metrics
+import kamon.metric.{ Metrics, Entity }
import kamon.trace._
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
@@ -36,12 +33,13 @@ class ActorCellInstrumentation {
@After("actorCellCreation(cell, system, ref, props, dispatcher, parent)")
def afterCreation(cell: ActorCell, system: ActorSystem, ref: ActorRef, props: Props, dispatcher: MessageDispatcher, parent: ActorRef): Unit = {
- val metricsExtension = Kamon(Metrics)(system)
- val metricIdentity = ActorMetrics(ref.path.elements.mkString("/"))
- val cellMetrics = cell.asInstanceOf[ActorCellMetrics]
+ Metrics.get(system).register(ActorMetrics, ref.path.elements.mkString("/")).map { registration ⇒
+ val cellMetrics = cell.asInstanceOf[ActorCellMetrics]
+
+ cellMetrics.entity = registration.entity
+ cellMetrics.recorder = Some(registration.recorder)
+ }
- cellMetrics.actorMetricIdentity = metricIdentity
- cellMetrics.actorMetricsRecorder = metricsExtension.register(metricIdentity, ActorMetrics.Factory)
}
@Pointcut("execution(* akka.actor.ActorCell.invoke(*)) && this(cell) && args(envelope)")
@@ -54,11 +52,11 @@ class ActorCellInstrumentation {
val contextAndTimestamp = envelope.asInstanceOf[TimestampedTraceContextAware]
try {
- TraceRecorder.withInlineTraceContextReplacement(contextAndTimestamp.traceContext) {
+ TraceContext.withContext(contextAndTimestamp.traceContext) {
pjp.proceed()
}
} finally {
- cellMetrics.actorMetricsRecorder.map { am ⇒
+ cellMetrics.recorder.map { am ⇒
val processingTime = System.nanoTime() - timestampBeforeProcessing
val timeInMailbox = timestampBeforeProcessing - contextAndTimestamp.captureNanoTime
@@ -81,7 +79,7 @@ class ActorCellInstrumentation {
@After("sendMessageInActorCell(cell, envelope)")
def afterSendMessageInActorCell(cell: ActorCell, envelope: Envelope): Unit = {
val cellMetrics = cell.asInstanceOf[ActorCellMetrics]
- cellMetrics.actorMetricsRecorder.map(_.mailboxSize.increment())
+ cellMetrics.recorder.map(_.mailboxSize.increment())
}
@Pointcut("execution(* akka.actor.ActorCell.stop()) && this(cell)")
@@ -90,15 +88,15 @@ class ActorCellInstrumentation {
@After("actorStop(cell)")
def afterStop(cell: ActorCell): Unit = {
val cellMetrics = cell.asInstanceOf[ActorCellMetrics]
- cellMetrics.actorMetricsRecorder.map { _ ⇒
- Kamon(Metrics)(cell.system).unregister(cellMetrics.actorMetricIdentity)
+ cellMetrics.recorder.map { _ ⇒
+ Metrics.get(cell.system).unregister(cellMetrics.entity)
}
// The Stop can't be captured from the RoutedActorCell so we need to put this piece of cleanup here.
if (cell.isInstanceOf[RoutedActorCell]) {
val routedCellMetrics = cell.asInstanceOf[RoutedActorCellMetrics]
- routedCellMetrics.routerMetricsRecorder.map { _ ⇒
- Kamon(Metrics)(cell.system).unregister(routedCellMetrics.routerMetricIdentity)
+ routedCellMetrics.routerRecorder.map { _ ⇒
+ Metrics.get(cell.system).unregister(routedCellMetrics.routerEntity)
}
}
}
@@ -109,7 +107,7 @@ class ActorCellInstrumentation {
@Before("actorInvokeFailure(cell)")
def beforeInvokeFailure(cell: ActorCell): Unit = {
val cellWithMetrics = cell.asInstanceOf[ActorCellMetrics]
- cellWithMetrics.actorMetricsRecorder.map(_.errors.increment())
+ cellWithMetrics.recorder.map(_.errors.increment())
// In case that this actor is behind a router, count the errors for the router as well.
val envelope = cell.currentMessage.asInstanceOf[RouterAwareEnvelope]
@@ -125,12 +123,12 @@ class RoutedActorCellInstrumentation {
@After("routedActorCellCreation(cell, system, ref, props, dispatcher, routeeProps, supervisor)")
def afterRoutedActorCellCreation(cell: RoutedActorCell, system: ActorSystem, ref: ActorRef, props: Props, dispatcher: MessageDispatcher, routeeProps: Props, supervisor: ActorRef): Unit = {
- val metricsExtension = Kamon(Metrics)(system)
- val metricIdentity = RouterMetrics(ref.path.elements.mkString("/"))
- val cellMetrics = cell.asInstanceOf[RoutedActorCellMetrics]
+ Metrics.get(system).register(RouterMetrics, ref.path.elements.mkString("/")).map { registration ⇒
+ val cellMetrics = cell.asInstanceOf[RoutedActorCellMetrics]
- cellMetrics.routerMetricIdentity = metricIdentity
- cellMetrics.routerMetricsRecorder = metricsExtension.register(metricIdentity, RouterMetrics.Factory)
+ cellMetrics.routerEntity = registration.entity
+ cellMetrics.routerRecorder = Some(registration.recorder)
+ }
}
@Pointcut("execution(* akka.routing.RoutedActorCell.sendMessage(*)) && this(cell) && args(envelope)")
@@ -143,15 +141,15 @@ class RoutedActorCellInstrumentation {
val contextAndTimestamp = envelope.asInstanceOf[TimestampedTraceContextAware]
try {
- TraceRecorder.withInlineTraceContextReplacement(contextAndTimestamp.traceContext) {
+ TraceContext.withContext(contextAndTimestamp.traceContext) {
// The router metrics recorder will only be picked up if the message is sent from a tracked router.
- RouterAwareEnvelope.dynamicRouterMetricsRecorder.withValue(cellMetrics.routerMetricsRecorder) {
+ RouterAwareEnvelope.dynamicRouterMetricsRecorder.withValue(cellMetrics.routerRecorder) {
pjp.proceed()
}
}
} finally {
- cellMetrics.routerMetricsRecorder map { routerRecorder ⇒
+ cellMetrics.routerRecorder map { routerRecorder ⇒
routerRecorder.routingTime.record(System.nanoTime() - timestampBeforeProcessing)
}
}
@@ -159,25 +157,25 @@ class RoutedActorCellInstrumentation {
}
trait ActorCellMetrics {
- var actorMetricIdentity: ActorMetrics = _
- var actorMetricsRecorder: Option[ActorMetricsRecorder] = _
+ var entity: Entity = _
+ var recorder: Option[ActorMetrics] = None
}
trait RoutedActorCellMetrics {
- var routerMetricIdentity: RouterMetrics = _
- var routerMetricsRecorder: Option[RouterMetricsRecorder] = _
+ var routerEntity: Entity = _
+ var routerRecorder: Option[RouterMetrics] = None
}
trait RouterAwareEnvelope {
- def routerMetricsRecorder: Option[RouterMetricsRecorder]
+ def routerMetricsRecorder: Option[RouterMetrics]
}
object RouterAwareEnvelope {
import scala.util.DynamicVariable
- private[kamon] val dynamicRouterMetricsRecorder = new DynamicVariable[Option[RouterMetricsRecorder]](None)
+ private[kamon] val dynamicRouterMetricsRecorder = new DynamicVariable[Option[RouterMetrics]](None)
def default: RouterAwareEnvelope = new RouterAwareEnvelope {
- val routerMetricsRecorder: Option[RouterMetricsRecorder] = dynamicRouterMetricsRecorder.value
+ val routerMetricsRecorder: Option[RouterMetrics] = dynamicRouterMetricsRecorder.value
}
}
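
The RouterAwareEnvelope hand-off above rests on `scala.util.DynamicVariable`: a value set with `withValue` is visible to everything executed inside the block (here, envelope construction during `sendMessage`) and reverts afterwards. A self-contained illustration of the mechanism:

    import scala.util.DynamicVariable

    val currentRecorder = new DynamicVariable[Option[String]](None)

    def makeEnvelope(): Option[String] = currentRecorder.value // captured at construction time

    currentRecorder.withValue(Some("router-a")) {
      makeEnvelope() // Some("router-a")
    }
    makeEnvelope()   // None again outside the dynamic scope
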
diff --git a/kamon-akka/src/main/scala/kamon/instrumentation/akka/ActorLoggingInstrumentation.scala b/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorLoggingInstrumentation.scala
index e0e5d316..dd998c6b 100644
--- a/kamon-akka/src/main/scala/kamon/instrumentation/akka/ActorLoggingInstrumentation.scala
+++ b/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorLoggingInstrumentation.scala
@@ -17,7 +17,7 @@
package akka.kamon.instrumentation
import kamon.trace.logging.MdcKeysSupport
-import kamon.trace.{ TraceContextAware, TraceRecorder }
+import kamon.trace.{ TraceContext, TraceContextAware }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
@@ -41,7 +41,7 @@ class ActorLoggingInstrumentation extends MdcKeysSupport {
@Around("withMdcInvocation(logSource, logEvent, logStatement)")
def aroundWithMdcInvocation(pjp: ProceedingJoinPoint, logSource: String, logEvent: TraceContextAware, logStatement: () ⇒ _): Unit = {
- TraceRecorder.withInlineTraceContextReplacement(logEvent.traceContext) {
+ TraceContext.withContext(logEvent.traceContext) {
withMdc {
pjp.proceed()
}
diff --git a/kamon-akka/src/main/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentation.scala b/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorSystemMessageInstrumentation.scala
index 48016876..0cb4ef13 100644
--- a/kamon-akka/src/main/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentation.scala
+++ b/kamon-akka/src/main/scala/kamon/akka/instrumentation/ActorSystemMessageInstrumentation.scala
@@ -17,7 +17,7 @@
package akka.kamon.instrumentation
import akka.dispatch.sysmsg.EarliestFirstSystemMessageList
-import kamon.trace.{ TraceContextAware, TraceRecorder }
+import kamon.trace.{ TraceContext, TraceContextAware }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
@@ -31,7 +31,7 @@ class ActorSystemMessageInstrumentation {
def aroundSystemMessageInvoke(pjp: ProceedingJoinPoint, messages: EarliestFirstSystemMessageList): Any = {
if (messages.nonEmpty) {
val ctx = messages.head.asInstanceOf[TraceContextAware].traceContext
- TraceRecorder.withInlineTraceContextReplacement(ctx)(pjp.proceed())
+ TraceContext.withContext(ctx)(pjp.proceed())
} else pjp.proceed()
}
@@ -73,7 +73,7 @@ class TraceContextIntoRepointableActorRefMixin {
@Around("repointableActorRefCreation(repointableActorRef)")
def afterRepointableActorRefCreation(pjp: ProceedingJoinPoint, repointableActorRef: TraceContextAware): Any = {
- TraceRecorder.withInlineTraceContextReplacement(repointableActorRef.traceContext) {
+ TraceContext.withContext(repointableActorRef.traceContext) {
pjp.proceed()
}
}
diff --git a/kamon-akka/src/main/scala/kamon/instrumentation/akka/AskPatternInstrumentation.scala b/kamon-akka/src/main/scala/kamon/akka/instrumentation/AskPatternInstrumentation.scala
index ebddbfc8..28bfcae9 100644
--- a/kamon-akka/src/main/scala/kamon/instrumentation/akka/AskPatternInstrumentation.scala
+++ b/kamon-akka/src/main/scala/kamon/akka/instrumentation/AskPatternInstrumentation.scala
@@ -16,40 +16,49 @@
package akka.kamon.instrumentation
+import akka.util.Timeout
import kamon.Kamon
import kamon.akka.Akka
-import kamon.trace.{ TraceRecorder, TraceContext, EmptyTraceContext, TraceContextAware }
-import akka.actor.{ ActorSystem, ActorRef }
+import kamon.trace.{ TraceContext, EmptyTraceContext, TraceContextAware }
+import akka.actor.{ InternalActorRef, ActorSystem, ActorRef }
import akka.event.Logging.Warning
-import akka.pattern.AskTimeoutException
+import akka.pattern.{ PromiseActorRef, AskTimeoutException }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
import org.aspectj.lang.reflect.SourceLocation
import scala.concurrent.Future
import scala.compat.Platform.EOL
+import scala.concurrent.duration.FiniteDuration
@Aspect
class AskPatternInstrumentation {
import AskPatternInstrumentation._
- @Pointcut("call(* akka.pattern.AskableActorRef$.$qmark$extension(..)) && args(actor, *, *)")
- def askableActorRefAsk(actor: ActorRef): Unit = {}
+ @Pointcut("call(* akka.pattern.AskableActorRef$.$qmark$extension(..)) && args(actor, *, timeout)")
+ def askableActorRefAsk(actor: ActorRef, timeout: Timeout): Unit = {}
- @Around("askableActorRefAsk(actor)")
- def hookAskTimeoutWarning(pjp: ProceedingJoinPoint, actor: ActorRef): AnyRef =
- TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
- val akkaExtension = Kamon(Akka)(system)
- val future = pjp.proceed().asInstanceOf[Future[AnyRef]]
+ @Around("askableActorRefAsk(actor, timeout)")
+ def hookAskTimeoutWarning(pjp: ProceedingJoinPoint, actor: ActorRef, timeout: Timeout): AnyRef =
+ TraceContext.map { ctx ⇒
+ actor match {
+ // The ask pattern only works for InternalActorRefs that meet these conditions.
+ case ref: InternalActorRef if !ref.isTerminated && timeout.duration.length > 0 ⇒
+ val akkaExtension = ctx.lookupExtension(Akka)
+ val future = pjp.proceed().asInstanceOf[Future[AnyRef]]
+ val system = ref.provider.guardian.underlying.system
- val handler = akkaExtension.askPatternTimeoutWarning match {
- case "off" ⇒ None
- case "lightweight" ⇒ Some(errorHandler(callInfo = Some(CallInfo(s"${actor.path.name} ?", pjp.getSourceLocation)))(system))
- case "heavyweight" ⇒ Some(errorHandler(stack = Some(new StackTraceCaptureException))(system))
- }
+ val handler = akkaExtension.askPatternTimeoutWarning match {
+ case "off" ⇒ None
+ case "lightweight" ⇒ Some(errorHandler(callInfo = Some(CallInfo(s"${actor.path.name} ?", pjp.getSourceLocation)))(system))
+ case "heavyweight" ⇒ Some(errorHandler(stack = Some(new StackTraceCaptureException))(system))
+ }
+
+ handler.map(future.onFailure(_)(akkaExtension.dispatcher))
+ future
- handler.map(future.onFailure(_)(akkaExtension.dispatcher))
- future
+ case _ ⇒ pjp.proceed() // Terminated ref or non-positive timeout: skip the warning hook.
+ }
} getOrElse (pjp.proceed())
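
The three handler modes matched above correspond to the `ask-pattern-timeout-warning` setting shown in kamon-akka's reference.conf earlier in this diff, and the chosen handler is attached as a failure callback so it fires only when the ask actually times out. A sketch of that hook-up, using the imports already present in this file (the warning text is illustrative):

    val future = pjp.proceed().asInstanceOf[Future[AnyRef]]
    future.onFailure {
      case _: AskTimeoutException ⇒
        system.eventStream.publish(Warning("AskPatternInstrumentation",
          classOf[AskPatternInstrumentation], "ask pattern timed out")) // illustrative message
    }(akkaExtension.dispatcher)
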
diff --git a/kamon-akka/src/main/scala/kamon/akka/instrumentation/DispatcherInstrumentation.scala b/kamon-akka/src/main/scala/kamon/akka/instrumentation/DispatcherInstrumentation.scala
new file mode 100644
index 00000000..f4bc31c4
--- /dev/null
+++ b/kamon-akka/src/main/scala/kamon/akka/instrumentation/DispatcherInstrumentation.scala
@@ -0,0 +1,168 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package akka.kamon.instrumentation
+
+import java.util.concurrent.{ ExecutorService, ThreadPoolExecutor }
+
+import akka.actor.{ ActorSystem, ActorSystemImpl }
+import akka.dispatch.ForkJoinExecutorConfigurator.AkkaForkJoinPool
+import akka.dispatch._
+import akka.kamon.instrumentation.LookupDataAware.LookupData
+import kamon.akka.{ AkkaDispatcherMetrics, ThreadPoolExecutorDispatcherMetrics, ForkJoinPoolDispatcherMetrics }
+import kamon.metric.{ Metrics, Entity }
+import org.aspectj.lang.ProceedingJoinPoint
+import org.aspectj.lang.annotation._
+
+@Aspect
+class DispatcherInstrumentation {
+
+ @Pointcut("execution(* akka.actor.ActorSystemImpl.start(..)) && this(system)")
+ def actorSystemInitialization(system: ActorSystemImpl): Unit = {}
+
+ @Before("actorSystemInitialization(system)")
+ def afterActorSystemInitialization(system: ActorSystemImpl): Unit = {
+ system.dispatchers.asInstanceOf[ActorSystemAware].actorSystem = system
+
+ // The default dispatcher for the actor system is looked up in the ActorSystemImpl's initialization code and we
+ // can't get the Metrics extension there since the ActorSystem is not yet fully constructed. To work around that,
+ // we manually select and register the default dispatcher with the Metrics extension. All other dispatchers
+ // will be registered by the instrumentation below.
+
+ // Yes, reflection sucks, but this piece of code is only executed once on ActorSystem's startup.
+ val defaultDispatcher = system.dispatcher
+ val executorServiceDelegateField = defaultDispatcher.getClass.getDeclaredField("executorServiceDelegate")
+ executorServiceDelegateField.setAccessible(true)
+
+ val lazyExecutorServiceDelegate = executorServiceDelegateField.get(defaultDispatcher)
+ val executorField = lazyExecutorServiceDelegate.getClass.getMethod("executor")
+ executorField.setAccessible(true)
+
+ val defaultDispatcherExecutor = executorField.invoke(lazyExecutorServiceDelegate).asInstanceOf[ExecutorService]
+ registerDispatcher(Dispatchers.DefaultDispatcherId, defaultDispatcherExecutor, system)
+ }
+
+ private def registerDispatcher(dispatcherName: String, executorService: ExecutorService, system: ActorSystem): Unit =
+ executorService match {
+ case fjp: AkkaForkJoinPool ⇒
+ Metrics.get(system).register(ForkJoinPoolDispatcherMetrics.factory(fjp), dispatcherName)
+
+ case tpe: ThreadPoolExecutor ⇒
+ Metrics.get(system).register(ThreadPoolExecutorDispatcherMetrics.factory(tpe), dispatcherName)
+
+ case others ⇒ // Currently not interested in other kinds of dispatchers.
+ }
+
+ @Pointcut("execution(* akka.dispatch.Dispatchers.lookup(..)) && this(dispatchers) && args(dispatcherName)")
+ def dispatchersLookup(dispatchers: ActorSystemAware, dispatcherName: String) = {}
+
+ @Around("dispatchersLookup(dispatchers, dispatcherName)")
+ def aroundDispatchersLookup(pjp: ProceedingJoinPoint, dispatchers: ActorSystemAware, dispatcherName: String): Any =
+ LookupDataAware.withLookupData(LookupData(dispatcherName, dispatchers.actorSystem)) {
+ pjp.proceed()
+ }
+
+ @Pointcut("initialization(akka.dispatch.ExecutorServiceFactory.new(..)) && target(factory)")
+ def executorServiceFactoryInitialization(factory: LookupDataAware): Unit = {}
+
+ @After("executorServiceFactoryInitialization(factory)")
+ def afterExecutorServiceFactoryInitialization(factory: LookupDataAware): Unit =
+ factory.lookupData = LookupDataAware.currentLookupData
+
+ @Pointcut("execution(* akka.dispatch.ExecutorServiceFactory+.createExecutorService()) && this(factory) && !cflow(execution(* akka.dispatch.Dispatcher.shutdown()))")
+ def createExecutorService(factory: LookupDataAware): Unit = {}
+
+ @AfterReturning(pointcut = "createExecutorService(factory)", returning = "executorService")
+ def afterCreateExecutorService(factory: LookupDataAware, executorService: ExecutorService): Unit = {
+ val lookupData = factory.lookupData
+
+ // lookupData.actorSystem will be null only during the first lookup of the default dispatcher during the
+ // ActorSystemImpl's initialization.
+ if (lookupData.actorSystem != null)
+ registerDispatcher(lookupData.dispatcherName, executorService, lookupData.actorSystem)
+ }
+
+ @Pointcut("initialization(akka.dispatch.Dispatcher.LazyExecutorServiceDelegate.new(..)) && this(lazyExecutor)")
+ def lazyExecutorInitialization(lazyExecutor: LookupDataAware): Unit = {}
+
+ @After("lazyExecutorInitialization(lazyExecutor)")
+ def afterLazyExecutorInitialization(lazyExecutor: LookupDataAware): Unit =
+ lazyExecutor.lookupData = LookupDataAware.currentLookupData
+
+ @Pointcut("execution(* akka.dispatch.Dispatcher.LazyExecutorServiceDelegate.copy()) && this(lazyExecutor)")
+ def lazyExecutorCopy(lazyExecutor: LookupDataAware): Unit = {}
+
+ @Around("lazyExecutorCopy(lazyExecutor)")
+ def aroundLazyExecutorCopy(pjp: ProceedingJoinPoint, lazyExecutor: LookupDataAware): Any =
+ LookupDataAware.withLookupData(lazyExecutor.lookupData) {
+ pjp.proceed()
+ }
+
+ @Pointcut("execution(* akka.dispatch.Dispatcher.LazyExecutorServiceDelegate.shutdown()) && this(lazyExecutor)")
+ def lazyExecutorShutdown(lazyExecutor: LookupDataAware): Unit = {}
+
+ @After("lazyExecutorShutdown(lazyExecutor)")
+ def afterLazyExecutorShutdown(lazyExecutor: LookupDataAware): Unit = {
+ import lazyExecutor.lookupData
+
+ if (lookupData.actorSystem != null)
+ Metrics.get(lookupData.actorSystem).unregister(Entity(lookupData.dispatcherName, AkkaDispatcherMetrics.Category))
+ }
+
+}
+
+@Aspect
+class DispatcherMetricCollectionInfoIntoDispatcherMixin {
+
+ @DeclareMixin("akka.dispatch.Dispatchers")
+ def mixinActorSystemAwareToDispatchers: ActorSystemAware = ActorSystemAware()
+
+ @DeclareMixin("akka.dispatch.Dispatcher.LazyExecutorServiceDelegate")
+ def mixinLookupDataAwareToExecutors: LookupDataAware = LookupDataAware()
+
+ @DeclareMixin("akka.dispatch.ExecutorServiceFactory+")
+ def mixinActorSystemAwareToDispatcher: LookupDataAware = LookupDataAware()
+}
+
+trait ActorSystemAware {
+ @volatile var actorSystem: ActorSystem = _
+}
+
+object ActorSystemAware {
+ def apply(): ActorSystemAware = new ActorSystemAware {}
+}
+
+trait LookupDataAware {
+ @volatile var lookupData: LookupData = _
+}
+
+object LookupDataAware {
+ case class LookupData(dispatcherName: String, actorSystem: ActorSystem)
+
+ private val _currentDispatcherLookupData = new ThreadLocal[LookupData]
+
+ def apply() = new LookupDataAware {}
+
+ def currentLookupData: LookupData = _currentDispatcherLookupData.get()
+
+  def withLookupData[T](lookupData: LookupData)(thunk: ⇒ T): T = {
+    _currentDispatcherLookupData.set(lookupData)
+    // Always clear the ThreadLocal, even if the thunk throws.
+    try thunk finally _currentDispatcherLookupData.remove()
+  }
+}
\ No newline at end of file
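
The dispatcher name and the owning ActorSystem never share a call path with the ExecutorServiceFactory, so LookupDataAware.withLookupData stashes them in a ThreadLocal around Dispatchers.lookup and the factory mixins read them back. A minimal sketch of that pattern in plain Scala, without the AspectJ mixins (LookupScopeDemo and all its names are illustrative only):

    import java.util.concurrent.{ ExecutorService, Executors }

    object LookupScopeDemo extends App {
      final case class LookupData(dispatcherName: String)

      private val current = new ThreadLocal[LookupData]

      // Make `data` visible to everything `thunk` calls on this thread,
      // and always clear it afterwards, even if the thunk throws.
      def withLookupData[T](data: LookupData)(thunk: ⇒ T): T = {
        current.set(data)
        try thunk finally current.remove()
      }

      // Deep inside the call chain, the factory recovers the lookup data
      // without it being threaded through every intermediate signature.
      def createExecutorService(): ExecutorService = {
        Option(current.get()).foreach(d ⇒ println(s"creating executor for ${d.dispatcherName}"))
        Executors.newCachedThreadPool()
      }

      withLookupData(LookupData("tracked-fjp")) {
        createExecutorService()
      }.shutdown()
    }

Like the original, this only scopes the value to a single thread; the instrumentation above relies on the executor service factory being created while the lookup is still on the stack.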
diff --git a/kamon-akka/src/main/scala/kamon/instrumentation/akka/DispatcherInstrumentation.scala b/kamon-akka/src/main/scala/kamon/instrumentation/akka/DispatcherInstrumentation.scala
deleted file mode 100644
index 8280edca..00000000
--- a/kamon-akka/src/main/scala/kamon/instrumentation/akka/DispatcherInstrumentation.scala
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package akka.kamon.instrumentation
-
-import java.lang.reflect.Method
-import java.util.concurrent.ThreadPoolExecutor
-
-import akka.actor.{ ActorSystemImpl, Cancellable }
-import akka.dispatch.{ Dispatcher, Dispatchers, ExecutorServiceDelegate, MessageDispatcher }
-import akka.kamon.instrumentation.DispatcherMetricsCollector.DispatcherMetricsMeasurement
-import kamon.Kamon
-import kamon.akka.DispatcherMetrics
-import DispatcherMetrics.DispatcherMetricRecorder
-import kamon.metric.Metrics
-import org.aspectj.lang.annotation._
-
-import scala.concurrent.forkjoin.ForkJoinPool
-
-@Aspect
-class DispatcherInstrumentation {
-
- @Pointcut("execution(akka.dispatch.Dispatchers.new(..)) && this(dispatchers) && cflow(execution(akka.actor.ActorSystemImpl.new(..)) && this(system))")
- def onActorSystemStartup(dispatchers: Dispatchers, system: ActorSystemImpl) = {}
-
- @Before("onActorSystemStartup(dispatchers, system)")
- def beforeActorSystemStartup(dispatchers: Dispatchers, system: ActorSystemImpl): Unit = {
- val currentDispatchers = dispatchers.asInstanceOf[DispatchersWithActorSystem]
- currentDispatchers.actorSystem = system
- }
-
- @Pointcut("execution(* akka.dispatch.Dispatchers.lookup(..)) && this(dispatchers)")
- def onDispatchersLookup(dispatchers: Dispatchers) = {}
-
- @AfterReturning(pointcut = "onDispatchersLookup(dispatchers)", returning = "dispatcher")
- def afterReturningLookup(dispatchers: Dispatchers, dispatcher: Dispatcher): Unit = {
- val dispatchersWithActorSystem = dispatchers.asInstanceOf[DispatchersWithActorSystem]
- val dispatcherWithMetrics = dispatcher.asInstanceOf[DispatcherMetricCollectionInfo]
-
- dispatcherWithMetrics.actorSystem = dispatchersWithActorSystem.actorSystem
- }
-
- @Pointcut("call(* akka.dispatch.ExecutorServiceFactory.createExecutorService(..))")
- def onCreateExecutorService(): Unit = {}
-
- @Pointcut("cflow((execution(* akka.dispatch.MessageDispatcher.registerForExecution(..)) || execution(* akka.dispatch.MessageDispatcher.executeTask(..))) && this(dispatcher))")
- def onCflowMessageDispatcher(dispatcher: Dispatcher): Unit = {}
-
- @Pointcut("onCreateExecutorService() && onCflowMessageDispatcher(dispatcher)")
- def onDispatcherStartup(dispatcher: Dispatcher): Unit = {}
-
- @After("onDispatcherStartup(dispatcher)")
- def afterDispatcherStartup(dispatcher: MessageDispatcher): Unit = {
-
- val dispatcherWithMetrics = dispatcher.asInstanceOf[DispatcherMetricCollectionInfo]
- val metricsExtension = Kamon(Metrics)(dispatcherWithMetrics.actorSystem)
- val metricIdentity = DispatcherMetrics(dispatcher.id)
-
- dispatcherWithMetrics.metricIdentity = metricIdentity
- dispatcherWithMetrics.dispatcherMetricsRecorder = metricsExtension.register(metricIdentity, DispatcherMetrics.Factory)
-
- if (dispatcherWithMetrics.dispatcherMetricsRecorder.isDefined) {
- dispatcherWithMetrics.dispatcherCollectorCancellable = metricsExtension.scheduleGaugeRecorder {
- dispatcherWithMetrics.dispatcherMetricsRecorder.map {
- dm ⇒
- val DispatcherMetricsMeasurement(maximumPoolSize, runningThreadCount, queueTaskCount, poolSize) =
- DispatcherMetricsCollector.collect(dispatcher)
-
- dm.maximumPoolSize.record(maximumPoolSize)
- dm.runningThreadCount.record(runningThreadCount)
- dm.queueTaskCount.record(queueTaskCount)
- dm.poolSize.record(poolSize)
- }
- }
- }
- }
-
- @Pointcut("execution(* akka.dispatch.MessageDispatcher.shutdown(..)) && this(dispatcher)")
- def onDispatcherShutdown(dispatcher: MessageDispatcher): Unit = {}
-
- @After("onDispatcherShutdown(dispatcher)")
- def afterDispatcherShutdown(dispatcher: MessageDispatcher): Unit = {
- val dispatcherWithMetrics = dispatcher.asInstanceOf[DispatcherMetricCollectionInfo]
-
- dispatcherWithMetrics.dispatcherMetricsRecorder.map {
- dispatcher ⇒
- dispatcherWithMetrics.dispatcherCollectorCancellable.cancel()
- Kamon(Metrics)(dispatcherWithMetrics.actorSystem).unregister(dispatcherWithMetrics.metricIdentity)
- }
- }
-}
-
-@Aspect
-class DispatcherMetricCollectionInfoIntoDispatcherMixin {
-
- @DeclareMixin("akka.dispatch.MessageDispatcher")
- def mixinDispatcherMetricsToMessageDispatcher: DispatcherMetricCollectionInfo = new DispatcherMetricCollectionInfo {}
-
- @DeclareMixin("akka.dispatch.Dispatchers")
- def mixinDispatchersToDispatchersWithActorSystem: DispatchersWithActorSystem = new DispatchersWithActorSystem {}
-}
-
-trait DispatcherMetricCollectionInfo {
- var metricIdentity: DispatcherMetrics = _
- var dispatcherMetricsRecorder: Option[DispatcherMetricRecorder] = _
- var dispatcherCollectorCancellable: Cancellable = _
- var actorSystem: ActorSystemImpl = _
-}
-
-trait DispatchersWithActorSystem {
- var actorSystem: ActorSystemImpl = _
-}
-
-object DispatcherMetricsCollector {
-
- case class DispatcherMetricsMeasurement(maximumPoolSize: Long, runningThreadCount: Long, queueTaskCount: Long, poolSize: Long)
-
- private def collectForkJoinMetrics(pool: ForkJoinPool): DispatcherMetricsMeasurement = {
- DispatcherMetricsMeasurement(pool.getParallelism, pool.getActiveThreadCount,
- (pool.getQueuedTaskCount + pool.getQueuedSubmissionCount), pool.getPoolSize)
- }
-
- private def collectExecutorMetrics(pool: ThreadPoolExecutor): DispatcherMetricsMeasurement = {
- DispatcherMetricsMeasurement(pool.getMaximumPoolSize, pool.getActiveCount, pool.getQueue.size(), pool.getPoolSize)
- }
-
- private val executorServiceMethod: Method = {
- // executorService is protected
- val method = classOf[Dispatcher].getDeclaredMethod("executorService")
- method.setAccessible(true)
- method
- }
-
- def collect(dispatcher: MessageDispatcher): DispatcherMetricsMeasurement = {
- dispatcher match {
- case x: Dispatcher ⇒ {
- val executor = executorServiceMethod.invoke(x) match {
- case delegate: ExecutorServiceDelegate ⇒ delegate.executor
- case other ⇒ other
- }
-
- executor match {
- case fjp: ForkJoinPool ⇒ collectForkJoinMetrics(fjp)
- case tpe: ThreadPoolExecutor ⇒ collectExecutorMetrics(tpe)
- case anything ⇒ DispatcherMetricsMeasurement(0L, 0L, 0L, 0L)
- }
- }
- case _ ⇒ new DispatcherMetricsMeasurement(0L, 0L, 0L, 0L)
- }
- }
-}
diff --git a/kamon-akka/src/test/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala b/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorCellInstrumentationSpec.scala
index 06a232bd..8f7ae613 100644
--- a/kamon-akka/src/test/scala/kamon/instrumentation/akka/ActorCellInstrumentationSpec.scala
+++ b/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorCellInstrumentationSpec.scala
@@ -15,35 +15,32 @@
* ========================================================== */
package kamon.instrumentation.akka
-import akka.actor.{ Actor, ActorSystem, Props }
+import akka.actor.{ Actor, Props }
import akka.pattern.{ ask, pipe }
import akka.routing._
-import akka.testkit.{ TestKitBase, ImplicitSender, TestKit }
import akka.util.Timeout
-import com.typesafe.config.ConfigFactory
-import kamon.trace.TraceRecorder
-import org.scalatest.{ BeforeAndAfterAll, WordSpecLike }
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.TraceContext
import scala.concurrent.duration._
-class ActorCellInstrumentationSpec extends TestKitBase with WordSpecLike with ImplicitSender with BeforeAndAfterAll {
- implicit lazy val system: ActorSystem = ActorSystem("actor-cell-instrumentation-spec")
- implicit val executionContext = system.dispatcher
+class ActorCellInstrumentationSpec extends BaseKamonSpec("actor-cell-instrumentation-spec") {
+ implicit lazy val executionContext = system.dispatcher
"the message passing instrumentation" should {
"propagate the TraceContext using bang" in new EchoActorFixture {
- val testTraceContext = TraceRecorder.withNewTraceContext("bang-reply") {
+ val testTraceContext = TraceContext.withContext(newContext("bang-reply")) {
ctxEchoActor ! "test"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
}
"propagate the TraceContext using tell" in new EchoActorFixture {
- val testTraceContext = TraceRecorder.withNewTraceContext("tell-reply") {
+ val testTraceContext = TraceContext.withContext(newContext("tell-reply")) {
ctxEchoActor.tell("test", testActor)
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
@@ -51,37 +48,37 @@ class ActorCellInstrumentationSpec extends TestKitBase with WordSpecLike with Im
"propagate the TraceContext using ask" in new EchoActorFixture {
implicit val timeout = Timeout(1 seconds)
- val testTraceContext = TraceRecorder.withNewTraceContext("ask-reply") {
+ val testTraceContext = TraceContext.withContext(newContext("ask-reply")) {
        // The pipe pattern uses Futures internally, so the FutureTracing test should cover its underpinnings.
(ctxEchoActor ? "test") pipeTo (testActor)
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
}
"propagate the TraceContext to actors behind a simple router" in new EchoSimpleRouterFixture {
- val testTraceContext = TraceRecorder.withNewTraceContext("router-reply") {
+ val testTraceContext = TraceContext.withContext(newContext("router-reply")) {
router.route("test", testActor)
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
}
"propagate the TraceContext to actors behind a pool router" in new EchoPoolRouterFixture {
- val testTraceContext = TraceRecorder.withNewTraceContext("router-reply") {
+ val testTraceContext = TraceContext.withContext(newContext("router-reply")) {
pool ! "test"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
}
"propagate the TraceContext to actors behind a group router" in new EchoGroupRouterFixture {
- val testTraceContext = TraceRecorder.withNewTraceContext("router-reply") {
+ val testTraceContext = TraceContext.withContext(newContext("router-reply")) {
group ! "test"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
@@ -119,7 +116,7 @@ class ActorCellInstrumentationSpec extends TestKitBase with WordSpecLike with Im
class TraceContextEcho extends Actor {
def receive = {
- case msg: String ⇒ sender ! TraceRecorder.currentContext
+ case msg: String ⇒ sender ! TraceContext.currentContext
}
}
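
Every spec in this commit migrates the same way: TraceRecorder.withNewTraceContext("name") { ... } becomes TraceContext.withContext(newContext("name")) { ... }, with newContext provided by the new BaseKamonSpec base class. A sketch of the resulting test shape, assuming the BaseKamonSpec helpers used throughout these diffs (someInstrumentedCall is a hypothetical stand-in):

    import kamon.testkit.BaseKamonSpec
    import kamon.trace.TraceContext

    class SomeInstrumentationSpec extends BaseKamonSpec("some-instrumentation-spec") {
      // Stand-in for whatever instrumented call should carry the context.
      def someInstrumentedCall(): Unit = testActor ! TraceContext.currentContext

      "some instrumentation" should {
        "propagate the TraceContext" in {
          // Open a context, trigger the instrumented code, and keep a handle
          // on the context that was active inside the block.
          val testTraceContext = TraceContext.withContext(newContext("some-operation")) {
            someInstrumentedCall()
            TraceContext.currentContext
          }

          // The instrumented code should observe exactly the same context.
          expectMsg(testTraceContext)
        }
      }
    }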
diff --git a/kamon-akka/src/test/scala/kamon/instrumentation/akka/ActorLoggingInstrumentationSpec.scala b/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorLoggingInstrumentationSpec.scala
index 598e9327..21706af9 100644
--- a/kamon-akka/src/test/scala/kamon/instrumentation/akka/ActorLoggingInstrumentationSpec.scala
+++ b/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorLoggingInstrumentationSpec.scala
@@ -15,28 +15,33 @@
* ========================================================== */
package kamon.instrumentation.akka
-import akka.actor.{ Actor, ActorLogging, ActorSystem, Props }
+import akka.actor.{ Actor, ActorLogging, Props }
import akka.event.Logging.LogEvent
-import akka.testkit.TestKitBase
import com.typesafe.config.ConfigFactory
+import kamon.testkit.BaseKamonSpec
import kamon.trace.TraceLocal.AvailableToMdc
import kamon.trace.logging.MdcKeysSupport
-import kamon.trace.{ TraceContextAware, TraceLocal, TraceRecorder }
-import org.scalatest.{ BeforeAndAfterAll, Inspectors, Matchers, WordSpecLike }
+import kamon.trace.{ TraceContextAware, TraceLocal, TraceContext }
+import org.scalatest.Inspectors
import org.slf4j.MDC
-class ActorLoggingInstrumentationSpec extends TestKitBase with WordSpecLike with Matchers with Inspectors with MdcKeysSupport with BeforeAndAfterAll {
- implicit lazy val system: ActorSystem = ActorSystem("actor-logging-instrumentation-spec",
- ConfigFactory.parseString("""akka.loggers = ["akka.event.slf4j.Slf4jLogger"]"""))
+class ActorLoggingInstrumentationSpec extends BaseKamonSpec("actor-logging-instrumentation-spec") with Inspectors with MdcKeysSupport {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |akka {
+ | loggers = ["akka.event.slf4j.Slf4jLogger"]
+ |}
+ """.stripMargin)
"the ActorLogging instrumentation" should {
"attach the TraceContext (if available) to log events" in {
val loggerActor = system.actorOf(Props[LoggerActor])
system.eventStream.subscribe(testActor, classOf[LogEvent])
- val testTraceContext = TraceRecorder.withNewTraceContext("logging") {
+ val testTraceContext = TraceContext.withContext(newContext("logging")) {
loggerActor ! "info"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
fishForMessage() {
@@ -50,7 +55,7 @@ class ActorLoggingInstrumentationSpec extends TestKitBase with WordSpecLike with
"allow retrieve a value from the MDC when was created a key of type AvailableToMdc" in {
val testString = "Hello World"
- TraceRecorder.withNewTraceContext("logging-with-mdc") {
+ TraceContext.withContext(newContext("logging-with-mdc")) {
TraceLocal.store(AvailableToMdc("some-cool-key"))(testString)
withMdc {
@@ -66,6 +71,6 @@ class ActorLoggingInstrumentationSpec extends TestKitBase with WordSpecLike with
class LoggerActor extends Actor with ActorLogging {
def receive = {
- case "info" ⇒ log.info("TraceContext(name = {}, token = {})", TraceRecorder.currentContext.name, TraceRecorder.currentContext.token)
+ case "info" ⇒ log.info("TraceContext(name = {}, token = {})", TraceContext.currentContext.name, TraceContext.currentContext.token)
}
}
diff --git a/kamon-akka/src/test/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentationSpec.scala b/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorSystemMessageInstrumentationSpec.scala
index 0e9025af..8c1033ae 100644
--- a/kamon-akka/src/test/scala/kamon/instrumentation/akka/ActorSystemMessageInstrumentationSpec.scala
+++ b/kamon-akka/src/test/scala/kamon/akka/instrumentation/ActorSystemMessageInstrumentationSpec.scala
@@ -2,49 +2,53 @@ package kamon.instrumentation.akka
import akka.actor.SupervisorStrategy.{ Escalate, Restart, Resume, Stop }
import akka.actor._
-import akka.testkit.{ TestKitBase, ImplicitSender }
+import akka.testkit.ImplicitSender
import com.typesafe.config.ConfigFactory
-import kamon.trace.{ EmptyTraceContext, TraceRecorder }
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.{ EmptyTraceContext, TraceContext }
import org.scalatest.WordSpecLike
import scala.concurrent.duration._
import scala.util.control.NonFatal
-class ActorSystemMessageInstrumentationSpec extends TestKitBase with WordSpecLike with ImplicitSender {
- implicit lazy val system: ActorSystem = ActorSystem("actor-system-message-instrumentation-spec", ConfigFactory.parseString(
- """
- |akka.loglevel = OFF
- """.stripMargin))
+class ActorSystemMessageInstrumentationSpec extends BaseKamonSpec("actor-system-message-instrumentation-spec") with WordSpecLike with ImplicitSender {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |akka {
+ | loglevel = OFF
+ |}
+ """.stripMargin)
- implicit val executionContext = system.dispatcher
+ implicit lazy val executionContext = system.dispatcher
"the system message passing instrumentation" should {
"keep the TraceContext while processing the Create message in top level actors" in {
- val testTraceContext = TraceRecorder.withNewTraceContext("creating-top-level-actor") {
+ val testTraceContext = TraceContext.withContext(newContext("creating-top-level-actor")) {
system.actorOf(Props(new Actor {
- testActor ! TraceRecorder.currentContext
+ testActor ! TraceContext.currentContext
def receive: Actor.Receive = { case any ⇒ }
}))
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
}
"keep the TraceContext while processing the Create message in non top level actors" in {
- val testTraceContext = TraceRecorder.withNewTraceContext("creating-non-top-level-actor") {
+ val testTraceContext = TraceContext.withContext(newContext("creating-non-top-level-actor")) {
system.actorOf(Props(new Actor {
def receive: Actor.Receive = {
case any ⇒
context.actorOf(Props(new Actor {
- testActor ! TraceRecorder.currentContext
+ testActor ! TraceContext.currentContext
def receive: Actor.Receive = { case any ⇒ }
}))
}
})) ! "any"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext)
@@ -54,9 +58,9 @@ class ActorSystemMessageInstrumentationSpec extends TestKitBase with WordSpecLik
"the actor is resumed" in {
val supervisor = supervisorWithDirective(Resume)
- val testTraceContext = TraceRecorder.withNewTraceContext("fail-and-resume") {
+ val testTraceContext = TraceContext.withContext(newContext("fail-and-resume")) {
supervisor ! "fail"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext) // From the parent executing the supervision strategy
@@ -69,9 +73,9 @@ class ActorSystemMessageInstrumentationSpec extends TestKitBase with WordSpecLik
"the actor is restarted" in {
val supervisor = supervisorWithDirective(Restart, sendPreRestart = true, sendPostRestart = true)
- val testTraceContext = TraceRecorder.withNewTraceContext("fail-and-restart") {
+ val testTraceContext = TraceContext.withContext(newContext("fail-and-restart")) {
supervisor ! "fail"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext) // From the parent executing the supervision strategy
@@ -86,9 +90,9 @@ class ActorSystemMessageInstrumentationSpec extends TestKitBase with WordSpecLik
"the actor is stopped" in {
val supervisor = supervisorWithDirective(Stop, sendPostStop = true)
- val testTraceContext = TraceRecorder.withNewTraceContext("fail-and-stop") {
+ val testTraceContext = TraceContext.withContext(newContext("fail-and-stop")) {
supervisor ! "fail"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext) // From the parent executing the supervision strategy
@@ -99,9 +103,9 @@ class ActorSystemMessageInstrumentationSpec extends TestKitBase with WordSpecLik
"the failure is escalated" in {
val supervisor = supervisorWithDirective(Escalate, sendPostStop = true)
- val testTraceContext = TraceRecorder.withNewTraceContext("fail-and-escalate") {
+ val testTraceContext = TraceContext.withContext(newContext("fail-and-escalate")) {
supervisor ! "fail"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
expectMsg(testTraceContext) // From the parent executing the supervision strategy
@@ -119,7 +123,7 @@ class ActorSystemMessageInstrumentationSpec extends TestKitBase with WordSpecLik
val child = context.actorOf(Props(new Parent))
override def supervisorStrategy: SupervisorStrategy = OneForOneStrategy() {
- case NonFatal(throwable) ⇒ testActor ! TraceRecorder.currentContext; Stop
+ case NonFatal(throwable) ⇒ testActor ! TraceContext.currentContext; Stop
}
def receive = {
@@ -131,7 +135,7 @@ class ActorSystemMessageInstrumentationSpec extends TestKitBase with WordSpecLik
val child = context.actorOf(Props(new Child))
override def supervisorStrategy: SupervisorStrategy = OneForOneStrategy() {
- case NonFatal(throwable) ⇒ testActor ! TraceRecorder.currentContext; directive
+ case NonFatal(throwable) ⇒ testActor ! TraceContext.currentContext; directive
}
def receive: Actor.Receive = {
@@ -139,7 +143,7 @@ class ActorSystemMessageInstrumentationSpec extends TestKitBase with WordSpecLik
}
override def postStop(): Unit = {
- if (sendPostStop) testActor ! TraceRecorder.currentContext
+ if (sendPostStop) testActor ! TraceContext.currentContext
super.postStop()
}
}
@@ -147,26 +151,26 @@ class ActorSystemMessageInstrumentationSpec extends TestKitBase with WordSpecLik
class Child extends Actor {
def receive = {
case "fail" ⇒ throw new ArithmeticException("Division by zero.")
- case "context" ⇒ sender ! TraceRecorder.currentContext
+ case "context" ⇒ sender ! TraceContext.currentContext
}
override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
- if (sendPreRestart) testActor ! TraceRecorder.currentContext
+ if (sendPreRestart) testActor ! TraceContext.currentContext
super.preRestart(reason, message)
}
override def postRestart(reason: Throwable): Unit = {
- if (sendPostRestart) testActor ! TraceRecorder.currentContext
+ if (sendPostRestart) testActor ! TraceContext.currentContext
super.postRestart(reason)
}
override def postStop(): Unit = {
- if (sendPostStop) testActor ! TraceRecorder.currentContext
+ if (sendPostStop) testActor ! TraceContext.currentContext
super.postStop()
}
override def preStart(): Unit = {
- if (sendPreStart) testActor ! TraceRecorder.currentContext
+ if (sendPreStart) testActor ! TraceContext.currentContext
super.preStart()
}
}
diff --git a/kamon-akka/src/test/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala b/kamon-akka/src/test/scala/kamon/akka/instrumentation/AskPatternInstrumentationSpec.scala
index 5c9905ba..0d63a19e 100644
--- a/kamon-akka/src/test/scala/kamon/instrumentation/akka/AskPatternInstrumentationSpec.scala
+++ b/kamon-akka/src/test/scala/kamon/akka/instrumentation/AskPatternInstrumentationSpec.scala
@@ -21,21 +21,26 @@ import java.util.concurrent.atomic.AtomicInteger
import akka.actor._
import akka.event.Logging.Warning
import akka.pattern.ask
-import akka.testkit.{ TestProbe, TestKitBase }
+import akka.testkit.TestProbe
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
import kamon.Kamon
import kamon.akka.Akka
-import kamon.trace.{ TraceContext, TraceContextAware, TraceRecorder }
-import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.{ TraceContext, TraceContextAware }
import scala.concurrent.duration._
-class AskPatternInstrumentationSpec extends TestKitBase with WordSpecLike with Matchers with BeforeAndAfterAll {
- implicit lazy val system: ActorSystem = ActorSystem("ask-pattern-tracing-spec",
- ConfigFactory.parseString("""akka.loggers = ["akka.event.slf4j.Slf4jLogger"]"""))
+class AskPatternInstrumentationSpec extends BaseKamonSpec("ask-pattern-tracing-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |akka {
+ | loglevel = OFF
+ |}
+ """.stripMargin)
- implicit val ec = system.dispatcher
+ implicit lazy val ec = system.dispatcher
implicit val askTimeout = Timeout(10 millis)
// TODO: Make this work with ActorSelections
@@ -46,9 +51,9 @@ class AskPatternInstrumentationSpec extends TestKitBase with WordSpecLike with M
setAskPatternTimeoutWarningMode("heavyweight")
expectTimeoutWarning() {
- TraceRecorder.withNewTraceContext("ask-timeout-warning") {
+ TraceContext.withContext(newContext("ask-timeout-warning")) {
noReplyActorRef ? "hello"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
}
}
@@ -59,9 +64,9 @@ class AskPatternInstrumentationSpec extends TestKitBase with WordSpecLike with M
setAskPatternTimeoutWarningMode("lightweight")
expectTimeoutWarning(messageSizeLimit = Some(1)) {
- TraceRecorder.withNewTraceContext("ask-timeout-warning") {
+ TraceContext.withContext(newContext("ask-timeout-warning")) {
noReplyActorRef ? "hello"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
}
}
@@ -72,9 +77,9 @@ class AskPatternInstrumentationSpec extends TestKitBase with WordSpecLike with M
setAskPatternTimeoutWarningMode("off")
expectTimeoutWarning(expectWarning = false) {
- TraceRecorder.withNewTraceContext("ask-timeout-warning") {
+ TraceContext.withContext(newContext("ask-timeout-warning")) {
noReplyActorRef ? "hello"
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
}
}
diff --git a/kamon-akka/src/test/scala/kamon/metric/ActorMetricsSpec.scala b/kamon-akka/src/test/scala/kamon/metric/ActorMetricsSpec.scala
index 6d16386b..322abed2 100644
--- a/kamon-akka/src/test/scala/kamon/metric/ActorMetricsSpec.scala
+++ b/kamon-akka/src/test/scala/kamon/metric/ActorMetricsSpec.scala
@@ -20,44 +20,29 @@ import java.nio.LongBuffer
import kamon.Kamon
import kamon.akka.ActorMetrics
import kamon.metric.ActorMetricsTestActor._
+import kamon.metric.instrument.CollectionContext
import org.scalatest.{ BeforeAndAfterAll, WordSpecLike, Matchers }
import akka.testkit.{ ImplicitSender, TestProbe, TestKitBase }
import akka.actor._
import com.typesafe.config.ConfigFactory
import scala.concurrent.duration._
-import ActorMetrics.{ ActorMetricsRecorder, ActorMetricSnapshot }
class ActorMetricsSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender with BeforeAndAfterAll {
implicit lazy val system: ActorSystem = ActorSystem("actor-metrics-spec", ConfigFactory.parseString(
"""
- |kamon.metrics {
+ |kamon.metric {
| tick-interval = 1 hour
| default-collection-context-buffer-size = 10
|
- | filters = [
- | {
- | actor {
- | includes = [ "user/tracked-*", "user/measuring-*", "user/clean-after-collect", "user/stop" ]
- | excludes = [ "user/tracked-explicitly-excluded"]
- | }
- | }
- | ]
- | precision.actor {
- | processing-time {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- |
- | time-in-mailbox {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
+ | filters {
+ | akka-actor {
+ | includes = [ "user/tracked-*", "user/measuring-*", "user/clean-after-collect", "user/stop" ]
+ | excludes = [ "user/tracked-explicitly-excluded", "user/non-tracked-actor" ]
| }
+ | }
|
- | mailbox-size {
- | refresh-interval = 1 hour
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
+ | instrument-settings {
+ | akka-actor.mailbox-size.refresh-interval = 1 hour
| }
|}
|
@@ -89,16 +74,16 @@ class ActorMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
expectMsg(Pong)
val firstSnapshot = collectMetricsOf(trackedActor).get
- firstSnapshot.errors.count should be(1L)
- firstSnapshot.mailboxSize.numberOfMeasurements should be > 0L
- firstSnapshot.processingTime.numberOfMeasurements should be(102L) // 102 examples
- firstSnapshot.timeInMailbox.numberOfMeasurements should be(102L) // 102 examples
+ firstSnapshot.counter("errors").get.count should be(1L)
+ firstSnapshot.minMaxCounter("mailbox-size").get.numberOfMeasurements should be > 0L
+ firstSnapshot.histogram("processing-time").get.numberOfMeasurements should be(102L) // 102 examples
+ firstSnapshot.histogram("time-in-mailbox").get.numberOfMeasurements should be(102L) // 102 examples
val secondSnapshot = collectMetricsOf(trackedActor).get // Ensure that the recorders are clean
- secondSnapshot.errors.count should be(0L)
- secondSnapshot.mailboxSize.numberOfMeasurements should be(3L) // min, max and current
- secondSnapshot.processingTime.numberOfMeasurements should be(0L)
- secondSnapshot.timeInMailbox.numberOfMeasurements should be(0L)
+ secondSnapshot.counter("errors").get.count should be(0L)
+ secondSnapshot.minMaxCounter("mailbox-size").get.numberOfMeasurements should be(3L) // min, max and current
+ secondSnapshot.histogram("processing-time").get.numberOfMeasurements should be(0L)
+ secondSnapshot.histogram("time-in-mailbox").get.numberOfMeasurements should be(0L)
}
}
@@ -109,9 +94,9 @@ class ActorMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
val timings = expectMsgType[TrackedTimings]
val snapshot = collectMetricsOf(trackedActor).get
- snapshot.processingTime.numberOfMeasurements should be(1L)
- snapshot.processingTime.recordsIterator.next().count should be(1L)
- snapshot.processingTime.recordsIterator.next().level should be(timings.approximateProcessingTime +- 10.millis.toNanos)
+ snapshot.histogram("processing-time").get.numberOfMeasurements should be(1L)
+ snapshot.histogram("processing-time").get.recordsIterator.next().count should be(1L)
+ snapshot.histogram("processing-time").get.recordsIterator.next().level should be(timings.approximateProcessingTime +- 10.millis.toNanos)
}
"record the number of errors" in new ActorMetricsFixtures {
@@ -122,7 +107,7 @@ class ActorMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
expectMsg(Pong)
val snapshot = collectMetricsOf(trackedActor).get
- snapshot.errors.count should be(10)
+ snapshot.counter("errors").get.count should be(10)
}
"record the mailbox-size" in new ActorMetricsFixtures {
@@ -138,8 +123,8 @@ class ActorMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
expectMsg(Pong)
val snapshot = collectMetricsOf(trackedActor).get
- snapshot.mailboxSize.min should be(0L)
- snapshot.mailboxSize.max should be(11L +- 1L)
+ snapshot.minMaxCounter("mailbox-size").get.min should be(0L)
+ snapshot.minMaxCounter("mailbox-size").get.max should be(11L +- 1L)
}
"record the time-in-mailbox" in new ActorMetricsFixtures {
@@ -149,20 +134,22 @@ class ActorMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
val timings = expectMsgType[TrackedTimings]
val snapshot = collectMetricsOf(trackedActor).get
- snapshot.timeInMailbox.numberOfMeasurements should be(1L)
- snapshot.timeInMailbox.recordsIterator.next().count should be(1L)
- snapshot.timeInMailbox.recordsIterator.next().level should be(timings.approximateTimeInMailbox +- 10.millis.toNanos)
+ snapshot.histogram("time-in-mailbox").get.numberOfMeasurements should be(1L)
+ snapshot.histogram("time-in-mailbox").get.recordsIterator.next().count should be(1L)
+ snapshot.histogram("time-in-mailbox").get.recordsIterator.next().level should be(timings.approximateTimeInMailbox +- 10.millis.toNanos)
}
"clean up the associated recorder when the actor is stopped" in new ActorMetricsFixtures {
val trackedActor = createTestActor("stop")
+ val firstRecorder = actorMetricsRecorderOf(trackedActor).get
+      // Killing the actor should remove its ActorMetrics, and registering again below should create a new one.
val deathWatcher = TestProbe()
deathWatcher.watch(trackedActor)
trackedActor ! PoisonPill
deathWatcher.expectTerminated(trackedActor)
- actorMetricsRecorderOf(trackedActor) shouldBe empty
+ actorMetricsRecorderOf(trackedActor).get shouldNot be theSameInstanceAs (firstRecorder)
}
}
@@ -175,10 +162,10 @@ class ActorMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
def actorRecorderName(ref: ActorRef): String = ref.path.elements.mkString("/")
- def actorMetricsRecorderOf(ref: ActorRef): Option[ActorMetricsRecorder] =
- Kamon(Metrics)(system).storage.get(ActorMetrics(actorRecorderName(ref))).map(_.asInstanceOf[ActorMetricsRecorder])
+ def actorMetricsRecorderOf(ref: ActorRef): Option[ActorMetrics] =
+ Kamon(Metrics)(system).register(ActorMetrics, actorRecorderName(ref)).map(_.recorder)
- def collectMetricsOf(ref: ActorRef): Option[ActorMetricSnapshot] = {
+ def collectMetricsOf(ref: ActorRef): Option[EntitySnapshot] = {
Thread.sleep(5) // Just in case the test advances a bit faster than the actor being tested.
actorMetricsRecorderOf(ref).map(_.collect(collectionContext))
}
diff --git a/kamon-akka/src/test/scala/kamon/metric/DispatcherMetricsSpec.scala b/kamon-akka/src/test/scala/kamon/metric/DispatcherMetricsSpec.scala
index 55af3f2e..2c530da9 100644
--- a/kamon-akka/src/test/scala/kamon/metric/DispatcherMetricsSpec.scala
+++ b/kamon-akka/src/test/scala/kamon/metric/DispatcherMetricsSpec.scala
@@ -15,96 +15,199 @@
package kamon.metric
-import akka.actor.{ ActorRef, ActorSystem, Props }
+import java.nio.LongBuffer
+
+import akka.actor.{ PoisonPill, Props, ActorRef, ActorSystem }
+import akka.dispatch.MessageDispatcher
import akka.testkit.{ TestKitBase, TestProbe }
import com.typesafe.config.ConfigFactory
import kamon.Kamon
-import kamon.akka.DispatcherMetrics
-import DispatcherMetrics.DispatcherMetricSnapshot
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.akka.{ ForkJoinPoolDispatcherMetrics, ThreadPoolExecutorDispatcherMetrics }
+import kamon.metric.ActorMetricsTestActor.{ Pong, Ping }
+import kamon.metric.instrument.CollectionContext
+import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }
+import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._
-class DispatcherMetricsSpec extends TestKitBase with WordSpecLike with Matchers {
+class DispatcherMetricsSpec extends TestKitBase with WordSpecLike with Matchers with BeforeAndAfterAll {
implicit lazy val system: ActorSystem = ActorSystem("dispatcher-metrics-spec", ConfigFactory.parseString(
"""
- |kamon.metrics {
- | tick-interval = 1 second
+ |kamon.metric {
+ | tick-interval = 1 hour
| default-collection-context-buffer-size = 10
|
- | filters = [
- | {
- | dispatcher {
- | includes = ["*"]
- | excludes = ["dispatcher-explicitly-excluded"]
- | }
+ | filters = {
+ | akka-dispatcher {
+ | includes = [ "*" ]
+ | excludes = [ "explicitly-excluded" ]
| }
- | ]
+ | }
+ |
+ | default-instrument-settings {
+ | gauge.refresh-interval = 1 hour
+ | min-max-counter.refresh-interval = 1 hour
+ | }
+ |}
+ |
+ |explicitly-excluded {
+ | type = "Dispatcher"
+ | executor = "fork-join-executor"
|}
|
- |dispatcher-explicitly-excluded {
- | type = "Dispatcher"
- | executor = "fork-join-executor"
+ |tracked-fjp {
+ | type = "Dispatcher"
+ | executor = "fork-join-executor"
+ |
+ | fork-join-executor {
+ | parallelism-min = 8
+ | parallelism-factor = 100.0
+ | parallelism-max = 22
+ | }
|}
|
- |tracked-dispatcher {
- | type = "Dispatcher"
- | executor = "thread-pool-executor"
+ |tracked-tpe {
+ | type = "Dispatcher"
+ | executor = "thread-pool-executor"
+ |
+ | thread-pool-executor {
+ | core-pool-size-min = 7
+ | core-pool-size-factor = 100.0
+ | max-pool-size-factor = 100.0
+ | max-pool-size-max = 21
+ | }
|}
|
""".stripMargin))
"the Kamon dispatcher metrics" should {
"respect the configured include and exclude filters" in {
- system.actorOf(Props[ActorMetricsTestActor].withDispatcher("tracked-dispatcher"), "actor-with-tracked-dispatcher")
- system.actorOf(Props[ActorMetricsTestActor].withDispatcher("dispatcher-explicitly-excluded"), "actor-with-excluded-dispatcher")
+ val defaultDispatcher = forceInit(system.dispatchers.lookup("akka.actor.default-dispatcher"))
+ val fjpDispatcher = forceInit(system.dispatchers.lookup("tracked-fjp"))
+ val tpeDispatcher = forceInit(system.dispatchers.lookup("tracked-tpe"))
+ val excludedDispatcher = forceInit(system.dispatchers.lookup("explicitly-excluded"))
+
+ findDispatcherRecorder(defaultDispatcher) shouldNot be(empty)
+ findDispatcherRecorder(fjpDispatcher) shouldNot be(empty)
+ findDispatcherRecorder(tpeDispatcher) shouldNot be(empty)
+ findDispatcherRecorder(excludedDispatcher) should be(empty)
+ }
- Kamon(Metrics).subscribe(DispatcherMetrics, "*", testActor, permanently = true)
- expectMsgType[TickMetricSnapshot]
+ "record metrics for a dispatcher with thread-pool-executor" in {
+ implicit val tpeDispatcher = system.dispatchers.lookup("tracked-tpe")
+ collectDispatcherMetrics(tpeDispatcher)
- within(2 seconds) {
- val tickSnapshot = expectMsgType[TickMetricSnapshot]
- tickSnapshot.metrics.keys should contain(DispatcherMetrics("tracked-dispatcher"))
- tickSnapshot.metrics.keys should not contain (DispatcherMetrics("dispatcher-explicitly-excluded"))
- }
+ Await.result({
+ Future.sequence {
+ for (_ ← 1 to 100) yield submit(tpeDispatcher)
+ }
+ }, 5 seconds)
+
+ refreshDispatcherInstruments(tpeDispatcher)
+ val snapshot = collectDispatcherMetrics(tpeDispatcher)
+
+ snapshot.gauge("active-threads") should not be empty
+ snapshot.gauge("pool-size").get.min should be >= 7L
+ snapshot.gauge("pool-size").get.max should be <= 21L
+ snapshot.gauge("max-pool-size").get.max should be(21)
+ snapshot.gauge("core-pool-size").get.max should be(21)
+ snapshot.gauge("processed-tasks").get.max should be(102L +- 5L)
+
+ // The processed tasks should be reset to 0 if no more tasks are submitted.
+ val secondSnapshot = collectDispatcherMetrics(tpeDispatcher)
+ secondSnapshot.gauge("processed-tasks").get.max should be(0)
}
- "record maximumPoolSize, runningThreadCount, queueTaskCount, poolSize metrics" in new DelayableActorFixture {
- val (delayable, metricsListener) = delayableActor("worker-actor", "tracked-dispatcher")
+ "record metrics for a dispatcher with fork-join-executor" in {
+ implicit val fjpDispatcher = system.dispatchers.lookup("tracked-fjp")
+ collectDispatcherMetrics(fjpDispatcher)
+
+ Await.result({
+ Future.sequence {
+ for (_ ← 1 to 100) yield submit(fjpDispatcher)
+ }
+ }, 5 seconds)
- for (_ ← 1 to 100) {
- //delayable ! Discard
- }
+ refreshDispatcherInstruments(fjpDispatcher)
+ val snapshot = collectDispatcherMetrics(fjpDispatcher)
+
+ snapshot.minMaxCounter("parallelism").get.max should be(22)
+ snapshot.gauge("pool-size").get.min should be >= 0L
+ snapshot.gauge("pool-size").get.max should be <= 22L
+ snapshot.gauge("active-threads").get.max should be >= 0L
+ snapshot.gauge("running-threads").get.max should be >= 0L
+ snapshot.gauge("queued-task-count").get.max should be(0)
- val dispatcherMetrics = expectDispatcherMetrics("tracked-dispatcher", metricsListener, 3 seconds)
- dispatcherMetrics.maximumPoolSize.max should be <= 64L //fail in travis
- dispatcherMetrics.poolSize.max should be <= 22L //fail in travis
- dispatcherMetrics.queueTaskCount.max should be(0L)
- dispatcherMetrics.runningThreadCount.max should be(0L)
}
- }
+ "clean up the metrics recorders after a dispatcher is shut down" in {
+ implicit val tpeDispatcher = system.dispatchers.lookup("tracked-tpe")
+ implicit val fjpDispatcher = system.dispatchers.lookup("tracked-fjp")
+
+ findDispatcherRecorder(fjpDispatcher) shouldNot be(empty)
+ findDispatcherRecorder(tpeDispatcher) shouldNot be(empty)
- def expectDispatcherMetrics(dispatcherId: String, listener: TestProbe, waitTime: FiniteDuration): DispatcherMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
+ shutdownDispatcher(tpeDispatcher)
+ shutdownDispatcher(fjpDispatcher)
+
+ findDispatcherRecorder(fjpDispatcher) should be(empty)
+ findDispatcherRecorder(tpeDispatcher) should be(empty)
}
- val dispatcherMetricsOption = tickSnapshot.metrics.get(DispatcherMetrics(dispatcherId))
- dispatcherMetricsOption should not be empty
- dispatcherMetricsOption.get.asInstanceOf[DispatcherMetricSnapshot]
+
+ }
+
+ val collectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(10000)
}
- trait DelayableActorFixture {
- def delayableActor(name: String, dispatcher: String): (ActorRef, TestProbe) = {
- val actor = system.actorOf(Props[ActorMetricsTestActor].withDispatcher(dispatcher), name)
- val metricsListener = TestProbe()
+ def actorRecorderName(ref: ActorRef): String = ref.path.elements.mkString("/")
- Kamon(Metrics).subscribe(DispatcherMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
+ def findDispatcherRecorder(dispatcher: MessageDispatcher): Option[EntityRecorder] =
+ Kamon(Metrics)(system).find(dispatcher.id, "akka-dispatcher")
- (actor, metricsListener)
+ def collectDispatcherMetrics(dispatcher: MessageDispatcher): EntitySnapshot =
+ findDispatcherRecorder(dispatcher).map(_.collect(collectionContext)).get
+
+ def refreshDispatcherInstruments(dispatcher: MessageDispatcher): Unit = {
+ findDispatcherRecorder(dispatcher) match {
+ case Some(tpe: ThreadPoolExecutorDispatcherMetrics) ⇒
+ tpe.processedTasks.refreshValue()
+ tpe.activeThreads.refreshValue()
+ tpe.maxPoolSize.refreshValue()
+ tpe.poolSize.refreshValue()
+ tpe.corePoolSize.refreshValue()
+
+ case Some(fjp: ForkJoinPoolDispatcherMetrics) ⇒
+ fjp.activeThreads.refreshValue()
+ fjp.poolSize.refreshValue()
+ fjp.queuedTaskCount.refreshValue()
+ fjp.paralellism.refreshValues()
+ fjp.runningThreads.refreshValue()
+
+ case other ⇒
}
}
+
+ def forceInit(dispatcher: MessageDispatcher): MessageDispatcher = {
+ val listener = TestProbe()
+ Future {
+ listener.ref ! "init done"
+ }(dispatcher)
+ listener.expectMsg("init done")
+
+ dispatcher
+ }
+
+ def submit(dispatcher: MessageDispatcher): Future[String] = Future {
+ "hello"
+ }(dispatcher)
+
+ def shutdownDispatcher(dispatcher: MessageDispatcher): Unit = {
+ val shutdownMethod = dispatcher.getClass.getDeclaredMethod("shutdown")
+ shutdownMethod.setAccessible(true)
+ shutdownMethod.invoke(dispatcher)
+ }
+
+ override protected def afterAll(): Unit = system.shutdown()
}
+
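
The deleted instrumentation pushed DispatcherMetricsMeasurement values on a schedule; the new model is pull-based, so the spec refreshes the gauges and collects a snapshot itself. A sketch of reading one dispatcher's snapshot outside a test, using the same calls as findDispatcherRecorder and collectDispatcherMetrics above (assumes this Kamon snapshot on the classpath; error handling elided):

    import java.nio.LongBuffer
    import akka.actor.ActorSystem
    import kamon.Kamon
    import kamon.metric.Metrics
    import kamon.metric.instrument.CollectionContext

    val system = ActorSystem("dispatcher-snapshot-example")

    val collectionContext = new CollectionContext {
      val buffer: LongBuffer = LongBuffer.allocate(10000)
    }

    // The recorder only exists once the dispatcher has been looked up and has
    // created its executor (see forceInit in the spec above).
    system.dispatchers.lookup("akka.actor.default-dispatcher")

    val snapshot = Kamon(Metrics)(system)
      .find("akka.actor.default-dispatcher", "akka-dispatcher")
      .map(_.collect(collectionContext))

    snapshot.flatMap(_.gauge("pool-size")).foreach { poolSize ⇒
      println(s"default-dispatcher pool-size max: ${poolSize.max}")
    }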
diff --git a/kamon-akka/src/test/scala/kamon/metric/RouterMetricsSpec.scala b/kamon-akka/src/test/scala/kamon/metric/RouterMetricsSpec.scala
index abc195ba..5f6bbb4f 100644
--- a/kamon-akka/src/test/scala/kamon/metric/RouterMetricsSpec.scala
+++ b/kamon-akka/src/test/scala/kamon/metric/RouterMetricsSpec.scala
@@ -18,17 +18,13 @@ package kamon.metric
import java.nio.LongBuffer
import akka.actor._
-import akka.kamon.instrumentation.ActorCellMetrics
import akka.routing._
import akka.testkit.{ ImplicitSender, TestKitBase, TestProbe }
import com.typesafe.config.ConfigFactory
import kamon.Kamon
-import kamon.akka.{ RouterMetrics, ActorMetrics }
-import ActorMetrics.{ ActorMetricSnapshot, ActorMetricsRecorder }
-import RouterMetrics._
+import kamon.akka.RouterMetrics
import kamon.metric.RouterMetricsTestActor._
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import kamon.metric.instrument.{ Counter, Histogram }
+import kamon.metric.instrument.CollectionContext
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }
import scala.concurrent.duration._
@@ -36,22 +32,14 @@ import scala.concurrent.duration._
class RouterMetricsSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender with BeforeAndAfterAll {
implicit lazy val system: ActorSystem = ActorSystem("router-metrics-spec", ConfigFactory.parseString(
"""
- |kamon.metrics {
+ |kamon.metric {
| tick-interval = 1 hour
| default-collection-context-buffer-size = 10
|
- | filters = [
- | {
- | router {
- | includes = [ "user/tracked-*", "user/measuring-*", "user/stop-*" ]
- | excludes = [ "user/tracked-explicitly-excluded-*"]
- | }
- | }
- | ]
- | precision {
- | default-histogram-precision {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
+ | filters = {
+ | akka-router {
+ | includes = [ "user/tracked-*", "user/measuring-*", "user/stop-*" ]
+ | excludes = [ "user/tracked-explicitly-excluded-*"]
| }
| }
|}
@@ -85,7 +73,7 @@ class RouterMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
listener.expectMsg(Pong)
val routerSnapshot = collectMetricsOf("user/measuring-routing-time-in-pool-router").get
- routerSnapshot.routingTime.numberOfMeasurements should be(1L)
+ routerSnapshot.histogram("routing-time").get.numberOfMeasurements should be(1L)
}
"record the routing-time of the receive function for group routers" in new RouterMetricsFixtures {
@@ -96,7 +84,7 @@ class RouterMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
listener.expectMsg(Pong)
val routerSnapshot = collectMetricsOf("user/measuring-routing-time-in-group-router").get
- routerSnapshot.routingTime.numberOfMeasurements should be(1L)
+ routerSnapshot.histogram("routing-time").get.numberOfMeasurements should be(1L)
}
"record the processing-time of the receive function for pool routers" in new RouterMetricsFixtures {
@@ -107,9 +95,9 @@ class RouterMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
val timings = timingsListener.expectMsgType[RouterTrackedTimings]
val routerSnapshot = collectMetricsOf("user/measuring-processing-time-in-pool-router").get
- routerSnapshot.processingTime.numberOfMeasurements should be(1L)
- routerSnapshot.processingTime.recordsIterator.next().count should be(1L)
- routerSnapshot.processingTime.recordsIterator.next().level should be(timings.approximateProcessingTime +- 10.millis.toNanos)
+ routerSnapshot.histogram("processing-time").get.numberOfMeasurements should be(1L)
+ routerSnapshot.histogram("processing-time").get.recordsIterator.next().count should be(1L)
+ routerSnapshot.histogram("processing-time").get.recordsIterator.next().level should be(timings.approximateProcessingTime +- 10.millis.toNanos)
}
"record the processing-time of the receive function for group routers" in new RouterMetricsFixtures {
@@ -120,9 +108,9 @@ class RouterMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
val timings = timingsListener.expectMsgType[RouterTrackedTimings]
val routerSnapshot = collectMetricsOf("user/measuring-processing-time-in-group-router").get
- routerSnapshot.processingTime.numberOfMeasurements should be(1L)
- routerSnapshot.processingTime.recordsIterator.next().count should be(1L)
- routerSnapshot.processingTime.recordsIterator.next().level should be(timings.approximateProcessingTime +- 10.millis.toNanos)
+ routerSnapshot.histogram("processing-time").get.numberOfMeasurements should be(1L)
+ routerSnapshot.histogram("processing-time").get.recordsIterator.next().count should be(1L)
+ routerSnapshot.histogram("processing-time").get.recordsIterator.next().level should be(timings.approximateProcessingTime +- 10.millis.toNanos)
}
"record the number of errors for pool routers" in new RouterMetricsFixtures {
@@ -137,7 +125,7 @@ class RouterMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
listener.expectMsg(Pong)
val routerSnapshot = collectMetricsOf("user/measuring-errors-in-pool-router").get
- routerSnapshot.errors.count should be(10L)
+ routerSnapshot.counter("errors").get.count should be(10L)
}
"record the number of errors for group routers" in new RouterMetricsFixtures {
@@ -152,7 +140,7 @@ class RouterMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
listener.expectMsg(Pong)
val routerSnapshot = collectMetricsOf("user/measuring-errors-in-group-router").get
- routerSnapshot.errors.count should be(10L)
+ routerSnapshot.counter("errors").get.count should be(10L)
}
"record the time-in-mailbox for pool routers" in new RouterMetricsFixtures {
@@ -163,9 +151,9 @@ class RouterMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
val timings = timingsListener.expectMsgType[RouterTrackedTimings]
val routerSnapshot = collectMetricsOf("user/measuring-time-in-mailbox-in-pool-router").get
- routerSnapshot.timeInMailbox.numberOfMeasurements should be(1L)
- routerSnapshot.timeInMailbox.recordsIterator.next().count should be(1L)
- routerSnapshot.timeInMailbox.recordsIterator.next().level should be(timings.approximateTimeInMailbox +- 10.millis.toNanos)
+ routerSnapshot.histogram("time-in-mailbox").get.numberOfMeasurements should be(1L)
+ routerSnapshot.histogram("time-in-mailbox").get.recordsIterator.next().count should be(1L)
+ routerSnapshot.histogram("time-in-mailbox").get.recordsIterator.next().level should be(timings.approximateTimeInMailbox +- 10.millis.toNanos)
}
"record the time-in-mailbox for group routers" in new RouterMetricsFixtures {
@@ -176,33 +164,35 @@ class RouterMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
val timings = timingsListener.expectMsgType[RouterTrackedTimings]
val routerSnapshot = collectMetricsOf("user/measuring-time-in-mailbox-in-group-router").get
- routerSnapshot.timeInMailbox.numberOfMeasurements should be(1L)
- routerSnapshot.timeInMailbox.recordsIterator.next().count should be(1L)
- routerSnapshot.timeInMailbox.recordsIterator.next().level should be(timings.approximateTimeInMailbox +- 10.millis.toNanos)
+ routerSnapshot.histogram("time-in-mailbox").get.numberOfMeasurements should be(1L)
+ routerSnapshot.histogram("time-in-mailbox").get.recordsIterator.next().count should be(1L)
+ routerSnapshot.histogram("time-in-mailbox").get.recordsIterator.next().level should be(timings.approximateTimeInMailbox +- 10.millis.toNanos)
}
"clean up the associated recorder when the pool router is stopped" in new RouterMetricsFixtures {
val trackedRouter = createTestPoolRouter("stop-in-pool-router")
- routerMetricsRecorderOf("user/stop-in-pool-router") should not be empty
+ val firstRecorder = routerMetricsRecorderOf("user/stop-in-pool-router").get
+      // Killing the router should remove its RouterMetrics, and registering again below should create a new one.
val deathWatcher = TestProbe()
deathWatcher.watch(trackedRouter)
trackedRouter ! PoisonPill
deathWatcher.expectTerminated(trackedRouter)
- routerMetricsRecorderOf("user/stop-in-pool-router") shouldBe empty
+ routerMetricsRecorderOf("user/stop-in-pool-router").get shouldNot be theSameInstanceAs (firstRecorder)
}
"clean up the associated recorder when the group router is stopped" in new RouterMetricsFixtures {
val trackedRouter = createTestPoolRouter("stop-in-group-router")
- routerMetricsRecorderOf("user/stop-in-group-router") should not be empty
+ val firstRecorder = routerMetricsRecorderOf("user/stop-in-group-router").get
+      // Killing the router should remove its RouterMetrics, and registering again below should create a new one.
val deathWatcher = TestProbe()
deathWatcher.watch(trackedRouter)
trackedRouter ! PoisonPill
deathWatcher.expectTerminated(trackedRouter)
- routerMetricsRecorderOf("user/stop-in-group-router") shouldBe empty
+ routerMetricsRecorderOf("user/stop-in-group-router").get shouldNot be theSameInstanceAs (firstRecorder)
}
}
@@ -213,10 +203,10 @@ class RouterMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
val buffer: LongBuffer = LongBuffer.allocate(10000)
}
- def routerMetricsRecorderOf(routerName: String): Option[RouterMetricsRecorder] =
- Kamon(Metrics)(system).storage.get(RouterMetrics(routerName)).map(_.asInstanceOf[RouterMetricsRecorder])
+ def routerMetricsRecorderOf(routerName: String): Option[RouterMetrics] =
+ Kamon(Metrics)(system).register(RouterMetrics, routerName).map(_.recorder)
- def collectMetricsOf(routerName: String): Option[RouterMetricSnapshot] = {
+ def collectMetricsOf(routerName: String): Option[EntitySnapshot] = {
Thread.sleep(5) // Just in case the test advances a bit faster than the actor being tested.
routerMetricsRecorderOf(routerName).map(_.collect(collectionContext))
}
@@ -255,16 +245,6 @@ class RouterMetricsSpec extends TestKitBase with WordSpecLike with Matchers with
router
}
}
-
- trait ActorMetricsFixtures {
- val collectionContext = new CollectionContext {
- val buffer: LongBuffer = LongBuffer.allocate(10000)
- }
-
- def createTestActor(name: String): ActorRef = system.actorOf(Props[ActorMetricsTestActor], name)
-
- def takeSnapshotOf(amr: ActorMetricsRecorder): ActorMetricSnapshot = amr.collect(collectionContext)
- }
}
class RouterMetricsTestActor extends Actor {
diff --git a/kamon-core/src/main/resources/META-INF/aop.xml b/kamon-core/src/main/resources/META-INF/aop.xml
index 47ce11d8..854e9437 100644
--- a/kamon-core/src/main/resources/META-INF/aop.xml
+++ b/kamon-core/src/main/resources/META-INF/aop.xml
@@ -2,19 +2,13 @@
<aspectj>
<aspects>
+
<!-- Disable AspectJ Weaver not present error -->
<aspect name="kamon.instrumentation.AspectJWeaverMissingWarning"/>
- <!-- Futures -->
- <aspect name="kamon.instrumentation.scala.FutureInstrumentation"/>
- <aspect name="kamon.instrumentation.scalaz.FutureInstrumentation"/>
-
</aspects>
<weaver>
- <include within="scala.concurrent..*"/>
- <include within="scalaz.concurrent..*"/>
- <include within="spray..*"/>
<include within="kamon..*"/>
</weaver>
diff --git a/kamon-core/src/main/resources/reference.conf b/kamon-core/src/main/resources/reference.conf
index 8f5a8b45..cd257ebe 100644
--- a/kamon-core/src/main/resources/reference.conf
+++ b/kamon-core/src/main/resources/reference.conf
@@ -3,19 +3,11 @@
# ================================== #
kamon {
-
- # Default dispatcher for all Kamon components, unless a more specific one is configured.
- default-dispatcher = "kamon.kamon-dispatcher"
-
- metrics {
+ metric {
# Time interval for collecting all metrics and send the snapshots to all subscribed actors.
tick-interval = 1 second
- # Time interval for recording values on all registered gauges.
- gauge-recording-interval = 100 milliseconds
-
-
# Default size for the LongBuffer that gets allocated for metrics collection and merge. The
# value should correspond to the highest number of different buckets with values that might
# exist in a single histogram during a metrics collection. The default value of 33792 is a
@@ -31,69 +23,79 @@ kamon {
# it might be ok for you to turn this error off.
disable-aspectj-weaver-missing-error = false
+ # Specify whether entities that do not match any include/exclude filter should be tracked.
+ track-unmatched-entities = yes
- dispatchers {
-
- # Dispatcher for periodical gauge value recordings.
- gauge-recordings = ${kamon.default-dispatcher}
-
- # Dispatcher for subscriptions and metrics collection actors.
- metric-subscriptions = ${kamon.default-dispatcher}
- }
-
-
- filters = [
- {
- actor {
- includes = []
- excludes = [ "system/*", "user/IO-*" ]
- }
- },
- {
- router {
- includes = []
- excludes = [ "system/*", "user/IO-*" ]
- }
- },
- {
- trace {
- includes = [ "*" ]
- excludes = []
- }
- },
- {
- dispatcher {
- includes = [ "default-dispatcher" ]
- excludes = []
- }
+ filters {
+ trace {
+ includes = [ "**" ]
+ excludes = [ ]
}
- ]
+ }
- precision {
- default-histogram-precision {
+ # Default instrument settings for histograms, min-max counters and gauges. The actual settings to be used when
+ # creating an instrument are determined by merging the default settings, code settings and specific instrument
+ # settings using the following priorities (top wins):
+
+ # - any setting in `kamon.metric.instrument-settings` for the given category/instrument.
+ # - code settings provided when creating the instrument.
+ # - `default-instrument-settings`.
+ #
+ default-instrument-settings {
+ histogram {
+ precision = normal
+ lowest-discernible-value = 1
highest-trackable-value = 3600000000000
- significant-value-digits = 2
}
- default-min-max-counter-precision {
- refresh-interval = 100 milliseconds
+ min-max-counter {
+ precision = normal
+ lowest-discernible-value = 1
highest-trackable-value = 999999999
- significant-value-digits = 2
+ refresh-interval = 100 milliseconds
}
- default-gauge-precision {
+ gauge {
+ precision = normal
+ lowest-discernible-value = 1
+ highest-trackable-value = 3600000000000
refresh-interval = 100 milliseconds
- highest-trackable-value = 999999999
- significant-value-digits = 2
}
- trace {
- elapsed-time = ${kamon.metrics.precision.default-histogram-precision}
- segment = ${kamon.metrics.precision.default-histogram-precision}
- }
+ }
+
+ # Custom configurations for category instruments. The settings provided in this section will override the default
+ # and code instrument settings as explained in the `default-instrument-settings` key. There is no need to provide
+ # full instrument settings in this section, only the settings that should be overridden must be included. Example:
+ # if you wish to change the precision and lowest discernible value of the `elapsed-time` instrument for the `trace`
+ # category, you should include the following configuration in your application.conf file:
+ #
+ # kamon.metric.instrument-settings.trace {
+ # elapsed-time {
+ # precision = fine
+ # lowest-discernible-value = 1000
+ # }
+ # }
+ #
+ # In this example, the value for the `highest-trackable-value` setting will be either the code setting or the default
+ # setting, depending on how the `elapsed-time` metric is created.
+ instrument-settings {
+
+ }
+
+ dispatchers {
+
+ # Dispatcher for the actor that will collect all recorded metrics on every tick and dispatch them to all subscribers.
+ metric-collection = akka.actor.default-dispatcher
+
+ # Dispatcher for the Kamon refresh scheduler, used by all MinMaxCounters and Gauges to update their values.
+ refresh-scheduler = akka.actor.default-dispatcher
}
}
+
+
+
trace {
# Level of detail used when recording trace information. The possible values are:
@@ -101,7 +103,7 @@ kamon {
# to the subscribers of trace data.
# - simple-trace: metrics for all included traces and all segments are recorded and additionally a Trace message
# containing the trace and segments details and metadata.
- level = metrics-only
+ level-of-detail = metrics-only
# Sampling strategy to apply when the tracing level is set to `simple-trace`. The options are: all, random, ordered
# and threshold. The details of each sampler are below.
@@ -142,7 +144,7 @@ kamon {
}
# Default dispatcher for all trace module operations
- dispatcher = ${kamon.default-dispatcher}
+ dispatcher = "akka.actor.default-dispatcher"
}
kamon-dispatcher {
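
The precedence rules described in the new `default-instrument-settings` comments can be exercised directly from code. Below is a minimal sketch: it parses the same `application.conf` fragment shown in the comments above and layers it over the reference configuration; the system name and printed key are illustrative only.

    import com.typesafe.config.ConfigFactory

    object InstrumentSettingsExample extends App {
      // Override only the settings that differ; anything not listed here falls
      // back to code settings and then to `default-instrument-settings`.
      val config = ConfigFactory.parseString(
        """
          |kamon.metric.instrument-settings.trace {
          |  elapsed-time {
          |    precision = fine
          |    lowest-discernible-value = 1000
          |  }
          |}
        """.stripMargin).withFallback(ConfigFactory.load())

      // The effective value merges the override with the reference.conf defaults.
      println(config.getString("kamon.metric.instrument-settings.trace.elapsed-time.precision"))
    }
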
diff --git a/kamon-core/src/main/scala/kamon/Kamon.scala b/kamon-core/src/main/scala/kamon/Kamon.scala
index 00026b77..f07f846b 100644
--- a/kamon-core/src/main/scala/kamon/Kamon.scala
+++ b/kamon-core/src/main/scala/kamon/Kamon.scala
@@ -16,9 +16,26 @@ package kamon
import _root_.akka.actor
import _root_.akka.actor._
+import com.typesafe.config.Config
+import kamon.metric._
+import kamon.trace.{ Tracer, TracerExtension }
+
+class Kamon(val actorSystem: ActorSystem) {
+ val metrics: MetricsExtension = Metrics.get(actorSystem)
+ val tracer: TracerExtension = Tracer.get(actorSystem)
+ val userMetrics: UserMetricsExtension = UserMetrics.get(actorSystem)
+}
object Kamon {
trait Extension extends actor.Extension
def apply[T <: Extension](key: ExtensionId[T])(implicit system: ActorSystem): T = key(system)
-}
+ def apply(actorSystemName: String): Kamon =
+ apply(ActorSystem(actorSystemName))
+
+ def apply(actorSystemName: String, config: Config): Kamon =
+ apply(ActorSystem(actorSystemName, config))
+
+ def apply(system: ActorSystem): Kamon =
+ new Kamon(system)
+} \ No newline at end of file
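
The new `Kamon` companion adds three bootstrap overloads. A minimal usage sketch, assuming the kamon-core reference configuration is on the classpath; the system name is arbitrary:

    import kamon.Kamon

    object KamonBootstrapExample extends App {
      // Creates an ActorSystem named "example-system" and wires up the
      // metrics, tracer and user-metrics extensions eagerly.
      val kamon = Kamon("example-system")

      println(kamon.metrics.settings.tickInterval)
      kamon.actorSystem.shutdown()
    }
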
diff --git a/kamon-core/src/main/scala/kamon/ModuleSupervisor.scala b/kamon-core/src/main/scala/kamon/ModuleSupervisor.scala
new file mode 100644
index 00000000..99d87719
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/ModuleSupervisor.scala
@@ -0,0 +1,48 @@
+package kamon
+
+import _root_.akka.actor
+import _root_.akka.actor._
+import kamon.ModuleSupervisor.CreateModule
+
+import scala.concurrent.{ Future, Promise }
+import scala.util.Success
+
+object ModuleSupervisor extends ExtensionId[ModuleSupervisorExtension] with ExtensionIdProvider {
+
+ def lookup(): ExtensionId[_ <: actor.Extension] = ModuleSupervisor
+ def createExtension(system: ExtendedActorSystem): ModuleSupervisorExtension = new ModuleSupervisorExtensionImpl(system)
+
+ case class CreateModule(name: String, props: Props, childPromise: Promise[ActorRef])
+}
+
+trait ModuleSupervisorExtension extends actor.Extension {
+ def createModule(name: String, props: Props): Future[ActorRef]
+}
+
+class ModuleSupervisorExtensionImpl(system: ExtendedActorSystem) extends ModuleSupervisorExtension {
+ import system.dispatcher
+ private lazy val supervisor = system.actorOf(Props[ModuleSupervisor], "kamon")
+
+ def createModule(name: String, props: Props): Future[ActorRef] = Future {} flatMap { _: Unit ⇒
+ val modulePromise = Promise[ActorRef]()
+ supervisor ! CreateModule(name, props, modulePromise)
+ modulePromise.future
+ }
+}
+
+class ModuleSupervisor extends Actor with ActorLogging {
+
+ def receive = {
+ case CreateModule(name, props, childPromise) ⇒ createChildModule(name, props, childPromise)
+ }
+
+ def createChildModule(name: String, props: Props, childPromise: Promise[ActorRef]): Unit = {
+ context.child(name).map { alreadyAvailableModule ⇒
+ log.warning("Received a request to create module [{}] but the module is already available, returning the existent one.")
+ childPromise.complete(Success(alreadyAvailableModule))
+
+ } getOrElse {
+ childPromise.complete(Success(context.actorOf(props, name)))
+ }
+ }
+}
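
A sketch of starting a module through the new supervisor; `MyModuleActor` is a hypothetical module actor, not part of this commit:

    import akka.actor.{ Actor, ActorSystem, Props }
    import kamon.ModuleSupervisor

    // Hypothetical module actor, used only for illustration.
    class MyModuleActor extends Actor {
      def receive = { case msg ⇒ println(s"module received: $msg") }
    }

    object ModuleSupervisorExample extends App {
      val system = ActorSystem("example-system")
      import system.dispatcher

      // createModule completes the Future once the child exists under the "kamon" supervisor.
      ModuleSupervisor.get(system).createModule("my-module", Props[MyModuleActor]).foreach { ref ⇒
        println(s"module started at ${ref.path}")
      }
    }
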
diff --git a/kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala b/kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala
index 0dd189f6..22f54ab0 100644
--- a/kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala
+++ b/kamon-core/src/main/scala/kamon/http/HttpServerMetrics.scala
@@ -1,99 +1,25 @@
package kamon.http
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric.instrument.Counter
-import kamon.metric._
-
-import scala.collection.concurrent.TrieMap
-
-object HttpServerMetrics extends MetricGroupIdentity {
- import Metrics.AtomicGetOrElseUpdateForTriemap
-
- val name: String = "http-server-metrics-recorder"
- val category = new MetricGroupCategory {
- val name: String = "http-server"
- }
-
- type TraceName = String
- type StatusCode = String
-
- case class CountPerStatusCode(statusCode: String) extends MetricIdentity {
- def name: String = statusCode
- }
-
- case class TraceCountPerStatus(traceName: TraceName, statusCode: StatusCode) extends MetricIdentity {
- def name: String = traceName + "_" + statusCode
- }
-
- class HttpServerMetricsRecorder extends MetricGroupRecorder {
-
- private val counters = TrieMap[StatusCode, Counter]()
- private val countersPerTrace = TrieMap[TraceName, TrieMap[StatusCode, Counter]]()
-
- def recordResponse(statusCode: StatusCode): Unit = recordResponse(statusCode, 1L)
-
- def recordResponse(statusCode: StatusCode, count: Long): Unit =
- counters.atomicGetOrElseUpdate(statusCode, Counter()).increment(count)
-
- def recordResponse(traceName: TraceName, statusCode: StatusCode): Unit = recordResponse(traceName, statusCode, 1L)
-
- def recordResponse(traceName: TraceName, statusCode: StatusCode, count: Long): Unit = {
- recordResponse(statusCode, count)
- countersPerTrace.atomicGetOrElseUpdate(traceName, TrieMap()).atomicGetOrElseUpdate(statusCode, Counter()).increment(count)
- }
-
- def collect(context: CollectionContext): HttpServerMetricsSnapshot = {
- val countsPerStatusCode = counters.map {
- case (statusCode, counter) ⇒ (statusCode, counter.collect(context))
- }.toMap
-
- val countsPerTraceAndStatus = countersPerTrace.map {
- case (traceName, countsPerStatus) ⇒
- (traceName, countsPerStatus.map { case (statusCode, counter) ⇒ (statusCode, counter.collect(context)) }.toMap)
- }.toMap
-
- HttpServerMetricsSnapshot(countsPerStatusCode, countsPerTraceAndStatus)
- }
-
- def cleanup: Unit = {}
+import kamon.metric.{ EntityRecorderFactory, GenericEntityRecorder }
+import kamon.metric.instrument.InstrumentFactory
+
+/**
+ * Counts HTTP response status codes into per-status-code and per trace-name-plus-status counters. When recording an
+ * HTTP response with status 500 for the trace "GetUser", both the counter named "500" and the counter named
+ * "GetUser_500" will be incremented.
+ */
+class HttpServerMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+
+ def recordResponse(statusCode: String): Unit =
+ counter(statusCode).increment()
+
+ def recordResponse(traceName: String, statusCode: String): Unit = {
+ recordResponse(statusCode)
+ counter(traceName + "_" + statusCode).increment()
}
-
- case class HttpServerMetricsSnapshot(countsPerStatusCode: Map[StatusCode, Counter.Snapshot],
- countsPerTraceAndStatusCode: Map[TraceName, Map[StatusCode, Counter.Snapshot]]) extends MetricGroupSnapshot {
-
- type GroupSnapshotType = HttpServerMetricsSnapshot
-
- def merge(that: HttpServerMetricsSnapshot, context: CollectionContext): HttpServerMetricsSnapshot = {
- val combinedCountsPerStatus = combineMaps(countsPerStatusCode, that.countsPerStatusCode)((l, r) ⇒ l.merge(r, context))
- val combinedCountsPerTraceAndStatus = combineMaps(countsPerTraceAndStatusCode, that.countsPerTraceAndStatusCode) {
- (leftCounts, rightCounts) ⇒ combineMaps(leftCounts, rightCounts)((l, r) ⇒ l.merge(r, context))
- }
- HttpServerMetricsSnapshot(combinedCountsPerStatus, combinedCountsPerTraceAndStatus)
- }
-
- def metrics: Map[MetricIdentity, MetricSnapshot] = {
- countsPerStatusCode.map {
- case (statusCode, count) ⇒ (CountPerStatusCode(statusCode), count)
- } ++ {
- for (
- (traceName, countsPerStatus) ← countsPerTraceAndStatusCode;
- (statusCode, count) ← countsPerStatus
- ) yield (TraceCountPerStatus(traceName, statusCode), count)
- }
- }
- }
-
- val Factory = HttpServerMetricGroupFactory
}
-case object HttpServerMetricGroupFactory extends MetricGroupFactory {
-
- import HttpServerMetrics._
-
- type GroupRecorder = HttpServerMetricsRecorder
-
- def create(config: Config, system: ActorSystem): HttpServerMetricsRecorder =
- new HttpServerMetricsRecorder()
-
-} \ No newline at end of file
+object HttpServerMetrics extends EntityRecorderFactory[HttpServerMetrics] {
+ def category: String = "http-server"
+ def createRecorder(instrumentFactory: InstrumentFactory): HttpServerMetrics = new HttpServerMetrics(instrumentFactory)
+}
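
Under the entity-recorder model the HTTP server metrics are obtained through the Metrics extension rather than a bespoke factory. A minimal sketch, assuming an ActorSystem with Kamon's reference configuration (where unmatched entities are tracked by default):

    import akka.actor.ActorSystem
    import kamon.http.HttpServerMetrics
    import kamon.metric.Metrics

    object HttpServerMetricsExample extends App {
      val system = ActorSystem("example-system")

      // register returns None only if a filter excludes the entity.
      Metrics.get(system).register(HttpServerMetrics, "http-server").foreach { reg ⇒
        // Increments both the "500" counter and the "GetUser_500" counter.
        reg.recorder.recordResponse("GetUser", "500")
      }
    }
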
diff --git a/kamon-core/src/main/scala/kamon/metric/Entity.scala b/kamon-core/src/main/scala/kamon/metric/Entity.scala
new file mode 100644
index 00000000..962626e0
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/Entity.scala
@@ -0,0 +1,52 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+/**
+ * Identifies a `thing` that is being monitored by Kamon. A [[kamon.metric.Entity]] is used to identify tracked `things`
+ * on both the metrics recording and reporting sides. Only the name and category fields are used when determining
+ * equality between two entities.
+ *
+ * // TODO: Find a better word for `thing`.
+ */
+class Entity(val name: String, val category: String, val metadata: Map[String, String]) {
+
+ override def equals(o: Any): Boolean = {
+ if (this eq o.asInstanceOf[AnyRef])
+ true
+ else if ((o.asInstanceOf[AnyRef] eq null) || !o.isInstanceOf[Entity])
+ false
+ else {
+ val thatAsEntity = o.asInstanceOf[Entity]
+ category == thatAsEntity.category && name == thatAsEntity.name
+ }
+ }
+
+ override def hashCode: Int = {
+ var result: Int = name.hashCode
+ result = 31 * result + category.hashCode
+ return result
+ }
+}
+
+object Entity {
+ def apply(name: String, category: String): Entity =
+ apply(name, category, Map.empty)
+
+ def apply(name: String, category: String, metadata: Map[String, String]): Entity =
+ new Entity(name, category, metadata)
+} \ No newline at end of file
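
Because only name and category participate in `equals` and `hashCode`, two entities that differ only in metadata are interchangeable as map keys; a small illustration:

    import kamon.metric.Entity

    object EntityEqualityExample extends App {
      val a = Entity("user/test-actor", "actor")
      val b = Entity("user/test-actor", "actor", Map("host" -> "node-1"))

      println(a == b)                  // true: metadata is ignored
      println(Map(a -> 1).contains(b)) // true: same hashCode and equals
    }
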
diff --git a/kamon-core/src/main/scala/kamon/metric/EntityMetrics.scala b/kamon-core/src/main/scala/kamon/metric/EntityMetrics.scala
deleted file mode 100644
index 3761f5a5..00000000
--- a/kamon-core/src/main/scala/kamon/metric/EntityMetrics.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metric
-
-import java.nio.{ LongBuffer }
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-
-trait MetricGroupCategory {
- def name: String
-}
-
-trait MetricGroupIdentity {
- def name: String
- def category: MetricGroupCategory
-}
-
-trait MetricIdentity {
- def name: String
-}
-
-trait CollectionContext {
- def buffer: LongBuffer
-}
-
-object CollectionContext {
- def apply(longBufferSize: Int): CollectionContext = new CollectionContext {
- val buffer: LongBuffer = LongBuffer.allocate(longBufferSize)
- }
-}
-
-trait MetricGroupRecorder {
- def collect(context: CollectionContext): MetricGroupSnapshot
- def cleanup: Unit
-}
-
-trait MetricSnapshot {
- type SnapshotType
-
- def merge(that: SnapshotType, context: CollectionContext): SnapshotType
-}
-
-trait MetricGroupSnapshot {
- type GroupSnapshotType
-
- def metrics: Map[MetricIdentity, MetricSnapshot]
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType
-}
-
-private[kamon] trait MetricRecorder {
- type SnapshotType <: MetricSnapshot
-
- def collect(context: CollectionContext): SnapshotType
- def cleanup: Unit
-}
-
-trait MetricGroupFactory {
- type GroupRecorder <: MetricGroupRecorder
- def create(config: Config, system: ActorSystem): GroupRecorder
-}
-
diff --git a/kamon-core/src/main/scala/kamon/metric/EntityRecorder.scala b/kamon-core/src/main/scala/kamon/metric/EntityRecorder.scala
new file mode 100644
index 00000000..7a1972f0
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/EntityRecorder.scala
@@ -0,0 +1,157 @@
+package kamon.metric
+
+import kamon.metric.instrument.Gauge.CurrentValueCollector
+import kamon.metric.instrument.Histogram.DynamicRange
+import kamon.metric.instrument._
+
+import scala.collection.concurrent.TrieMap
+import scala.concurrent.duration.FiniteDuration
+
+trait EntityRecorder {
+ def collect(collectionContext: CollectionContext): EntitySnapshot
+ def cleanup: Unit
+}
+
+trait EntityRecorderFactory[T <: EntityRecorder] {
+ def category: String
+ def createRecorder(instrumentFactory: InstrumentFactory): T
+}
+
+abstract class GenericEntityRecorder(instrumentFactory: InstrumentFactory) extends EntityRecorder {
+ import kamon.util.TriemapAtomicGetOrElseUpdate.Syntax
+
+ private val _instruments = TrieMap.empty[MetricKey, Instrument]
+ private def register[T <: Instrument](key: MetricKey, instrument: ⇒ T): T =
+ _instruments.atomicGetOrElseUpdate(key, instrument).asInstanceOf[T]
+
+ protected def histogram(name: String): Histogram =
+ register(HistogramKey(name), instrumentFactory.createHistogram(name))
+
+ protected def histogram(name: String, dynamicRange: DynamicRange): Histogram =
+ register(HistogramKey(name), instrumentFactory.createHistogram(name, Some(dynamicRange)))
+
+ protected def histogram(name: String, unitOfMeasurement: UnitOfMeasurement): Histogram =
+ register(HistogramKey(name, unitOfMeasurement), instrumentFactory.createHistogram(name))
+
+ protected def histogram(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement): Histogram =
+ register(HistogramKey(name, unitOfMeasurement), instrumentFactory.createHistogram(name, Some(dynamicRange)))
+
+ protected def histogram(key: HistogramKey): Histogram =
+ register(key, instrumentFactory.createHistogram(key.name))
+
+ protected def histogram(key: HistogramKey, dynamicRange: DynamicRange): Histogram =
+ register(key, instrumentFactory.createHistogram(key.name, Some(dynamicRange)))
+
+ protected def removeHistogram(name: String): Unit =
+ _instruments.remove(HistogramKey(name))
+
+ protected def removeHistogram(key: HistogramKey): Unit =
+ _instruments.remove(key)
+
+ protected def minMaxCounter(name: String): MinMaxCounter =
+ register(MinMaxCounterKey(name), instrumentFactory.createMinMaxCounter(name))
+
+ protected def minMaxCounter(name: String, dynamicRange: DynamicRange): MinMaxCounter =
+ register(MinMaxCounterKey(name), instrumentFactory.createMinMaxCounter(name, Some(dynamicRange)))
+
+ protected def minMaxCounter(name: String, refreshInterval: FiniteDuration): MinMaxCounter =
+ register(MinMaxCounterKey(name), instrumentFactory.createMinMaxCounter(name, refreshInterval = Some(refreshInterval)))
+
+ protected def minMaxCounter(name: String, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ register(MinMaxCounterKey(name, unitOfMeasurement), instrumentFactory.createMinMaxCounter(name))
+
+ protected def minMaxCounter(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration): MinMaxCounter =
+ register(MinMaxCounterKey(name), instrumentFactory.createMinMaxCounter(name, Some(dynamicRange), Some(refreshInterval)))
+
+ protected def minMaxCounter(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ register(MinMaxCounterKey(name, unitOfMeasurement), instrumentFactory.createMinMaxCounter(name, Some(dynamicRange)))
+
+ protected def minMaxCounter(name: String, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ register(MinMaxCounterKey(name, unitOfMeasurement), instrumentFactory.createMinMaxCounter(name, refreshInterval = Some(refreshInterval)))
+
+ protected def minMaxCounter(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ register(MinMaxCounterKey(name), instrumentFactory.createMinMaxCounter(name, Some(dynamicRange), Some(refreshInterval)))
+
+ protected def minMaxCounter(key: MinMaxCounterKey): MinMaxCounter =
+ register(key, instrumentFactory.createMinMaxCounter(key.name))
+
+ protected def minMaxCounter(key: MinMaxCounterKey, dynamicRange: DynamicRange): MinMaxCounter =
+ register(key, instrumentFactory.createMinMaxCounter(key.name, Some(dynamicRange)))
+
+ protected def minMaxCounter(key: MinMaxCounterKey, refreshInterval: FiniteDuration): MinMaxCounter =
+ register(key, instrumentFactory.createMinMaxCounter(key.name, refreshInterval = Some(refreshInterval)))
+
+ protected def minMaxCounter(key: MinMaxCounterKey, dynamicRange: DynamicRange, refreshInterval: FiniteDuration): MinMaxCounter =
+ register(key, instrumentFactory.createMinMaxCounter(key.name, Some(dynamicRange), Some(refreshInterval)))
+
+ protected def removeMinMaxCounter(name: String): Unit =
+ _instruments.remove(MinMaxCounterKey(name))
+
+ protected def removeMinMaxCounter(key: MinMaxCounterKey): Unit =
+ _instruments.remove(key)
+
+ protected def gauge(name: String, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name), instrumentFactory.createGauge(name, valueCollector = valueCollector))
+
+ protected def gauge(name: String, dynamicRange: DynamicRange, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name), instrumentFactory.createGauge(name, Some(dynamicRange), valueCollector = valueCollector))
+
+ protected def gauge(name: String, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name), instrumentFactory.createGauge(name, refreshInterval = Some(refreshInterval), valueCollector = valueCollector))
+
+ protected def gauge(name: String, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name, unitOfMeasurement), instrumentFactory.createGauge(name, valueCollector = valueCollector))
+
+ protected def gauge(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name), instrumentFactory.createGauge(name, Some(dynamicRange), Some(refreshInterval), valueCollector = valueCollector))
+
+ protected def gauge(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name, unitOfMeasurement), instrumentFactory.createGauge(name, Some(dynamicRange), valueCollector = valueCollector))
+
+ protected def gauge(name: String, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name), instrumentFactory.createGauge(name, refreshInterval = Some(refreshInterval), valueCollector = valueCollector))
+
+ protected def gauge(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ register(GaugeKey(name, unitOfMeasurement), instrumentFactory.createGauge(name, Some(dynamicRange), Some(refreshInterval), valueCollector))
+
+ protected def gauge(key: GaugeKey, valueCollector: CurrentValueCollector): Gauge =
+ register(key, instrumentFactory.createGauge(key.name, valueCollector = valueCollector))
+
+ protected def gauge(key: GaugeKey, dynamicRange: DynamicRange, valueCollector: CurrentValueCollector): Gauge =
+ register(key, instrumentFactory.createGauge(key.name, Some(dynamicRange), valueCollector = valueCollector))
+
+ protected def gauge(key: GaugeKey, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ register(key, instrumentFactory.createGauge(key.name, refreshInterval = Some(refreshInterval), valueCollector = valueCollector))
+
+ protected def gauge(key: GaugeKey, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ register(key, instrumentFactory.createGauge(key.name, Some(dynamicRange), Some(refreshInterval), valueCollector = valueCollector))
+
+ protected def removeGauge(name: String): Unit =
+ _instruments.remove(GaugeKey(name))
+
+ protected def removeGauge(key: GaugeKey): Unit =
+ _instruments.remove(key)
+
+ protected def counter(name: String): Counter =
+ register(CounterKey(name), instrumentFactory.createCounter())
+
+ protected def counter(key: CounterKey): Counter =
+ register(key, instrumentFactory.createCounter())
+
+ protected def removeCounter(name: String): Unit =
+ _instruments.remove(CounterKey(name))
+
+ protected def removeCounter(key: CounterKey): Unit =
+ _instruments.remove(key)
+
+ def collect(collectionContext: CollectionContext): EntitySnapshot = {
+ val snapshots = Map.newBuilder[MetricKey, InstrumentSnapshot]
+ _instruments.foreach {
+ case (key, instrument) ⇒ snapshots += key -> instrument.collect(collectionContext)
+ }
+
+ new DefaultEntitySnapshot(snapshots.result())
+ }
+
+ def cleanup: Unit = _instruments.values.foreach(_.cleanup)
+} \ No newline at end of file
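
A custom recorder only needs to extend GenericEntityRecorder and declare its instruments; they are created once via the InstrumentFactory and cached in the recorder's TrieMap. A minimal sketch with hypothetical metric names, mirroring the HttpServerMetrics pattern above:

    import kamon.metric.{ EntityRecorderFactory, GenericEntityRecorder }
    import kamon.metric.instrument.InstrumentFactory

    // Hypothetical recorder tracking a work queue.
    class QueueMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
      val size = minMaxCounter("queue-size")
      val waitTime = histogram("wait-time")
      val rejected = counter("rejected")
    }

    object QueueMetrics extends EntityRecorderFactory[QueueMetrics] {
      def category: String = "queue"
      def createRecorder(instrumentFactory: InstrumentFactory): QueueMetrics = new QueueMetrics(instrumentFactory)
    }
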
diff --git a/kamon-core/src/main/scala/kamon/metric/EntitySnapshot.scala b/kamon-core/src/main/scala/kamon/metric/EntitySnapshot.scala
new file mode 100644
index 00000000..17c8f4c5
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/EntitySnapshot.scala
@@ -0,0 +1,47 @@
+package kamon.metric
+
+import kamon.metric.instrument.{ Counter, Histogram, CollectionContext, InstrumentSnapshot }
+import kamon.util.MapMerge
+import scala.reflect.ClassTag
+
+trait EntitySnapshot {
+ def metrics: Map[MetricKey, InstrumentSnapshot]
+ def merge(that: EntitySnapshot, collectionContext: CollectionContext): EntitySnapshot
+
+ def histogram(name: String): Option[Histogram.Snapshot] =
+ find[HistogramKey, Histogram.Snapshot](name)
+
+ def minMaxCounter(name: String): Option[Histogram.Snapshot] =
+ find[MinMaxCounterKey, Histogram.Snapshot](name)
+
+ def gauge(name: String): Option[Histogram.Snapshot] =
+ find[GaugeKey, Histogram.Snapshot](name)
+
+ def counter(name: String): Option[Counter.Snapshot] =
+ find[CounterKey, Counter.Snapshot](name)
+
+ def histograms: Map[HistogramKey, Histogram.Snapshot] =
+ filterByType[HistogramKey, Histogram.Snapshot]
+
+ def minMaxCounters: Map[MinMaxCounterKey, Histogram.Snapshot] =
+ filterByType[MinMaxCounterKey, Histogram.Snapshot]
+
+ def gauges: Map[GaugeKey, Histogram.Snapshot] =
+ filterByType[GaugeKey, Histogram.Snapshot]
+
+ def counters: Map[CounterKey, Counter.Snapshot] =
+ filterByType[CounterKey, Counter.Snapshot]
+
+ private def filterByType[K <: MetricKey, V <: InstrumentSnapshot](implicit keyCT: ClassTag[K]): Map[K, V] =
+ metrics.collect { case (k, v) if keyCT.runtimeClass.isInstance(k) ⇒ (k.asInstanceOf[K], v.asInstanceOf[V]) }
+
+ private def find[K <: MetricKey, V <: InstrumentSnapshot](name: String)(implicit keyCT: ClassTag[K]) =
+ metrics.find { case (k, v) ⇒ keyCT.runtimeClass.isInstance(k) && k.name == name } map (_._2.asInstanceOf[V])
+}
+
+class DefaultEntitySnapshot(val metrics: Map[MetricKey, InstrumentSnapshot]) extends EntitySnapshot {
+ import MapMerge.Syntax
+
+ override def merge(that: EntitySnapshot, collectionContext: CollectionContext): EntitySnapshot =
+ new DefaultEntitySnapshot(metrics.merge(that.metrics, (l, r) ⇒ l.merge(r, collectionContext)))
+} \ No newline at end of file
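
Reading values back from a snapshot goes through the typed accessors above. A sketch reusing the HttpServerMetrics recorder from earlier in this diff:

    import akka.actor.ActorSystem
    import kamon.http.HttpServerMetrics
    import kamon.metric.Metrics

    object EntitySnapshotExample extends App {
      val system = ActorSystem("example-system")
      val metrics = Metrics.get(system)

      metrics.register(HttpServerMetrics, "http-server").foreach { reg ⇒
        reg.recorder.recordResponse("GetUser", "500")

        val snapshot = reg.recorder.collect(metrics.buildDefaultCollectionContext)
        // Typed accessors return None when no instrument of that kind/name was recorded.
        snapshot.counter("500").foreach(c ⇒ println(c.count))
        println(snapshot.counters.size) // 2: "500" and "GetUser_500"
      }
    }
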
diff --git a/kamon-core/src/main/scala/kamon/metric/MetricKey.scala b/kamon-core/src/main/scala/kamon/metric/MetricKey.scala
new file mode 100644
index 00000000..a17972df
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/MetricKey.scala
@@ -0,0 +1,153 @@
+package kamon.metric
+
+import kamon.metric.instrument.{ InstrumentTypes, InstrumentType, UnitOfMeasurement }
+
+/**
+ * MetricKeys identify a given metric in entity recorders and snapshots. They can also encode additional metadata
+ * for a metric being recorded, as well as the unit of measurement of the data being recorded.
+ */
+sealed trait MetricKey {
+ def name: String
+ def unitOfMeasurement: UnitOfMeasurement
+ def instrumentType: InstrumentType
+ def metadata: Map[String, String]
+}
+
+// Wish that there was a shorter way to describe the operations below, but apparently there is no way to generalize all
+// the apply/create versions that would produce the desired return types when used from Java.
+
+/**
+ * MetricKey for all Histogram-based metrics.
+ */
+case class HistogramKey(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]) extends MetricKey {
+ val instrumentType = InstrumentTypes.Histogram
+}
+
+object HistogramKey {
+ def apply(name: String): HistogramKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def apply(name: String, unitOfMeasurement: UnitOfMeasurement): HistogramKey =
+ apply(name, unitOfMeasurement, Map.empty)
+
+ def apply(name: String, metadata: Map[String, String]): HistogramKey =
+ apply(name, UnitOfMeasurement.Unknown, metadata)
+
+ /**
+ * Java friendly versions:
+ */
+
+ def create(name: String): HistogramKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement): HistogramKey =
+ apply(name, unitOfMeasurement)
+
+ def create(name: String, metadata: Map[String, String]): HistogramKey =
+ apply(name, metadata)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]): HistogramKey =
+ apply(name, unitOfMeasurement, metadata)
+}
+
+/**
+ * MetricKey for all MinMaxCounter-based metrics.
+ */
+case class MinMaxCounterKey(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]) extends MetricKey {
+ val instrumentType = InstrumentTypes.MinMaxCounter
+}
+
+object MinMaxCounterKey {
+ def apply(name: String): MinMaxCounterKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def apply(name: String, unitOfMeasurement: UnitOfMeasurement): MinMaxCounterKey =
+ apply(name, unitOfMeasurement, Map.empty)
+
+ def apply(name: String, metadata: Map[String, String]): MinMaxCounterKey =
+ apply(name, UnitOfMeasurement.Unknown, metadata)
+
+ /**
+ * Java friendly versions:
+ */
+
+ def create(name: String): MinMaxCounterKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement): MinMaxCounterKey =
+ apply(name, unitOfMeasurement)
+
+ def create(name: String, metadata: Map[String, String]): MinMaxCounterKey =
+ apply(name, metadata)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]): MinMaxCounterKey =
+ apply(name, unitOfMeasurement, metadata)
+}
+
+/**
+ * MetricKey for all Gauge-based metrics.
+ */
+case class GaugeKey(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]) extends MetricKey {
+ val instrumentType = InstrumentTypes.Gauge
+}
+
+object GaugeKey {
+ def apply(name: String): GaugeKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def apply(name: String, unitOfMeasurement: UnitOfMeasurement): GaugeKey =
+ apply(name, unitOfMeasurement, Map.empty)
+
+ def apply(name: String, metadata: Map[String, String]): GaugeKey =
+ apply(name, UnitOfMeasurement.Unknown, metadata)
+
+ /**
+ * Java friendly versions:
+ */
+
+ def create(name: String): GaugeKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement): GaugeKey =
+ apply(name, unitOfMeasurement)
+
+ def create(name: String, metadata: Map[String, String]): GaugeKey =
+ apply(name, metadata)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]): GaugeKey =
+ apply(name, unitOfMeasurement, metadata)
+}
+
+/**
+ * MetricKey for all Counter-based metrics.
+ */
+case class CounterKey(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]) extends MetricKey {
+ val instrumentType = InstrumentTypes.Counter
+}
+
+object CounterKey {
+ def apply(name: String): CounterKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def apply(name: String, unitOfMeasurement: UnitOfMeasurement): CounterKey =
+ apply(name, unitOfMeasurement, Map.empty)
+
+ def apply(name: String, metadata: Map[String, String]): CounterKey =
+ apply(name, UnitOfMeasurement.Unknown, metadata)
+
+ /**
+ * Java friendly versions:
+ */
+
+ def create(name: String): CounterKey =
+ apply(name, UnitOfMeasurement.Unknown)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement): CounterKey =
+ apply(name, unitOfMeasurement)
+
+ def create(name: String, metadata: Map[String, String]): CounterKey =
+ apply(name, metadata)
+
+ def create(name: String, unitOfMeasurement: UnitOfMeasurement, metadata: Map[String, String]): CounterKey =
+ apply(name, unitOfMeasurement, metadata)
+} \ No newline at end of file
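
Because the key type is part of a key's identity, metrics with the same name but different instrument kinds never collide inside a recorder; a short illustration:

    import kamon.metric.{ CounterKey, HistogramKey, MetricKey }
    import kamon.metric.instrument.UnitOfMeasurement

    object MetricKeyExample extends App {
      val keys = Set[MetricKey](HistogramKey("errors"), CounterKey("errors"))
      println(keys.size) // 2: same name, but different key types stay distinct

      // Units of measurement travel with the key.
      val elapsed = HistogramKey("elapsed-time", UnitOfMeasurement.Unknown)
      println(elapsed.unitOfMeasurement)
    }
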
diff --git a/kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala b/kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala
index ed55ab06..b738eeb9 100644
--- a/kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala
+++ b/kamon-core/src/main/scala/kamon/metric/MetricsExtension.scala
@@ -16,91 +16,119 @@
package kamon.metric
-import akka.event.Logging.Error
-import akka.event.EventStream
+import akka.actor
+import kamon.metric.SubscriptionsDispatcher.{ Unsubscribe, Subscribe }
+import kamon.{ ModuleSupervisor, Kamon }
+import kamon.metric.instrument.{ InstrumentFactory, CollectionContext }
import scala.collection.concurrent.TrieMap
import akka.actor._
-import com.typesafe.config.Config
-import kamon.util.GlobPathFilter
-import kamon.Kamon
-import akka.actor
-import kamon.metric.Metrics.MetricGroupFilter
-import kamon.metric.Subscriptions.{ Unsubscribe, Subscribe }
-import java.util.concurrent.TimeUnit
+import kamon.util.{ FastDispatch, TriemapAtomicGetOrElseUpdate }
-class MetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
- import Metrics.AtomicGetOrElseUpdateForTriemap
+object Metrics extends ExtensionId[MetricsExtension] with ExtensionIdProvider {
+ override def get(system: ActorSystem): MetricsExtension = super.get(system)
+ def lookup(): ExtensionId[_ <: actor.Extension] = Metrics
+ def createExtension(system: ExtendedActorSystem): MetricsExtension = new MetricsExtensionImpl(system)
+}
- val metricsExtConfig = system.settings.config.getConfig("kamon.metrics")
- printInitializationMessage(system.eventStream, metricsExtConfig.getBoolean("disable-aspectj-weaver-missing-error"))
+case class EntityRegistration[T <: EntityRecorder](entity: Entity, recorder: T)
- /** Configured Dispatchers */
- val metricSubscriptionsDispatcher = system.dispatchers.lookup(metricsExtConfig.getString("dispatchers.metric-subscriptions"))
- val gaugeRecordingsDispatcher = system.dispatchers.lookup(metricsExtConfig.getString("dispatchers.gauge-recordings"))
+trait MetricsExtension extends Kamon.Extension {
+ def settings: MetricsExtensionSettings
+ def shouldTrack(entity: Entity): Boolean
+ def shouldTrack(entityName: String, category: String): Boolean =
+ shouldTrack(Entity(entityName, category))
- /** Configuration Settings */
- val gaugeRecordingInterval = metricsExtConfig.getDuration("gauge-recording-interval", TimeUnit.MILLISECONDS)
+ def register[T <: EntityRecorder](recorderFactory: EntityRecorderFactory[T], entityName: String): Option[EntityRegistration[T]]
+ def register[T <: EntityRecorder](entity: Entity, recorder: T): EntityRegistration[T]
+ def unregister(entity: Entity): Unit
- val storage = TrieMap[MetricGroupIdentity, MetricGroupRecorder]()
- val filters = loadFilters(metricsExtConfig)
- lazy val subscriptions = system.actorOf(Props[Subscriptions], "kamon-metrics-subscriptions")
+ def find(entity: Entity): Option[EntityRecorder]
+ def find(name: String, category: String): Option[EntityRecorder]
- def register(identity: MetricGroupIdentity, factory: MetricGroupFactory): Option[factory.GroupRecorder] = {
- if (shouldTrack(identity))
- Some(storage.atomicGetOrElseUpdate(identity, factory.create(metricsExtConfig, system)).asInstanceOf[factory.GroupRecorder])
- else
- None
- }
+ def subscribe(filter: SubscriptionFilter, subscriber: ActorRef): Unit =
+ subscribe(filter, subscriber, permanently = false)
- def unregister(identity: MetricGroupIdentity): Unit = {
- storage.remove(identity).map(_.cleanup)
- }
+ def subscribe(category: String, selection: String, subscriber: ActorRef, permanently: Boolean): Unit =
+ subscribe(SubscriptionFilter(category, selection), subscriber, permanently)
- def subscribe[C <: MetricGroupCategory](category: C, selection: String, subscriber: ActorRef, permanently: Boolean = false): Unit =
- subscriptions.tell(Subscribe(category, selection, subscriber, permanently), subscriber)
+ def subscribe(category: String, selection: String, subscriber: ActorRef): Unit =
+ subscribe(SubscriptionFilter(category, selection), subscriber, permanently = false)
- def unsubscribe(subscriber: ActorRef): Unit =
- subscriptions.tell(Unsubscribe(subscriber), subscriber)
+ def subscribe(filter: SubscriptionFilter, subscriber: ActorRef, permanently: Boolean): Unit
- def scheduleGaugeRecorder(body: ⇒ Unit): Cancellable = {
- import scala.concurrent.duration._
+ def unsubscribe(subscriber: ActorRef): Unit
+ def buildDefaultCollectionContext: CollectionContext
+ def instrumentFactory(category: String): InstrumentFactory
+}
- system.scheduler.schedule(gaugeRecordingInterval milliseconds, gaugeRecordingInterval milliseconds) {
- body
- }(gaugeRecordingsDispatcher)
- }
+class MetricsExtensionImpl(system: ExtendedActorSystem) extends MetricsExtension {
+ import FastDispatch.Syntax
- private def shouldTrack(identity: MetricGroupIdentity): Boolean = {
- filters.get(identity.category.name).map(filter ⇒ filter.accept(identity.name)).getOrElse(true)
- }
+ val settings = MetricsExtensionSettings(system)
- def loadFilters(config: Config): Map[String, MetricGroupFilter] = {
- import scala.collection.JavaConverters._
+ private val _trackedEntities = TrieMap.empty[Entity, EntityRecorder]
+ private val _collectionContext = buildDefaultCollectionContext
+ private val _metricsCollectionDispatcher = system.dispatchers.lookup(settings.metricCollectionDispatcher)
+ private val _subscriptions = ModuleSupervisor.get(system).createModule("subscriptions-dispatcher",
+ SubscriptionsDispatcher.props(settings.tickInterval, collectSnapshots).withDispatcher(settings.metricCollectionDispatcher))
- val filters = config.getObjectList("filters").asScala
+ def shouldTrack(entity: Entity): Boolean =
+ settings.entityFilters.get(entity.category).map {
+ filter ⇒ filter.accept(entity.name)
- val allFilters =
- for (
- filter ← filters;
- entry ← filter.entrySet().asScala
- ) yield {
- val key = entry.getKey
- val keyBasedConfig = entry.getValue.atKey(key)
+ } getOrElse (settings.trackUnmatchedEntities)
- val includes = keyBasedConfig.getStringList(s"$key.includes").asScala.map(inc ⇒ new GlobPathFilter(inc)).toList
- val excludes = keyBasedConfig.getStringList(s"$key.excludes").asScala.map(exc ⇒ new GlobPathFilter(exc)).toList
+ def register[T <: EntityRecorder](recorderFactory: EntityRecorderFactory[T], entityName: String): Option[EntityRegistration[T]] = {
+ import TriemapAtomicGetOrElseUpdate.Syntax
+ val entity = Entity(entityName, recorderFactory.category)
- (key, MetricGroupFilter(includes, excludes))
- }
+ if (shouldTrack(entity)) {
+ val instrumentFactory = settings.instrumentFactories.get(recorderFactory.category).getOrElse(settings.defaultInstrumentFactory)
+ val recorder = _trackedEntities.atomicGetOrElseUpdate(entity, recorderFactory.createRecorder(instrumentFactory)).asInstanceOf[T]
+ Some(EntityRegistration(entity, recorder))
+ } else None
+ }
- allFilters.toMap
+ def register[T <: EntityRecorder](entity: Entity, recorder: T): EntityRegistration[T] = {
+ import TriemapAtomicGetOrElseUpdate.Syntax
+ EntityRegistration(entity, _trackedEntities.atomicGetOrElseUpdate(entity, recorder).asInstanceOf[T])
}
+ def unregister(entity: Entity): Unit =
+ _trackedEntities.remove(entity).map(_.cleanup)
+
+ def find(entity: Entity): Option[EntityRecorder] =
+ _trackedEntities.get(entity)
+
+ def find(name: String, category: String): Option[EntityRecorder] =
+ find(Entity(name, category))
+
+ def subscribe(filter: SubscriptionFilter, subscriber: ActorRef, permanent: Boolean): Unit =
+ _subscriptions.fastDispatch(Subscribe(filter, subscriber, permanent))(_metricsCollectionDispatcher)
+
+ def unsubscribe(subscriber: ActorRef): Unit =
+ _subscriptions.fastDispatch(Unsubscribe(subscriber))(_metricsCollectionDispatcher)
+
def buildDefaultCollectionContext: CollectionContext =
- CollectionContext(metricsExtConfig.getInt("default-collection-context-buffer-size"))
+ CollectionContext(settings.defaultCollectionContextBufferSize)
+
+ def instrumentFactory(category: String): InstrumentFactory =
+ settings.instrumentFactories.getOrElse(category, settings.defaultInstrumentFactory)
+
+ /**
+ * Collects a snapshot from every tracked entity recorder; invoked by the SubscriptionsDispatcher on every tick.
+ */
+ private def collectSnapshots(): Map[Entity, EntitySnapshot] = {
+ val builder = Map.newBuilder[Entity, EntitySnapshot]
+ _trackedEntities.foreach {
+ case (identity, recorder) ⇒ builder += ((identity, recorder.collect(_collectionContext)))
+ }
- def printInitializationMessage(eventStream: EventStream, disableWeaverMissingError: Boolean): Unit = {
+ builder.result()
+ }
+
+ /* def printInitializationMessage(eventStream: EventStream, disableWeaverMissingError: Boolean): Unit = {
if (!disableWeaverMissingError) {
val weaverMissingMessage =
"""
@@ -123,22 +151,6 @@ class MetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
eventStream.publish(Error("MetricsExtension", classOf[MetricsExtension], weaverMissingMessage))
}
- }
+ }*/
}
-object Metrics extends ExtensionId[MetricsExtension] with ExtensionIdProvider {
- def lookup(): ExtensionId[_ <: actor.Extension] = Metrics
- def createExtension(system: ExtendedActorSystem): MetricsExtension = new MetricsExtension(system)
-
- case class MetricGroupFilter(includes: List[GlobPathFilter], excludes: List[GlobPathFilter]) {
- def accept(name: String): Boolean = includes.exists(_.accept(name)) && !excludes.exists(_.accept(name))
- }
-
- implicit class AtomicGetOrElseUpdateForTriemap[K, V](trieMap: TrieMap[K, V]) {
- def atomicGetOrElseUpdate(key: K, op: ⇒ V): V =
- trieMap.get(key) match {
- case Some(v) ⇒ v
- case None ⇒ val d = op; trieMap.putIfAbsent(key, d).getOrElse(d)
- }
- }
-}
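
Putting the reworked extension API together: register an entity, subscribe with a glob selection, and receive tick messages. A minimal sketch; the subscriber actor is hypothetical, and `TickMetricSnapshot` is assumed to live in the new SubscriptionsDispatcher companion, as the dispatch code later in this diff references it:

    import akka.actor.{ Actor, ActorSystem, Props }
    import kamon.metric.Metrics
    import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot

    // Hypothetical subscriber that prints every tick it receives.
    class PrintingSubscriber extends Actor {
      def receive = {
        case TickMetricSnapshot(from, to, metrics) ⇒
          println(s"tick with ${metrics.size} entity snapshots")
      }
    }

    object SubscriptionExample extends App {
      val system = ActorSystem("example-system")
      val subscriber = system.actorOf(Props[PrintingSubscriber], "subscriber")

      // `permanently = true` keeps the subscription across ticks instead of
      // dropping it after the first dispatch.
      Metrics.get(system).subscribe("http-server", "**", subscriber, permanently = true)
    }
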
diff --git a/kamon-core/src/main/scala/kamon/metric/MetricsExtensionSettings.scala b/kamon-core/src/main/scala/kamon/metric/MetricsExtensionSettings.scala
new file mode 100644
index 00000000..ca1db850
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/MetricsExtensionSettings.scala
@@ -0,0 +1,100 @@
+package kamon.metric
+
+import akka.actor.ExtendedActorSystem
+import com.typesafe.config.Config
+import kamon.metric.instrument.{ RefreshScheduler, InstrumentFactory, DefaultInstrumentSettings, InstrumentCustomSettings }
+import kamon.util.GlobPathFilter
+
+import scala.concurrent.duration.FiniteDuration
+
+/**
+ * Configuration settings for the Metrics extension, as read from the `kamon.metric` configuration key.
+ */
+case class MetricsExtensionSettings(
+ tickInterval: FiniteDuration,
+ defaultCollectionContextBufferSize: Int,
+ trackUnmatchedEntities: Boolean,
+ entityFilters: Map[String, EntityFilter],
+ instrumentFactories: Map[String, InstrumentFactory],
+ defaultInstrumentFactory: InstrumentFactory,
+ metricCollectionDispatcher: String,
+ refreshSchedulerDispatcher: String,
+ refreshScheduler: RefreshScheduler)
+
+/**
+ * Decides whether an entity name should be tracked: the name must match at least one include pattern and no exclude pattern.
+ */
+case class EntityFilter(includes: List[GlobPathFilter], excludes: List[GlobPathFilter]) {
+ def accept(name: String): Boolean =
+ includes.exists(_.accept(name)) && !excludes.exists(_.accept(name))
+}
+
+object MetricsExtensionSettings {
+ import kamon.util.ConfigTools.Syntax
+ import scala.concurrent.duration._
+
+ def apply(system: ExtendedActorSystem): MetricsExtensionSettings = {
+ val metricConfig = system.settings.config.getConfig("kamon.metric")
+
+ val tickInterval = metricConfig.getFiniteDuration("tick-interval")
+ val collectBufferSize = metricConfig.getInt("default-collection-context-buffer-size")
+ val trackUnmatchedEntities = metricConfig.getBoolean("track-unmatched-entities")
+ val entityFilters = loadFilters(metricConfig.getConfig("filters"))
+ val defaultInstrumentSettings = DefaultInstrumentSettings.fromConfig(metricConfig.getConfig("default-instrument-settings"))
+ val metricCollectionDispatcher = metricConfig.getString("dispatchers.metric-collection")
+ val refreshSchedulerDispatcher = metricConfig.getString("dispatchers.refresh-scheduler")
+
+ val refreshScheduler = RefreshScheduler(system.scheduler, system.dispatchers.lookup(refreshSchedulerDispatcher))
+ val instrumentFactories = loadInstrumentFactories(metricConfig.getConfig("instrument-settings"), defaultInstrumentSettings, refreshScheduler)
+ val defaultInstrumentFactory = new InstrumentFactory(Map.empty, defaultInstrumentSettings, refreshScheduler)
+
+ MetricsExtensionSettings(tickInterval, collectBufferSize, trackUnmatchedEntities, entityFilters, instrumentFactories,
+ defaultInstrumentFactory, metricCollectionDispatcher, refreshSchedulerDispatcher, refreshScheduler)
+ }
+
+ /**
+ * Loads all the default filters configured under the `kamon.metric.filters` configuration key. Each filter is
+ * defined with the entity category as a sub-key of `kamon.metric.filters`, holding two sub-keys, `includes` and
+ * `excludes`, whose values are lists of glob patterns. Example:
+ *
+ * {{{
+ *
+ * kamon.metric.filters {
+ * actor {
+ * includes = ["user/test-actor", "user/service/worker-*"]
+ * excludes = ["user/IO-*"]
+ * }
+ * }
+ *
+ * }}}
+ *
+ * @return a Map from category name to corresponding entity filter.
+ */
+ def loadFilters(filtersConfig: Config): Map[String, EntityFilter] = {
+ import scala.collection.JavaConverters._
+
+ filtersConfig.firstLevelKeys map { category: String ⇒
+ val includes = filtersConfig.getStringList(s"$category.includes").asScala.map(inc ⇒ new GlobPathFilter(inc)).toList
+ val excludes = filtersConfig.getStringList(s"$category.excludes").asScala.map(exc ⇒ new GlobPathFilter(exc)).toList
+
+ (category, EntityFilter(includes, excludes))
+ } toMap
+ }
+
+ /**
+ * Load any custom configuration settings defined under the `kamon.metric.instrument-settings` configuration key and
+ * create InstrumentFactories for them.
+ *
+ * @return a Map from category name to InstrumentFactory.
+ */
+ def loadInstrumentFactories(instrumentSettings: Config, defaults: DefaultInstrumentSettings, refreshScheduler: RefreshScheduler): Map[String, InstrumentFactory] = {
+ instrumentSettings.firstLevelKeys.map { category ⇒
+ val categoryConfig = instrumentSettings.getConfig(category)
+ val customSettings = categoryConfig.firstLevelKeys.map { instrumentName ⇒
+ (instrumentName, InstrumentCustomSettings.fromConfig(categoryConfig.getConfig(instrumentName)))
+ } toMap
+
+ (category, new InstrumentFactory(customSettings, defaults, refreshScheduler))
+ } toMap
+ }
+}
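
Since `loadFilters` is public, its behavior can be checked in isolation; a sketch that parses a config fragment matching the shape documented above and evaluates `EntityFilter.accept`:

    import com.typesafe.config.ConfigFactory
    import kamon.metric.MetricsExtensionSettings

    object FilterLoadingExample extends App {
      val filters = MetricsExtensionSettings.loadFilters(ConfigFactory.parseString(
        """
          |actor {
          |  includes = ["user/service/**"]
          |  excludes = ["user/service/worker-buffer"]
          |}
        """.stripMargin))

      val actorFilter = filters("actor")
      println(actorFilter.accept("user/service/worker-1"))      // true: matches an include, no exclude
      println(actorFilter.accept("user/service/worker-buffer")) // false: explicitly excluded
    }
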
diff --git a/kamon-core/src/main/scala/kamon/metric/Scale.scala b/kamon-core/src/main/scala/kamon/metric/Scale.scala
deleted file mode 100644
index 2f27c1a3..00000000
--- a/kamon-core/src/main/scala/kamon/metric/Scale.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metric
-
-class Scale(val numericValue: Double) extends AnyVal
-
-object Scale {
- val Nano = new Scale(1E-9)
- val Micro = new Scale(1E-6)
- val Milli = new Scale(1E-3)
- val Unit = new Scale(1)
- val Kilo = new Scale(1E3)
- val Mega = new Scale(1E6)
- val Giga = new Scale(1E9)
-
- def convert(fromUnit: Scale, toUnit: Scale, value: Long): Double = (value * fromUnit.numericValue) / toUnit.numericValue
-}
diff --git a/kamon-core/src/main/scala/kamon/metric/Subscriptions.scala b/kamon-core/src/main/scala/kamon/metric/Subscriptions.scala
deleted file mode 100644
index a22e1c21..00000000
--- a/kamon-core/src/main/scala/kamon/metric/Subscriptions.scala
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metric
-
-import akka.actor._
-import kamon.metric.Subscriptions._
-import kamon.util.GlobPathFilter
-import scala.concurrent.duration.{ FiniteDuration, Duration }
-import java.util.concurrent.TimeUnit
-import kamon.{ MilliTimestamp, Kamon }
-import kamon.metric.TickMetricSnapshotBuffer.FlushBuffer
-
-class Subscriptions extends Actor {
- import context.system
-
- val flushMetricsSchedule = scheduleFlushMessage()
- val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
-
- var lastTick: MilliTimestamp = MilliTimestamp.now
- var oneShotSubscriptions: Map[ActorRef, MetricSelectionFilter] = Map.empty
- var permanentSubscriptions: Map[ActorRef, MetricSelectionFilter] = Map.empty
-
- def receive = {
- case Subscribe(category, selection, subscriber, permanent) ⇒ subscribe(category, selection, subscriber, permanent)
- case Unsubscribe(subscriber) ⇒ unsubscribe(subscriber)
- case Terminated(subscriber) ⇒ unsubscribe(subscriber)
- case FlushMetrics ⇒ flush()
- }
-
- def subscribe(category: MetricGroupCategory, selection: String, subscriber: ActorRef, permanent: Boolean): Unit = {
- context.watch(subscriber)
- val newFilter: MetricSelectionFilter = GroupAndPatternFilter(category, new GlobPathFilter(selection))
-
- if (permanent) {
- permanentSubscriptions = permanentSubscriptions.updated(subscriber, newFilter combine {
- permanentSubscriptions.getOrElse(subscriber, MetricSelectionFilter.empty)
- })
- } else {
- oneShotSubscriptions = oneShotSubscriptions.updated(subscriber, newFilter combine {
- oneShotSubscriptions.getOrElse(subscriber, MetricSelectionFilter.empty)
- })
- }
- }
-
- def unsubscribe(subscriber: ActorRef): Unit = {
- if (permanentSubscriptions.contains(subscriber))
- permanentSubscriptions = permanentSubscriptions - subscriber
-
- if (oneShotSubscriptions.contains(subscriber))
- oneShotSubscriptions = oneShotSubscriptions - subscriber
- }
-
- def flush(): Unit = {
- val currentTick = MilliTimestamp.now
- val snapshots = collectAll()
-
- dispatchSelectedMetrics(lastTick, currentTick, permanentSubscriptions, snapshots)
- dispatchSelectedMetrics(lastTick, currentTick, oneShotSubscriptions, snapshots)
-
- lastTick = currentTick
- oneShotSubscriptions = Map.empty
- }
-
- def collectAll(): Map[MetricGroupIdentity, MetricGroupSnapshot] = {
- val allMetrics = Kamon(Metrics).storage
- val builder = Map.newBuilder[MetricGroupIdentity, MetricGroupSnapshot]
-
- allMetrics.foreach {
- case (identity, recorder) ⇒ builder += ((identity, recorder.collect(collectionContext)))
- }
-
- builder.result()
- }
-
- def dispatchSelectedMetrics(lastTick: MilliTimestamp, currentTick: MilliTimestamp, subscriptions: Map[ActorRef, MetricSelectionFilter],
- snapshots: Map[MetricGroupIdentity, MetricGroupSnapshot]): Unit = {
-
- for ((subscriber, filter) ← subscriptions) {
- val selection = snapshots.filter(group ⇒ filter.accept(group._1))
- val tickMetrics = TickMetricSnapshot(lastTick, currentTick, selection)
-
- subscriber ! tickMetrics
- }
- }
-
- def scheduleFlushMessage(): Cancellable = {
- val config = context.system.settings.config
- val tickInterval = Duration(config.getDuration("kamon.metrics.tick-interval", TimeUnit.NANOSECONDS), TimeUnit.NANOSECONDS)
- context.system.scheduler.schedule(tickInterval, tickInterval, self, FlushMetrics)(context.dispatcher)
- }
-}
-
-object Subscriptions {
- case object FlushMetrics
- case class Unsubscribe(subscriber: ActorRef)
- case class Subscribe(category: MetricGroupCategory, selection: String, subscriber: ActorRef, permanently: Boolean = false)
- case class TickMetricSnapshot(from: MilliTimestamp, to: MilliTimestamp, metrics: Map[MetricGroupIdentity, MetricGroupSnapshot])
-
- trait MetricSelectionFilter {
- def accept(identity: MetricGroupIdentity): Boolean
- }
-
- object MetricSelectionFilter {
- val empty = new MetricSelectionFilter {
- def accept(identity: MetricGroupIdentity): Boolean = false
- }
-
- implicit class CombinableMetricSelectionFilter(msf: MetricSelectionFilter) {
- def combine(that: MetricSelectionFilter): MetricSelectionFilter = new MetricSelectionFilter {
- def accept(identity: MetricGroupIdentity): Boolean = msf.accept(identity) || that.accept(identity)
- }
- }
- }
-
- case class GroupAndPatternFilter(category: MetricGroupCategory, globFilter: GlobPathFilter) extends MetricSelectionFilter {
- def accept(identity: MetricGroupIdentity): Boolean = {
- category.equals(identity.category) && globFilter.accept(identity.name)
- }
- }
-}
-
-class TickMetricSnapshotBuffer(flushInterval: FiniteDuration, receiver: ActorRef) extends Actor {
- val flushSchedule = context.system.scheduler.schedule(flushInterval, flushInterval, self, FlushBuffer)(context.dispatcher)
- val collectionContext = Kamon(Metrics)(context.system).buildDefaultCollectionContext
-
- def receive = empty
-
- def empty: Actor.Receive = {
- case tick: TickMetricSnapshot ⇒ context become (buffering(tick))
- case FlushBuffer ⇒ // Nothing to flush.
- }
-
- def buffering(buffered: TickMetricSnapshot): Actor.Receive = {
- case TickMetricSnapshot(_, to, tickMetrics) ⇒
- val combinedMetrics = combineMaps(buffered.metrics, tickMetrics)(mergeMetricGroup)
- val combinedSnapshot = TickMetricSnapshot(buffered.from, to, combinedMetrics)
-
- context become (buffering(combinedSnapshot))
-
- case FlushBuffer ⇒
- receiver ! buffered
- context become (empty)
-
- }
-
- override def postStop(): Unit = {
- flushSchedule.cancel()
- super.postStop()
- }
-
- def mergeMetricGroup(left: MetricGroupSnapshot, right: MetricGroupSnapshot) = left.merge(right.asInstanceOf[left.GroupSnapshotType], collectionContext).asInstanceOf[MetricGroupSnapshot] // ??? //Combined(combineMaps(left.metrics, right.metrics)((l, r) ⇒ l.merge(r, collectionContext)))
-}
-
-object TickMetricSnapshotBuffer {
- case object FlushBuffer
-
- def props(flushInterval: FiniteDuration, receiver: ActorRef): Props =
- Props[TickMetricSnapshotBuffer](new TickMetricSnapshotBuffer(flushInterval, receiver))
-}
diff --git a/kamon-core/src/main/scala/kamon/metric/SubscriptionsDispatcher.scala b/kamon-core/src/main/scala/kamon/metric/SubscriptionsDispatcher.scala
new file mode 100644
index 00000000..f616be35
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/SubscriptionsDispatcher.scala
@@ -0,0 +1,115 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.metric
+
+import akka.actor._
+import kamon.metric.SubscriptionsDispatcher._
+import kamon.util.{ MilliTimestamp, GlobPathFilter }
+import scala.concurrent.duration.FiniteDuration
+
+/**
+ * Manages subscriptions to metrics and dispatches snapshots to all subscribers on every tick.
+ */
+private[kamon] class SubscriptionsDispatcher(interval: FiniteDuration, collector: () ⇒ Map[Entity, EntitySnapshot]) extends Actor {
+ var lastTick = MilliTimestamp.now
+ var oneShotSubscriptions = Map.empty[ActorRef, SubscriptionFilter]
+ var permanentSubscriptions = Map.empty[ActorRef, SubscriptionFilter]
+ val tickSchedule = context.system.scheduler.schedule(interval, interval, self, Tick)(context.dispatcher)
+
+ def receive = {
+ case Tick ⇒ processTick()
+ case Subscribe(filter, subscriber, permanently) ⇒ subscribe(filter, subscriber, permanently)
+ case Unsubscribe(subscriber) ⇒ unsubscribe(subscriber)
+ case Terminated(subscriber) ⇒ unsubscribe(subscriber)
+ }
+
+ def processTick(): Unit =
+ dispatch(collector())
+
+ def subscribe(filter: SubscriptionFilter, subscriber: ActorRef, permanent: Boolean): Unit = {
+ def addSubscription(storage: Map[ActorRef, SubscriptionFilter]): Map[ActorRef, SubscriptionFilter] =
+ storage.updated(subscriber, storage.getOrElse(subscriber, SubscriptionFilter.Empty).combine(filter))
+
+ context.watch(subscriber)
+
+ if (permanent)
+ permanentSubscriptions = addSubscription(permanentSubscriptions)
+ else
+ oneShotSubscriptions = addSubscription(oneShotSubscriptions)
+ }
+
+ def unsubscribe(subscriber: ActorRef): Unit = {
+ permanentSubscriptions = permanentSubscriptions - subscriber
+ oneShotSubscriptions = oneShotSubscriptions - subscriber
+ }
+
+ def dispatch(snapshots: Map[Entity, EntitySnapshot]): Unit = {
+ val currentTick = MilliTimestamp.now
+
+ dispatchSelections(lastTick, currentTick, permanentSubscriptions, snapshots)
+ dispatchSelections(lastTick, currentTick, oneShotSubscriptions, snapshots)
+
+ lastTick = currentTick
+ oneShotSubscriptions = Map.empty[ActorRef, SubscriptionFilter]
+ }
+
+ def dispatchSelections(lastTick: MilliTimestamp, currentTick: MilliTimestamp, subscriptions: Map[ActorRef, SubscriptionFilter],
+ snapshots: Map[Entity, EntitySnapshot]): Unit = {
+
+ for ((subscriber, filter) ← subscriptions) {
+ val selection = snapshots.filter(group ⇒ filter.accept(group._1))
+ val tickMetrics = TickMetricSnapshot(lastTick, currentTick, selection)
+
+ subscriber ! tickMetrics
+ }
+ }
+}
+
+object SubscriptionsDispatcher {
+ def props(interval: FiniteDuration, collector: () ⇒ Map[Entity, EntitySnapshot]): Props =
+ Props(new SubscriptionsDispatcher(interval, collector))
+
+ case object Tick
+ case class Unsubscribe(subscriber: ActorRef)
+ case class Subscribe(filter: SubscriptionFilter, subscriber: ActorRef, permanently: Boolean = false)
+ case class TickMetricSnapshot(from: MilliTimestamp, to: MilliTimestamp, metrics: Map[Entity, EntitySnapshot])
+
+}
+
+trait SubscriptionFilter { self ⇒
+
+ def accept(entity: Entity): Boolean
+
+ final def combine(that: SubscriptionFilter): SubscriptionFilter = new SubscriptionFilter {
+ override def accept(entity: Entity): Boolean = self.accept(entity) || that.accept(entity)
+ }
+}
+
+object SubscriptionFilter {
+ val Empty = new SubscriptionFilter {
+ def accept(entity: Entity): Boolean = false
+ }
+
+ def apply(category: String, name: String): SubscriptionFilter = new SubscriptionFilter {
+ val categoryPattern = new GlobPathFilter(category)
+ val namePattern = new GlobPathFilter(name)
+
+ def accept(entity: Entity): Boolean = {
+ categoryPattern.accept(entity.category) && namePattern.accept(entity.name)
+ }
+ }
+} \ No newline at end of file
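For context, a minimal sketch of how a subscriber could be wired against the new dispatcher. The one-second interval, actor names and the stubbed `collector` are illustrative only; the dispatcher is created from a collector function, and any actor can subscribe with a glob-based SubscriptionFilter to receive TickMetricSnapshot messages:

import akka.actor._
import kamon.metric._
import kamon.metric.SubscriptionsDispatcher._
import scala.concurrent.duration._

// Prints a summary line for every tick it receives.
class ConsoleSubscriber extends Actor {
  def receive = {
    case TickMetricSnapshot(from, to, metrics) ⇒
      println(s"Tick [$from - $to] with ${metrics.size} entities")
  }
}

// `collector` must produce the current snapshots; how it does so is out of scope here.
def wire(system: ActorSystem, collector: () ⇒ Map[Entity, EntitySnapshot]): Unit = {
  val dispatcher = system.actorOf(SubscriptionsDispatcher.props(1.second, collector))
  val subscriber = system.actorOf(Props[ConsoleSubscriber])

  // Permanently receive every "trace" entity, whatever its name.
  dispatcher ! Subscribe(SubscriptionFilter("trace", "*"), subscriber, permanently = true)
}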
diff --git a/kamon-core/src/main/scala/kamon/metric/TickMetricSnapshotBuffer.scala b/kamon-core/src/main/scala/kamon/metric/TickMetricSnapshotBuffer.scala
new file mode 100644
index 00000000..b9127118
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/TickMetricSnapshotBuffer.scala
@@ -0,0 +1,49 @@
+package kamon.metric
+
+import akka.actor.{ Props, Actor, ActorRef }
+import kamon.Kamon
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
+import kamon.metric.TickMetricSnapshotBuffer.FlushBuffer
+import kamon.metric.instrument.CollectionContext
+import kamon.util.MapMerge
+
+import scala.concurrent.duration.FiniteDuration
+
+class TickMetricSnapshotBuffer(flushInterval: FiniteDuration, receiver: ActorRef) extends Actor {
+ import MapMerge.Syntax
+
+ val flushSchedule = context.system.scheduler.schedule(flushInterval, flushInterval, self, FlushBuffer)(context.dispatcher)
+ val collectionContext: CollectionContext = Kamon(Metrics)(context.system).buildDefaultCollectionContext
+
+ def receive = empty
+
+ def empty: Actor.Receive = {
+ case tick: TickMetricSnapshot ⇒ context become (buffering(tick))
+ case FlushBuffer ⇒ // Nothing to flush.
+ }
+
+ def buffering(buffered: TickMetricSnapshot): Actor.Receive = {
+ case TickMetricSnapshot(_, to, tickMetrics) ⇒
+ val combinedMetrics = buffered.metrics.merge(tickMetrics, (l, r) ⇒ l.merge(r, collectionContext))
+ val combinedSnapshot = TickMetricSnapshot(buffered.from, to, combinedMetrics)
+
+ context become (buffering(combinedSnapshot))
+
+ case FlushBuffer ⇒
+ receiver ! buffered
+ context become (empty)
+
+ }
+
+ override def postStop(): Unit = {
+ flushSchedule.cancel()
+ super.postStop()
+ }
+}
+
+object TickMetricSnapshotBuffer {
+ case object FlushBuffer
+
+ def props(flushInterval: FiniteDuration, receiver: ActorRef): Props =
+ Props[TickMetricSnapshotBuffer](new TickMetricSnapshotBuffer(flushInterval, receiver))
+}
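A sketch of how the buffer composes with a receiver: snapshots arriving faster than the flush interval are merged, so the receiver only sees one combined TickMetricSnapshot per flush window. The 10-second window and names are assumptions for illustration:

import akka.actor.{ ActorRef, ActorSystem }
import scala.concurrent.duration._

// The buffer merges every TickMetricSnapshot it receives and forwards a single
// combined snapshot, spanning the whole window, to `reporter` every 10 seconds.
def bufferedReporter(system: ActorSystem, reporter: ActorRef): ActorRef =
  system.actorOf(TickMetricSnapshotBuffer.props(10.seconds, reporter))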
diff --git a/kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala b/kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala
index eaad6e0d..3da9c1d4 100644
--- a/kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala
+++ b/kamon-core/src/main/scala/kamon/metric/TraceMetrics.scala
@@ -16,67 +16,29 @@
package kamon.metric
-import akka.actor.ActorSystem
-import kamon.metric.instrument.{ Histogram }
+import kamon.metric.instrument.{ Time, InstrumentFactory, Histogram }
-import scala.collection.concurrent.TrieMap
-import com.typesafe.config.Config
+class TraceMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ import TraceMetrics.segmentKey
-case class TraceMetrics(name: String) extends MetricGroupIdentity {
- val category = TraceMetrics
-}
-
-object TraceMetrics extends MetricGroupCategory {
- import Metrics.AtomicGetOrElseUpdateForTriemap
-
- val name = "trace"
-
- case object ElapsedTime extends MetricIdentity { val name = "elapsed-time" }
-
- case class TraceMetricRecorder(elapsedTime: Histogram, private val segmentRecorderFactory: () ⇒ Histogram)
- extends MetricGroupRecorder {
-
- val segments = TrieMap[MetricIdentity, Histogram]()
-
- def segmentRecorder(segmentIdentity: MetricIdentity): Histogram =
- segments.atomicGetOrElseUpdate(segmentIdentity, segmentRecorderFactory.apply())
-
- def collect(context: CollectionContext): TraceMetricsSnapshot =
- TraceMetricsSnapshot(
- elapsedTime.collect(context),
- segments.map { case (identity, recorder) ⇒ (identity, recorder.collect(context)) }.toMap)
+ /**
+   * Records the total elapsed time of a trace, in nanoseconds.
+ */
+ val ElapsedTime = histogram("elapsed-time", unitOfMeasurement = Time.Nanoseconds)
- def cleanup: Unit = {}
- }
-
- case class TraceMetricsSnapshot(elapsedTime: Histogram.Snapshot, segments: Map[MetricIdentity, Histogram.Snapshot])
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = TraceMetricsSnapshot
-
- def merge(that: TraceMetricsSnapshot, context: CollectionContext): TraceMetricsSnapshot =
- TraceMetricsSnapshot(elapsedTime.merge(that.elapsedTime, context), combineMaps(segments, that.segments)((l, r) ⇒ l.merge(r, context)))
-
- def metrics: Map[MetricIdentity, MetricSnapshot] = segments + (ElapsedTime -> elapsedTime)
- }
-
- val Factory = TraceMetricGroupFactory
+ /**
+   * Returns the histogram used to record the latency of the segment identified by the
+   * given name, category and library.
+ */
+ def segment(name: String, category: String, library: String): Histogram =
+ histogram(segmentKey(name, category, library))
}
-case object TraceMetricGroupFactory extends MetricGroupFactory {
-
- import TraceMetrics._
-
- type GroupRecorder = TraceMetricRecorder
-
- def create(config: Config, system: ActorSystem): TraceMetricRecorder = {
- val settings = config.getConfig("precision.trace")
- val elapsedTimeConfig = settings.getConfig("elapsed-time")
- val segmentConfig = settings.getConfig("segment")
+object TraceMetrics extends EntityRecorderFactory[TraceMetrics] {
+ def category: String = "trace"
+ def createRecorder(instrumentFactory: InstrumentFactory): TraceMetrics = new TraceMetrics(instrumentFactory)
- new TraceMetricRecorder(
- Histogram.fromConfig(elapsedTimeConfig, Scale.Nano),
- () ⇒ Histogram.fromConfig(segmentConfig, Scale.Nano))
- }
+ def segmentKey(name: String, category: String, library: String): HistogramKey =
+ HistogramKey(name, Time.Nanoseconds, Map("category" -> category, "library" -> library))
} \ No newline at end of file
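Illustration of the new segment keying (the sample values are made up): every segment becomes a tagged histogram inside the same trace recorder rather than a separate entity, so a snapshot of the trace carries its segments along with the elapsed-time histogram.

import kamon.metric.TraceMetrics

// Yields HistogramKey("jdbc-query", Time.Nanoseconds,
//   Map("category" -> "db", "library" -> "slick")).
val key = TraceMetrics.segmentKey("jdbc-query", "db", "slick")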
diff --git a/kamon-core/src/main/scala/kamon/metric/UserMetrics.scala b/kamon-core/src/main/scala/kamon/metric/UserMetrics.scala
index b7ac1ac5..5e1a7629 100644
--- a/kamon-core/src/main/scala/kamon/metric/UserMetrics.scala
+++ b/kamon-core/src/main/scala/kamon/metric/UserMetrics.scala
@@ -1,189 +1,193 @@
package kamon.metric
import akka.actor
-import akka.actor.{ ExtendedActorSystem, ExtensionIdProvider, ExtensionId }
+import akka.actor.{ ActorSystem, ExtendedActorSystem, ExtensionIdProvider, ExtensionId }
import kamon.Kamon
-import kamon.metric.instrument.{ Gauge, MinMaxCounter, Counter, Histogram }
+import kamon.metric.instrument.Gauge.CurrentValueCollector
+import kamon.metric.instrument.Histogram.DynamicRange
+import kamon.metric.instrument._
import scala.concurrent.duration.FiniteDuration
-class UserMetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
- import Metrics.AtomicGetOrElseUpdateForTriemap
- import UserMetrics._
-
- lazy val metricsExtension = Kamon(Metrics)(system)
- val precisionConfig = system.settings.config.getConfig("kamon.metrics.precision")
-
- val defaultHistogramPrecisionConfig = precisionConfig.getConfig("default-histogram-precision")
- val defaultMinMaxCounterPrecisionConfig = precisionConfig.getConfig("default-min-max-counter-precision")
- val defaultGaugePrecisionConfig = precisionConfig.getConfig("default-gauge-precision")
+object UserMetrics extends ExtensionId[UserMetricsExtension] with ExtensionIdProvider {
+ override def get(system: ActorSystem): UserMetricsExtension = super.get(system)
+ def lookup(): ExtensionId[_ <: actor.Extension] = UserMetrics
+ def createExtension(system: ExtendedActorSystem): UserMetricsExtension = {
+ val metricsExtension = Metrics.get(system)
+ val instrumentFactory = metricsExtension.instrumentFactory(entity.category)
+ val userMetricsExtension = new UserMetricsExtensionImpl(instrumentFactory)
- def registerHistogram(name: String, precision: Histogram.Precision, highestTrackableValue: Long): Histogram = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserHistogram(name), {
- UserHistogramRecorder(Histogram(highestTrackableValue, precision, Scale.Unit))
- }).asInstanceOf[UserHistogramRecorder].histogram
+ metricsExtension.register(entity, userMetricsExtension).recorder
}
- def registerHistogram(name: String): Histogram = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserHistogram(name), {
- UserHistogramRecorder(Histogram.fromConfig(defaultHistogramPrecisionConfig))
- }).asInstanceOf[UserHistogramRecorder].histogram
- }
+ val entity = Entity("user-metric", "user-metric")
+}
- def registerCounter(name: String): Counter = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserCounter(name), {
- UserCounterRecorder(Counter())
- }).asInstanceOf[UserCounterRecorder].counter
- }
+trait UserMetricsExtension extends Kamon.Extension {
+ def histogram(name: String): Histogram
+ def histogram(name: String, dynamicRange: DynamicRange): Histogram
+ def histogram(name: String, unitOfMeasurement: UnitOfMeasurement): Histogram
+ def histogram(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement): Histogram
+ def histogram(key: HistogramKey): Histogram
+ def histogram(key: HistogramKey, dynamicRange: DynamicRange): Histogram
+ def removeHistogram(name: String): Unit
+ def removeHistogram(key: HistogramKey): Unit
+
+ def minMaxCounter(name: String): MinMaxCounter
+ def minMaxCounter(name: String, dynamicRange: DynamicRange): MinMaxCounter
+ def minMaxCounter(name: String, refreshInterval: FiniteDuration): MinMaxCounter
+ def minMaxCounter(name: String, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter
+ def minMaxCounter(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration): MinMaxCounter
+ def minMaxCounter(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter
+ def minMaxCounter(name: String, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter
+ def minMaxCounter(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter
+ def minMaxCounter(key: MinMaxCounterKey): MinMaxCounter
+ def minMaxCounter(key: MinMaxCounterKey, dynamicRange: DynamicRange): MinMaxCounter
+ def minMaxCounter(key: MinMaxCounterKey, refreshInterval: FiniteDuration): MinMaxCounter
+ def minMaxCounter(key: MinMaxCounterKey, dynamicRange: DynamicRange, refreshInterval: FiniteDuration): MinMaxCounter
+ def removeMinMaxCounter(name: String): Unit
+ def removeMinMaxCounter(key: MinMaxCounterKey): Unit
+
+ def gauge(name: String, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, dynamicRange: DynamicRange, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge
+ def gauge(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge
+ def gauge(key: GaugeKey, valueCollector: CurrentValueCollector): Gauge
+ def gauge(key: GaugeKey, dynamicRange: DynamicRange, valueCollector: CurrentValueCollector): Gauge
+ def gauge(key: GaugeKey, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge
+ def gauge(key: GaugeKey, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge
+ def removeGauge(name: String): Unit
+ def removeGauge(key: GaugeKey): Unit
+
+ def counter(name: String): Counter
+ def counter(key: CounterKey): Counter
+ def removeCounter(name: String): Unit
+ def removeCounter(key: CounterKey): Unit
- def registerMinMaxCounter(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
- refreshInterval: FiniteDuration): MinMaxCounter = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserMinMaxCounter(name), {
- UserMinMaxCounterRecorder(MinMaxCounter(highestTrackableValue, precision, Scale.Unit, refreshInterval, system))
- }).asInstanceOf[UserMinMaxCounterRecorder].minMaxCounter
- }
+}
- def registerMinMaxCounter(name: String): MinMaxCounter = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserMinMaxCounter(name), {
- UserMinMaxCounterRecorder(MinMaxCounter.fromConfig(defaultMinMaxCounterPrecisionConfig, system))
- }).asInstanceOf[UserMinMaxCounterRecorder].minMaxCounter
- }
+class UserMetricsExtensionImpl(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with UserMetricsExtension {
+ override def histogram(name: String): Histogram =
+ super.histogram(name)
- def registerGauge(name: String)(currentValueCollector: Gauge.CurrentValueCollector): Gauge = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserGauge(name), {
- UserGaugeRecorder(Gauge.fromConfig(defaultGaugePrecisionConfig, system)(currentValueCollector))
- }).asInstanceOf[UserGaugeRecorder].gauge
- }
+ override def histogram(name: String, dynamicRange: DynamicRange): Histogram =
+ super.histogram(name, dynamicRange)
- def registerGauge(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
- refreshInterval: FiniteDuration)(currentValueCollector: Gauge.CurrentValueCollector): Gauge = {
- metricsExtension.storage.atomicGetOrElseUpdate(UserGauge(name), {
- UserGaugeRecorder(Gauge(precision, highestTrackableValue, Scale.Unit, refreshInterval, system)(currentValueCollector))
- }).asInstanceOf[UserGaugeRecorder].gauge
- }
+ override def histogram(name: String, unitOfMeasurement: UnitOfMeasurement): Histogram =
+ super.histogram(name, unitOfMeasurement)
- def removeHistogram(name: String): Unit =
- metricsExtension.unregister(UserHistogram(name))
+ override def histogram(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement): Histogram =
+ super.histogram(name, dynamicRange, unitOfMeasurement)
- def removeCounter(name: String): Unit =
- metricsExtension.unregister(UserCounter(name))
+ override def histogram(key: HistogramKey): Histogram =
+ super.histogram(key)
- def removeMinMaxCounter(name: String): Unit =
- metricsExtension.unregister(UserMinMaxCounter(name))
+ override def histogram(key: HistogramKey, dynamicRange: DynamicRange): Histogram =
+ super.histogram(key, dynamicRange)
- def removeGauge(name: String): Unit =
- metricsExtension.unregister(UserGauge(name))
-}
+ override def removeHistogram(name: String): Unit =
+ super.removeHistogram(name)
-object UserMetrics extends ExtensionId[UserMetricsExtension] with ExtensionIdProvider {
- def lookup(): ExtensionId[_ <: actor.Extension] = Metrics
+ override def removeHistogram(key: HistogramKey): Unit =
+ super.removeHistogram(key)
- def createExtension(system: ExtendedActorSystem): UserMetricsExtension = new UserMetricsExtension(system)
+ override def minMaxCounter(name: String): MinMaxCounter =
+ super.minMaxCounter(name)
- sealed trait UserMetricGroup
- //
- // Histograms
- //
+ override def minMaxCounter(name: String, dynamicRange: DynamicRange): MinMaxCounter =
+ super.minMaxCounter(name, dynamicRange)
- case class UserHistogram(name: String) extends MetricGroupIdentity with UserMetricGroup {
- val category = UserHistograms
- }
+ override def minMaxCounter(name: String, refreshInterval: FiniteDuration): MinMaxCounter =
+ super.minMaxCounter(name, refreshInterval)
- case class UserHistogramRecorder(histogram: Histogram) extends MetricGroupRecorder {
- def collect(context: CollectionContext): MetricGroupSnapshot =
- UserHistogramSnapshot(histogram.collect(context))
+ override def minMaxCounter(name: String, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ super.minMaxCounter(name, unitOfMeasurement)
- def cleanup: Unit = histogram.cleanup
- }
+ override def minMaxCounter(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration): MinMaxCounter =
+ super.minMaxCounter(name, dynamicRange, refreshInterval)
- case class UserHistogramSnapshot(histogramSnapshot: Histogram.Snapshot) extends MetricGroupSnapshot {
- type GroupSnapshotType = UserHistogramSnapshot
+ override def minMaxCounter(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ super.minMaxCounter(name, dynamicRange, unitOfMeasurement)
- def merge(that: UserHistogramSnapshot, context: CollectionContext): UserHistogramSnapshot =
- UserHistogramSnapshot(that.histogramSnapshot.merge(histogramSnapshot, context))
+ override def minMaxCounter(name: String, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ super.minMaxCounter(name, refreshInterval, unitOfMeasurement)
- def metrics: Map[MetricIdentity, MetricSnapshot] = Map((RecordedValues, histogramSnapshot))
- }
+ override def minMaxCounter(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement): MinMaxCounter =
+ super.minMaxCounter(name, dynamicRange, refreshInterval, unitOfMeasurement)
- //
- // Counters
- //
+ override def minMaxCounter(key: MinMaxCounterKey): MinMaxCounter =
+ super.minMaxCounter(key)
- case class UserCounter(name: String) extends MetricGroupIdentity with UserMetricGroup {
- val category = UserCounters
- }
+ override def minMaxCounter(key: MinMaxCounterKey, dynamicRange: DynamicRange): MinMaxCounter =
+ super.minMaxCounter(key, dynamicRange)
- case class UserCounterRecorder(counter: Counter) extends MetricGroupRecorder {
- def collect(context: CollectionContext): MetricGroupSnapshot =
- UserCounterSnapshot(counter.collect(context))
+ override def minMaxCounter(key: MinMaxCounterKey, refreshInterval: FiniteDuration): MinMaxCounter =
+ super.minMaxCounter(key, refreshInterval)
- def cleanup: Unit = counter.cleanup
- }
+ override def minMaxCounter(key: MinMaxCounterKey, dynamicRange: DynamicRange, refreshInterval: FiniteDuration): MinMaxCounter =
+ super.minMaxCounter(key, dynamicRange, refreshInterval)
- case class UserCounterSnapshot(counterSnapshot: Counter.Snapshot) extends MetricGroupSnapshot {
- type GroupSnapshotType = UserCounterSnapshot
+ override def removeMinMaxCounter(name: String): Unit =
+ super.removeMinMaxCounter(name)
- def merge(that: UserCounterSnapshot, context: CollectionContext): UserCounterSnapshot =
- UserCounterSnapshot(that.counterSnapshot.merge(counterSnapshot, context))
+ override def removeMinMaxCounter(key: MinMaxCounterKey): Unit =
+ super.removeMinMaxCounter(key)
- def metrics: Map[MetricIdentity, MetricSnapshot] = Map((Count, counterSnapshot))
- }
+ override def gauge(name: String, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, valueCollector)
- //
- // MinMaxCounters
- //
+ override def gauge(name: String, dynamicRange: DynamicRange, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, dynamicRange, valueCollector)
- case class UserMinMaxCounter(name: String) extends MetricGroupIdentity with UserMetricGroup {
- val category = UserMinMaxCounters
- }
+ override def gauge(name: String, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, refreshInterval, valueCollector)
- case class UserMinMaxCounterRecorder(minMaxCounter: MinMaxCounter) extends MetricGroupRecorder {
- def collect(context: CollectionContext): MetricGroupSnapshot =
- UserMinMaxCounterSnapshot(minMaxCounter.collect(context))
+ override def gauge(name: String, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, unitOfMeasurement, valueCollector)
- def cleanup: Unit = minMaxCounter.cleanup
- }
+ override def gauge(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, dynamicRange, refreshInterval, valueCollector)
- case class UserMinMaxCounterSnapshot(minMaxCounterSnapshot: Histogram.Snapshot) extends MetricGroupSnapshot {
- type GroupSnapshotType = UserMinMaxCounterSnapshot
+ override def gauge(name: String, dynamicRange: DynamicRange, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, dynamicRange, unitOfMeasurement, valueCollector)
- def merge(that: UserMinMaxCounterSnapshot, context: CollectionContext): UserMinMaxCounterSnapshot =
- UserMinMaxCounterSnapshot(that.minMaxCounterSnapshot.merge(minMaxCounterSnapshot, context))
+ override def gauge(name: String, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, refreshInterval, unitOfMeasurement, valueCollector)
- def metrics: Map[MetricIdentity, MetricSnapshot] = Map((RecordedValues, minMaxCounterSnapshot))
- }
-
- //
- // Gauges
- //
+ override def gauge(name: String, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, unitOfMeasurement: UnitOfMeasurement, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(name, dynamicRange, refreshInterval, unitOfMeasurement, valueCollector)
- case class UserGauge(name: String) extends MetricGroupIdentity with UserMetricGroup {
- val category = UserGauges
- }
+ override def gauge(key: GaugeKey, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(key, valueCollector)
- case class UserGaugeRecorder(gauge: Gauge) extends MetricGroupRecorder {
- def collect(context: CollectionContext): MetricGroupSnapshot =
- UserGaugeSnapshot(gauge.collect(context))
+ override def gauge(key: GaugeKey, dynamicRange: DynamicRange, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(key, dynamicRange, valueCollector)
- def cleanup: Unit = gauge.cleanup
- }
+ override def gauge(key: GaugeKey, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(key, refreshInterval, valueCollector)
- case class UserGaugeSnapshot(gaugeSnapshot: Histogram.Snapshot) extends MetricGroupSnapshot {
- type GroupSnapshotType = UserGaugeSnapshot
+ override def gauge(key: GaugeKey, dynamicRange: DynamicRange, refreshInterval: FiniteDuration, valueCollector: CurrentValueCollector): Gauge =
+ super.gauge(key, dynamicRange, refreshInterval, valueCollector)
- def merge(that: UserGaugeSnapshot, context: CollectionContext): UserGaugeSnapshot =
- UserGaugeSnapshot(that.gaugeSnapshot.merge(gaugeSnapshot, context))
+ override def removeGauge(name: String): Unit =
+ super.removeGauge(name)
- def metrics: Map[MetricIdentity, MetricSnapshot] = Map((RecordedValues, gaugeSnapshot))
- }
+ override def removeGauge(key: GaugeKey): Unit =
+ super.removeGauge(key)
- case object UserHistograms extends MetricGroupCategory { val name: String = "histogram" }
- case object UserCounters extends MetricGroupCategory { val name: String = "counter" }
- case object UserMinMaxCounters extends MetricGroupCategory { val name: String = "min-max-counter" }
- case object UserGauges extends MetricGroupCategory { val name: String = "gauge" }
+ override def counter(name: String): Counter =
+ super.counter(name)
- case object RecordedValues extends MetricIdentity { val name: String = "values" }
- case object Count extends MetricIdentity { val name: String = "count" }
+ override def counter(key: CounterKey): Counter =
+ super.counter(key)
-}
+ override def removeCounter(name: String): Unit =
+ super.removeCounter(name)
+ override def removeCounter(key: CounterKey): Unit =
+ super.removeCounter(key)
+} \ No newline at end of file
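A sketch of the resulting user-facing API, assuming an ActorSystem with Kamon's reference configuration on the classpath: all user metrics now live in a single `user-metric` entity backed by a GenericEntityRecorder, instead of one storage entry per metric.

import akka.actor.ActorSystem
import kamon.metric.UserMetrics

object UserMetricsExample extends App {
  val system = ActorSystem("user-metrics-example")
  val userMetrics = UserMetrics(system)

  // Instruments are created (or looked up) by name inside the user-metric entity.
  val requestSize = userMetrics.histogram("request-size")
  val failedLogins = userMetrics.counter("failed-logins")

  requestSize.record(512)
  failedLogins.increment()
}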
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/hdrhistogram/AtomicHistogramFieldsAccessor.scala b/kamon-core/src/main/scala/kamon/metric/instrument/AtomicHistogramFieldsAccessor.scala
index e79090a8..e79090a8 100644
--- a/kamon-core/src/main/scala/kamon/instrumentation/hdrhistogram/AtomicHistogramFieldsAccessor.scala
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/AtomicHistogramFieldsAccessor.scala
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala
index 0f29ba6f..c1b69cbe 100644
--- a/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/Counter.scala
@@ -17,9 +17,8 @@
package kamon.metric.instrument
import kamon.jsr166.LongAdder
-import kamon.metric.{ CollectionContext, MetricSnapshot, MetricRecorder }
-trait Counter extends MetricRecorder {
+trait Counter extends Instrument {
type SnapshotType = Counter.Snapshot
def increment(): Unit
@@ -29,12 +28,11 @@ trait Counter extends MetricRecorder {
object Counter {
def apply(): Counter = new LongAdderCounter
+ def create(): Counter = apply()
- trait Snapshot extends MetricSnapshot {
- type SnapshotType = Counter.Snapshot
-
+ trait Snapshot extends InstrumentSnapshot {
def count: Long
- def merge(that: Counter.Snapshot, context: CollectionContext): Counter.Snapshot
+ def merge(that: InstrumentSnapshot, context: CollectionContext): Counter.Snapshot
}
}
@@ -55,5 +53,8 @@ class LongAdderCounter extends Counter {
}
case class CounterSnapshot(count: Long) extends Counter.Snapshot {
- def merge(that: Counter.Snapshot, context: CollectionContext): Counter.Snapshot = CounterSnapshot(count + that.count)
+ def merge(that: InstrumentSnapshot, context: CollectionContext): Counter.Snapshot = that match {
+ case CounterSnapshot(thatCount) ⇒ CounterSnapshot(count + thatCount)
+ case other ⇒ sys.error(s"Cannot merge a CounterSnapshot with the incompatible [${other.getClass.getName}] type.")
+ }
} \ No newline at end of file
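The widened merge signature still behaves as before for matching snapshot types; a small sketch (the buffer size is arbitrary):

import kamon.metric.instrument.{ CollectionContext, CounterSnapshot }

val context = CollectionContext(longBufferSize = 33792)
val merged = CounterSnapshot(10).merge(CounterSnapshot(5), context) // CounterSnapshot(15)
// Merging with any other InstrumentSnapshot implementation fails fast via sys.error.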
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala
index efd7d78f..2341504c 100644
--- a/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/Gauge.scala
@@ -1,70 +1,89 @@
package kamon.metric.instrument
-import java.util.concurrent.TimeUnit
-import java.util.concurrent.atomic.AtomicReference
+import java.util.concurrent.atomic.{ AtomicLong, AtomicLongFieldUpdater, AtomicReference }
-import akka.actor.{ Cancellable, ActorSystem }
-import com.typesafe.config.Config
-import kamon.metric.{ CollectionContext, Scale, MetricRecorder }
+import akka.actor.Cancellable
+import kamon.metric.instrument.Gauge.CurrentValueCollector
+import kamon.metric.instrument.Histogram.DynamicRange
import scala.concurrent.duration.FiniteDuration
-trait Gauge extends MetricRecorder {
+trait Gauge extends Instrument {
type SnapshotType = Histogram.Snapshot
- def record(value: Long)
- def record(value: Long, count: Long)
+ def record(value: Long): Unit
+ def record(value: Long, count: Long): Unit
+ def refreshValue(): Unit
}
object Gauge {
- trait CurrentValueCollector {
- def currentValue: Long
- }
-
- def apply(precision: Histogram.Precision, highestTrackableValue: Long, scale: Scale, refreshInterval: FiniteDuration,
- system: ActorSystem)(currentValueCollector: CurrentValueCollector): Gauge = {
-
- val underlyingHistogram = Histogram(highestTrackableValue, precision, scale)
- val gauge = new HistogramBackedGauge(underlyingHistogram, currentValueCollector)
-
- val refreshValuesSchedule = system.scheduler.schedule(refreshInterval, refreshInterval) {
+ def apply(dynamicRange: DynamicRange, refreshInterval: FiniteDuration, scheduler: RefreshScheduler, valueCollector: CurrentValueCollector): Gauge = {
+ val underlyingHistogram = Histogram(dynamicRange)
+ val gauge = new HistogramBackedGauge(underlyingHistogram, valueCollector)
+ val refreshValuesSchedule = scheduler.schedule(refreshInterval, () ⇒ {
gauge.refreshValue()
- }(system.dispatcher) // TODO: Move this to Kamon dispatchers
+ })
- gauge.refreshValuesSchedule.set(refreshValuesSchedule)
+ gauge.automaticValueCollectorSchedule.set(refreshValuesSchedule)
gauge
}
- def fromDefaultConfig(system: ActorSystem)(currentValueCollectorFunction: () ⇒ Long): Gauge =
- fromDefaultConfig(system, functionZeroAsCurrentValueCollector(currentValueCollectorFunction))
+ def create(dynamicRange: DynamicRange, refreshInterval: FiniteDuration, scheduler: RefreshScheduler, valueCollector: CurrentValueCollector): Gauge =
+ apply(dynamicRange, refreshInterval, scheduler, valueCollector)
- def fromDefaultConfig(system: ActorSystem, currentValueCollector: CurrentValueCollector): Gauge = {
- val config = system.settings.config.getConfig("kamon.metrics.precision.default-gauge-precision")
- fromConfig(config, system)(currentValueCollector)
+ trait CurrentValueCollector {
+ def currentValue: Long
}
- def fromConfig(config: Config, system: ActorSystem, scale: Scale)(currentValueCollector: CurrentValueCollector): Gauge = {
- import scala.concurrent.duration._
+ implicit def functionZeroAsCurrentValueCollector(f: () ⇒ Long): CurrentValueCollector = new CurrentValueCollector {
+ def currentValue: Long = f.apply()
+ }
+}
- val highest = config.getLong("highest-trackable-value")
- val significantDigits = config.getInt("significant-value-digits")
- val refreshInterval = config.getDuration("refresh-interval", TimeUnit.MILLISECONDS)
+/**
+ * Helper for cases in which a gauge should not record the current value of an observed quantity, but rather the
+ * difference between the current and the previously observed value. It should only be used when the observed value
+ * is monotonically non-decreasing: it may increase or stay steady, but never decrease.
+ *
+ * Note: the first time a value is collected this wrapper always returns zero; afterwards it returns the difference
+ * between the current value and the last observed value.
+ */
+class DifferentialValueCollector(wrappedValueCollector: CurrentValueCollector) extends CurrentValueCollector {
+ @volatile private var _readAtLeastOnce = false
+ private val _lastObservedValue = new AtomicLong(0)
+
+ def currentValue: Long = {
+ if (_readAtLeastOnce) {
+ val wrappedCurrent = wrappedValueCollector.currentValue
+ val d = wrappedCurrent - _lastObservedValue.getAndSet(wrappedCurrent)
+
+ if (d < 0)
+        println("DifferentialValueCollector observed a negative difference: " + d)
+
+ d
+
+ } else {
+ _lastObservedValue.set(wrappedValueCollector.currentValue)
+ _readAtLeastOnce = true
+ 0
+ }
- Gauge(Histogram.Precision(significantDigits), highest, scale, refreshInterval.millis, system)(currentValueCollector)
}
+}
- def fromConfig(config: Config, system: ActorSystem)(currentValueCollector: CurrentValueCollector): Gauge = {
- fromConfig(config, system, Scale.Unit)(currentValueCollector)
- }
+object DifferentialValueCollector {
+ def apply(wrappedValueCollector: CurrentValueCollector): CurrentValueCollector =
+ new DifferentialValueCollector(wrappedValueCollector)
- implicit def functionZeroAsCurrentValueCollector(f: () ⇒ Long): CurrentValueCollector = new CurrentValueCollector {
- def currentValue: Long = f.apply()
- }
+ def apply(wrappedValueCollector: ⇒ Long): CurrentValueCollector =
+ new DifferentialValueCollector(new CurrentValueCollector {
+ def currentValue: Long = wrappedValueCollector
+ })
}
class HistogramBackedGauge(underlyingHistogram: Histogram, currentValueCollector: Gauge.CurrentValueCollector) extends Gauge {
- val refreshValuesSchedule = new AtomicReference[Cancellable]()
+ private[kamon] val automaticValueCollectorSchedule = new AtomicReference[Cancellable]()
def record(value: Long): Unit = underlyingHistogram.record(value)
@@ -73,10 +92,15 @@ class HistogramBackedGauge(underlyingHistogram: Histogram, currentValueCollector
def collect(context: CollectionContext): Histogram.Snapshot = underlyingHistogram.collect(context)
def cleanup: Unit = {
- if (refreshValuesSchedule.get() != null)
- refreshValuesSchedule.get().cancel()
+ if (automaticValueCollectorSchedule.get() != null)
+ automaticValueCollectorSchedule.get().cancel()
}
- def refreshValue(): Unit = underlyingHistogram.record(currentValueCollector.currentValue)
+  def refreshValue(): Unit = {
+    val currentValue = currentValueCollector.currentValue
+    if (currentValue < 0)
+      println("Gauge collected a negative value: " + currentValue + " from " + currentValueCollector.getClass)
+    underlyingHistogram.record(currentValue)
+  }
}
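A sketch of DifferentialValueCollector in use, wrapping a cumulative JVM counter so a gauge records per-interval increments rather than the ever-growing total. The GC-time source is only an example of a suitable monotonic value:

import java.lang.management.ManagementFactory
import kamon.metric.instrument.DifferentialValueCollector
import scala.collection.JavaConverters._

// Accumulated GC time is monotonically non-decreasing, which is exactly what
// the wrapper requires.
val gcTimeCollector = DifferentialValueCollector {
  ManagementFactory.getGarbageCollectorMXBeans.asScala.map(_.getCollectionTime).sum
}

// The first call returns 0; later calls return the GC time spent since the previous call.
gcTimeCollector.currentValue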
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala
index bed75fc8..5c4c7f71 100644
--- a/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/Histogram.scala
@@ -17,12 +17,11 @@
package kamon.metric.instrument
import java.nio.LongBuffer
-import com.typesafe.config.Config
import org.HdrHistogram.AtomicHistogramFieldsAccessor
+import kamon.metric.instrument.Histogram.{ Snapshot, DynamicRange }
import org.HdrHistogram.AtomicHistogram
-import kamon.metric._
-trait Histogram extends MetricRecorder {
+trait Histogram extends Instrument {
type SnapshotType = Histogram.Snapshot
def record(value: Long)
@@ -31,30 +30,40 @@ trait Histogram extends MetricRecorder {
object Histogram {
- def apply(highestTrackableValue: Long, precision: Precision, scale: Scale): Histogram =
- new HdrHistogram(1L, highestTrackableValue, precision.significantDigits, scale)
-
- def fromConfig(config: Config): Histogram = {
- fromConfig(config, Scale.Unit)
- }
-
- def fromConfig(config: Config, scale: Scale): Histogram = {
- val highest = config.getLong("highest-trackable-value")
- val significantDigits = config.getInt("significant-value-digits")
-
- new HdrHistogram(1L, highest, significantDigits, scale)
- }
-
- object HighestTrackableValue {
- val OneHourInNanoseconds = 3600L * 1000L * 1000L * 1000L
- }
-
- case class Precision(significantDigits: Int)
- object Precision {
- val Low = Precision(1)
- val Normal = Precision(2)
- val Fine = Precision(3)
- }
+ /**
+ * Scala API:
+ *
+ * Create a new High Dynamic Range Histogram ([[kamon.metric.instrument.HdrHistogram]]) using the given
+ * [[kamon.metric.instrument.Histogram.DynamicRange]].
+ */
+ def apply(dynamicRange: DynamicRange): Histogram = new HdrHistogram(dynamicRange)
+
+ /**
+ * Java API:
+ *
+ * Create a new High Dynamic Range Histogram ([[kamon.metric.instrument.HdrHistogram]]) using the given
+ * [[kamon.metric.instrument.Histogram.DynamicRange]].
+ */
+ def create(dynamicRange: DynamicRange): Histogram = apply(dynamicRange)
+
+ /**
+ * DynamicRange is a configuration object used to supply range and precision configuration to a
+ * [[kamon.metric.instrument.HdrHistogram]]. See the [[http://hdrhistogram.github.io/HdrHistogram/ HdrHistogram website]]
+ * for more details on how it works and the effects of these configuration values.
+ *
+ * @param lowestDiscernibleValue
+ *   The lowest value that can be discerned (distinguished from 0) by the histogram. Must be a positive integer
+ *   that is >= 1. May be internally rounded down to the nearest power of 2.
+ *
+ * @param highestTrackableValue
+ * The highest value to be tracked by the histogram. Must be a positive integer that is >= (2 * lowestDiscernibleValue).
+ * Must not be larger than (Long.MAX_VALUE/2).
+ *
+ * @param precision
+ * The number of significant decimal digits to which the histogram will maintain value resolution and separation.
+ *   Must be an integer between 1 and 3.
+ */
+ case class DynamicRange(lowestDiscernibleValue: Long, highestTrackableValue: Long, precision: Int)
trait Record {
def level: Long
@@ -67,29 +76,28 @@ object Histogram {
var rawCompactRecord: Long = 0L
}
- trait Snapshot extends MetricSnapshot {
- type SnapshotType = Histogram.Snapshot
+ trait Snapshot extends InstrumentSnapshot {
def isEmpty: Boolean = numberOfMeasurements == 0
- def scale: Scale
def numberOfMeasurements: Long
def min: Long
def max: Long
def sum: Long
def percentile(percentile: Double): Long
def recordsIterator: Iterator[Record]
+ def merge(that: InstrumentSnapshot, context: CollectionContext): InstrumentSnapshot
def merge(that: Histogram.Snapshot, context: CollectionContext): Histogram.Snapshot
}
object Snapshot {
- def empty(targetScale: Scale) = new Snapshot {
+ val empty = new Snapshot {
override def min: Long = 0L
override def max: Long = 0L
override def sum: Long = 0L
override def percentile(percentile: Double): Long = 0L
override def recordsIterator: Iterator[Record] = Iterator.empty
- override def merge(that: Snapshot, context: CollectionContext): Snapshot = that
- override def scale: Scale = targetScale
+ override def merge(that: InstrumentSnapshot, context: CollectionContext): InstrumentSnapshot = that
+ override def merge(that: Histogram.Snapshot, context: CollectionContext): Histogram.Snapshot = that
override def numberOfMeasurements: Long = 0L
}
}
@@ -100,10 +108,8 @@ object Histogram {
* The collect(..) operation extracts all the recorded values from the histogram and resets the counts, but still
 * leaves it in a consistent state even in the case of concurrent modification while the snapshot is being taken.
*/
-class HdrHistogram(lowestTrackableValue: Long, highestTrackableValue: Long, significantValueDigits: Int, scale: Scale = Scale.Unit)
- extends AtomicHistogram(lowestTrackableValue, highestTrackableValue, significantValueDigits)
- with Histogram with AtomicHistogramFieldsAccessor {
-
+class HdrHistogram(dynamicRange: DynamicRange) extends AtomicHistogram(dynamicRange.lowestDiscernibleValue,
+ dynamicRange.highestTrackableValue, dynamicRange.precision) with Histogram with AtomicHistogramFieldsAccessor {
import AtomicHistogramFieldsAccessor.totalCountUpdater
def record(value: Long): Unit = recordValue(value)
@@ -119,7 +125,7 @@ class HdrHistogram(lowestTrackableValue: Long, highestTrackableValue: Long, sign
val measurementsArray = Array.ofDim[Long](buffer.limit())
buffer.get(measurementsArray, 0, measurementsArray.length)
- new CompactHdrSnapshot(scale, nrOfMeasurements, measurementsArray, unitMagnitude(), subBucketHalfCount(), subBucketHalfCountMagnitude())
+ new CompactHdrSnapshot(nrOfMeasurements, measurementsArray, unitMagnitude(), subBucketHalfCount(), subBucketHalfCountMagnitude())
}
def getCounts = countsArray().length()
@@ -160,7 +166,7 @@ class HdrHistogram(lowestTrackableValue: Long, highestTrackableValue: Long, sign
}
-case class CompactHdrSnapshot(val scale: Scale, val numberOfMeasurements: Long, compactRecords: Array[Long], unitMagnitude: Int,
+case class CompactHdrSnapshot(val numberOfMeasurements: Long, compactRecords: Array[Long], unitMagnitude: Int,
subBucketHalfCount: Int, subBucketHalfCountMagnitude: Int) extends Histogram.Snapshot {
def min: Long = if (compactRecords.length == 0) 0 else levelFromCompactRecord(compactRecords(0))
@@ -182,53 +188,61 @@ case class CompactHdrSnapshot(val scale: Scale, val numberOfMeasurements: Long,
percentileLevel
}
- def merge(that: Histogram.Snapshot, context: CollectionContext): Histogram.Snapshot = {
- if (that.isEmpty) this else if (this.isEmpty) that else {
- import context.buffer
- buffer.clear()
+ def merge(that: Histogram.Snapshot, context: CollectionContext): Snapshot =
+ merge(that.asInstanceOf[InstrumentSnapshot], context)
- val selfIterator = recordsIterator
- val thatIterator = that.recordsIterator
- var thatCurrentRecord: Histogram.Record = null
- var mergedNumberOfMeasurements = 0L
+ def merge(that: InstrumentSnapshot, context: CollectionContext): Histogram.Snapshot = that match {
+ case thatSnapshot: CompactHdrSnapshot ⇒
+ if (thatSnapshot.isEmpty) this else if (this.isEmpty) thatSnapshot else {
+ import context.buffer
+ buffer.clear()
- def nextOrNull(iterator: Iterator[Histogram.Record]): Histogram.Record = if (iterator.hasNext) iterator.next() else null
- def addToBuffer(compactRecord: Long): Unit = {
- mergedNumberOfMeasurements += countFromCompactRecord(compactRecord)
- buffer.put(compactRecord)
- }
+ val selfIterator = recordsIterator
+ val thatIterator = thatSnapshot.recordsIterator
+ var thatCurrentRecord: Histogram.Record = null
+ var mergedNumberOfMeasurements = 0L
- while (selfIterator.hasNext) {
- val selfCurrentRecord = selfIterator.next()
+ def nextOrNull(iterator: Iterator[Histogram.Record]): Histogram.Record = if (iterator.hasNext) iterator.next() else null
+ def addToBuffer(compactRecord: Long): Unit = {
+ mergedNumberOfMeasurements += countFromCompactRecord(compactRecord)
+ buffer.put(compactRecord)
+ }
- // Advance that to no further than the level of selfCurrentRecord
- thatCurrentRecord = if (thatCurrentRecord == null) nextOrNull(thatIterator) else thatCurrentRecord
- while (thatCurrentRecord != null && thatCurrentRecord.level < selfCurrentRecord.level) {
- addToBuffer(thatCurrentRecord.rawCompactRecord)
- thatCurrentRecord = nextOrNull(thatIterator)
+ while (selfIterator.hasNext) {
+ val selfCurrentRecord = selfIterator.next()
+
+ // Advance that to no further than the level of selfCurrentRecord
+ thatCurrentRecord = if (thatCurrentRecord == null) nextOrNull(thatIterator) else thatCurrentRecord
+ while (thatCurrentRecord != null && thatCurrentRecord.level < selfCurrentRecord.level) {
+ addToBuffer(thatCurrentRecord.rawCompactRecord)
+ thatCurrentRecord = nextOrNull(thatIterator)
+ }
+
+          // Include the current record of self and optionally merge it if it has the same level as thatCurrentRecord
+ if (thatCurrentRecord != null && thatCurrentRecord.level == selfCurrentRecord.level) {
+ addToBuffer(mergeCompactRecords(thatCurrentRecord.rawCompactRecord, selfCurrentRecord.rawCompactRecord))
+ thatCurrentRecord = nextOrNull(thatIterator)
+ } else {
+ addToBuffer(selfCurrentRecord.rawCompactRecord)
+ }
}
- // Include the current record of self and optionally merge if has the same level as thatCurrentRecord
- if (thatCurrentRecord != null && thatCurrentRecord.level == selfCurrentRecord.level) {
- addToBuffer(mergeCompactRecords(thatCurrentRecord.rawCompactRecord, selfCurrentRecord.rawCompactRecord))
- thatCurrentRecord = nextOrNull(thatIterator)
- } else {
- addToBuffer(selfCurrentRecord.rawCompactRecord)
+ // Include everything that might have been left from that
+ if (thatCurrentRecord != null) addToBuffer(thatCurrentRecord.rawCompactRecord)
+ while (thatIterator.hasNext) {
+ addToBuffer(thatIterator.next().rawCompactRecord)
}
- }
- // Include everything that might have been left from that
- if (thatCurrentRecord != null) addToBuffer(thatCurrentRecord.rawCompactRecord)
- while (thatIterator.hasNext) {
- addToBuffer(thatIterator.next().rawCompactRecord)
+ buffer.flip()
+ val compactRecords = Array.ofDim[Long](buffer.limit())
+ buffer.get(compactRecords)
+
+ new CompactHdrSnapshot(mergedNumberOfMeasurements, compactRecords, unitMagnitude, subBucketHalfCount, subBucketHalfCountMagnitude)
}
- buffer.flip()
- val compactRecords = Array.ofDim[Long](buffer.limit())
- buffer.get(compactRecords)
+ case other ⇒
+ sys.error(s"Cannot merge a CompactHdrSnapshot with the incompatible [${other.getClass.getName}] type.")
- new CompactHdrSnapshot(scale, mergedNumberOfMeasurements, compactRecords, unitMagnitude, subBucketHalfCount, subBucketHalfCountMagnitude)
- }
}
@inline private def mergeCompactRecords(left: Long, right: Long): Long = {
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/Instrument.scala b/kamon-core/src/main/scala/kamon/metric/instrument/Instrument.scala
new file mode 100644
index 00000000..8cacc767
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/Instrument.scala
@@ -0,0 +1,56 @@
+package kamon.metric.instrument
+
+import java.nio.LongBuffer
+
+import akka.actor.{ Scheduler, Cancellable }
+import akka.dispatch.MessageDispatcher
+import scala.concurrent.duration.FiniteDuration
+
+private[kamon] trait Instrument {
+ type SnapshotType <: InstrumentSnapshot
+
+ def collect(context: CollectionContext): SnapshotType
+ def cleanup: Unit
+}
+
+trait InstrumentSnapshot {
+ def merge(that: InstrumentSnapshot, context: CollectionContext): InstrumentSnapshot
+}
+
+class InstrumentType private[kamon] (val id: Int) extends AnyVal
+object InstrumentTypes {
+ val Histogram = new InstrumentType(1)
+ val MinMaxCounter = new InstrumentType(2)
+ val Gauge = new InstrumentType(3)
+ val Counter = new InstrumentType(4)
+}
+
+trait CollectionContext {
+ def buffer: LongBuffer
+}
+
+object CollectionContext {
+ def apply(longBufferSize: Int): CollectionContext = new CollectionContext {
+ val buffer: LongBuffer = LongBuffer.allocate(longBufferSize)
+ }
+}
+
+trait RefreshScheduler {
+ def schedule(interval: FiniteDuration, refresh: () ⇒ Unit): Cancellable
+}
+
+object RefreshScheduler {
+ val NoopScheduler = new RefreshScheduler {
+ def schedule(interval: FiniteDuration, refresh: () ⇒ Unit): Cancellable = new Cancellable {
+ override def isCancelled: Boolean = true
+ override def cancel(): Boolean = true
+ }
+ }
+
+ def apply(scheduler: Scheduler, dispatcher: MessageDispatcher): RefreshScheduler = new RefreshScheduler {
+ def schedule(interval: FiniteDuration, refresh: () ⇒ Unit): Cancellable =
+ scheduler.schedule(interval, interval)(refresh.apply())(dispatcher)
+ }
+
+ def create(scheduler: Scheduler, dispatcher: MessageDispatcher): RefreshScheduler = apply(scheduler, dispatcher)
+} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentFactory.scala b/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentFactory.scala
new file mode 100644
index 00000000..9b0c85cb
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentFactory.scala
@@ -0,0 +1,35 @@
+package kamon.metric.instrument
+
+import kamon.metric.instrument.Gauge.CurrentValueCollector
+import kamon.metric.instrument.Histogram.DynamicRange
+
+import scala.concurrent.duration.FiniteDuration
+
+case class InstrumentFactory(configurations: Map[String, InstrumentCustomSettings], defaults: DefaultInstrumentSettings, scheduler: RefreshScheduler) {
+
+ private def resolveSettings(instrumentName: String, codeSettings: Option[InstrumentSettings], default: InstrumentSettings): InstrumentSettings = {
+ configurations.get(instrumentName).flatMap { customSettings ⇒
+ codeSettings.map(cs ⇒ customSettings.combine(cs)) orElse (Some(customSettings.combine(default)))
+
+ } getOrElse (codeSettings.getOrElse(default))
+ }
+
+ def createHistogram(name: String, dynamicRange: Option[DynamicRange] = None): Histogram = {
+ val settings = resolveSettings(name, dynamicRange.map(dr ⇒ InstrumentSettings(dr, None)), defaults.histogram)
+ Histogram(settings.dynamicRange)
+ }
+
+ def createMinMaxCounter(name: String, dynamicRange: Option[DynamicRange] = None, refreshInterval: Option[FiniteDuration] = None): MinMaxCounter = {
+ val settings = resolveSettings(name, dynamicRange.map(dr ⇒ InstrumentSettings(dr, refreshInterval)), defaults.minMaxCounter)
+ MinMaxCounter(settings.dynamicRange, settings.refreshInterval.get, scheduler)
+ }
+
+ def createGauge(name: String, dynamicRange: Option[DynamicRange] = None, refreshInterval: Option[FiniteDuration] = None,
+ valueCollector: CurrentValueCollector): Gauge = {
+
+ val settings = resolveSettings(name, dynamicRange.map(dr ⇒ InstrumentSettings(dr, refreshInterval)), defaults.gauge)
+ Gauge(settings.dynamicRange, settings.refreshInterval.get, scheduler, valueCollector)
+ }
+
+ def createCounter(): Counter = Counter()
+} \ No newline at end of file
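A sketch of the precedence implemented by resolveSettings: custom settings coming from configuration win over settings supplied in code, which in turn win over the category defaults. The default values below are illustrative only:

import kamon.metric.instrument._
import kamon.metric.instrument.Histogram.DynamicRange
import scala.concurrent.duration._

val defaults = DefaultInstrumentSettings(
  histogram = InstrumentSettings(DynamicRange(1, 3600000000000L, 2), None),
  minMaxCounter = InstrumentSettings(DynamicRange(1, 999999999, 2), Some(100.millis)),
  gauge = InstrumentSettings(DynamicRange(1, 999999999, 2), Some(100.millis)))

val factory = InstrumentFactory(Map.empty, defaults, RefreshScheduler.NoopScheduler)

val elapsed = factory.createHistogram("elapsed-time") // falls back to the histogram defaults
val sized = factory.createHistogram("payload-size", Some(DynamicRange(1, 1048576, 3))) // code settings win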
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentSettings.scala b/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentSettings.scala
new file mode 100644
index 00000000..1446a25d
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/InstrumentSettings.scala
@@ -0,0 +1,67 @@
+package kamon.metric.instrument
+
+import java.util.concurrent.TimeUnit
+
+import com.typesafe.config.Config
+import kamon.metric.instrument.Histogram.DynamicRange
+
+import scala.concurrent.duration.FiniteDuration
+
+case class InstrumentCustomSettings(lowestDiscernibleValue: Option[Long], highestTrackableValue: Option[Long],
+ precision: Option[Int], refreshInterval: Option[FiniteDuration]) {
+
+ def combine(that: InstrumentSettings): InstrumentSettings =
+ InstrumentSettings(
+ DynamicRange(
+ lowestDiscernibleValue.getOrElse(that.dynamicRange.lowestDiscernibleValue),
+ highestTrackableValue.getOrElse(that.dynamicRange.highestTrackableValue),
+ precision.getOrElse(that.dynamicRange.precision)),
+ refreshInterval.orElse(that.refreshInterval))
+}
+
+object InstrumentCustomSettings {
+ import scala.concurrent.duration._
+
+ def fromConfig(config: Config): InstrumentCustomSettings =
+ InstrumentCustomSettings(
+ if (config.hasPath("lowest-discernible-value")) Some(config.getLong("lowest-discernible-value")) else None,
+ if (config.hasPath("highest-trackable-value")) Some(config.getLong("highest-trackable-value")) else None,
+ if (config.hasPath("precision")) Some(InstrumentSettings.parsePrecision(config.getString("precision"))) else None,
+ if (config.hasPath("refresh-interval")) Some(config.getDuration("refresh-interval", TimeUnit.NANOSECONDS).nanos) else None)
+
+}
+
+case class InstrumentSettings(dynamicRange: DynamicRange, refreshInterval: Option[FiniteDuration])
+
+object InstrumentSettings {
+
+ def readDynamicRange(config: Config): DynamicRange =
+ DynamicRange(
+ config.getLong("lowest-discernible-value"),
+ config.getLong("highest-trackable-value"),
+ parsePrecision(config.getString("precision")))
+
+ def parsePrecision(stringValue: String): Int = stringValue match {
+ case "low" ⇒ 1
+ case "normal" ⇒ 2
+ case "fine" ⇒ 3
+ case other ⇒ sys.error(s"Invalid precision configuration [$other] found, valid options are: [low|normal|fine].")
+ }
+}
+
+case class DefaultInstrumentSettings(histogram: InstrumentSettings, minMaxCounter: InstrumentSettings, gauge: InstrumentSettings)
+
+object DefaultInstrumentSettings {
+
+ def fromConfig(config: Config): DefaultInstrumentSettings = {
+ import scala.concurrent.duration._
+
+ val histogramSettings = InstrumentSettings(InstrumentSettings.readDynamicRange(config.getConfig("histogram")), None)
+ val minMaxCounterSettings = InstrumentSettings(InstrumentSettings.readDynamicRange(config.getConfig("min-max-counter")),
+ Some(config.getDuration("min-max-counter.refresh-interval", TimeUnit.NANOSECONDS).nanos))
+ val gaugeSettings = InstrumentSettings(InstrumentSettings.readDynamicRange(config.getConfig("gauge")),
+ Some(config.getDuration("gauge.refresh-interval", TimeUnit.NANOSECONDS).nanos))
+
+ DefaultInstrumentSettings(histogramSettings, minMaxCounterSettings, gaugeSettings)
+ }
+} \ No newline at end of file
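A sketch of how a partial configuration block is read and later combined with defaults; only the paths actually present in the config override anything:

import com.typesafe.config.ConfigFactory
import kamon.metric.instrument.InstrumentCustomSettings

val custom = InstrumentCustomSettings.fromConfig(ConfigFactory.parseString(
  """
    |highest-trackable-value = 5000
    |precision = fine
  """.stripMargin))

// custom == InstrumentCustomSettings(None, Some(5000), Some(3), None);
// custom.combine(settings) keeps these two values and takes the rest from `settings`.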
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala b/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala
index 4882d2aa..0828c8a9 100644
--- a/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/MinMaxCounter.scala
@@ -17,16 +17,14 @@ package kamon.metric.instrument
*/
import java.lang.Math.abs
-import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicReference
-import akka.actor.{ ActorSystem, Cancellable }
-import com.typesafe.config.Config
+import akka.actor.Cancellable
import kamon.jsr166.LongMaxUpdater
-import kamon.metric.{ Scale, MetricRecorder, CollectionContext }
+import kamon.metric.instrument.Histogram.DynamicRange
import kamon.util.PaddedAtomicLong
import scala.concurrent.duration.FiniteDuration
-trait MinMaxCounter extends MetricRecorder {
+trait MinMaxCounter extends Instrument {
override type SnapshotType = Histogram.Snapshot
def increment(): Unit
@@ -38,29 +36,20 @@ trait MinMaxCounter extends MetricRecorder {
object MinMaxCounter {
- def apply(highestTrackableValue: Long, precision: Histogram.Precision, scale: Scale, refreshInterval: FiniteDuration,
- system: ActorSystem): MinMaxCounter = {
-
- val underlyingHistogram = Histogram(highestTrackableValue, precision, scale)
+ def apply(dynamicRange: DynamicRange, refreshInterval: FiniteDuration, scheduler: RefreshScheduler): MinMaxCounter = {
+ val underlyingHistogram = Histogram(dynamicRange)
val minMaxCounter = new PaddedMinMaxCounter(underlyingHistogram)
-
- val refreshValuesSchedule = system.scheduler.schedule(refreshInterval, refreshInterval) {
+ val refreshValuesSchedule = scheduler.schedule(refreshInterval, () ⇒ {
minMaxCounter.refreshValues()
- }(system.dispatcher) // TODO: Move this to Kamon dispatchers
+ })
minMaxCounter.refreshValuesSchedule.set(refreshValuesSchedule)
minMaxCounter
}
- def fromConfig(config: Config, system: ActorSystem): MinMaxCounter = {
- import scala.concurrent.duration._
+ def create(dynamicRange: DynamicRange, refreshInterval: FiniteDuration, scheduler: RefreshScheduler): MinMaxCounter =
+ apply(dynamicRange, refreshInterval, scheduler)
- val highest = config.getLong("highest-trackable-value")
- val significantDigits = config.getInt("significant-value-digits")
- val refreshInterval = config.getDuration("refresh-interval", TimeUnit.MILLISECONDS)
-
- apply(highest, Histogram.Precision(significantDigits), Scale.Unit, refreshInterval.millis, system)
- }
}
class PaddedMinMaxCounter(underlyingHistogram: Histogram) extends MinMaxCounter {
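A sketch of how the new factory is meant to be wired, assuming a RefreshScheduler instance is available (its construction is not part of this hunk) and that the trait keeps the usual increment/decrement operations:

    import kamon.metric.instrument.Histogram.DynamicRange
    import scala.concurrent.duration._

    // `scheduler` is assumed to come from elsewhere, e.g. the metrics extension.
    def sampleCounter(scheduler: RefreshScheduler): MinMaxCounter = {
      // DynamicRange(lowestDiscernibleValue, highestTrackableValue, precision)
      val counter = MinMaxCounter(DynamicRange(1, 999999999, 2), 100.millis, scheduler)
      counter.increment()
      counter.decrement()
      counter // min, max and current values are flushed to the underlying histogram on each refresh
    }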
diff --git a/kamon-core/src/main/scala/kamon/metric/instrument/UnitOfMeasurement.scala b/kamon-core/src/main/scala/kamon/metric/instrument/UnitOfMeasurement.scala
new file mode 100644
index 00000000..cf6b8b4c
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/metric/instrument/UnitOfMeasurement.scala
@@ -0,0 +1,55 @@
+package kamon.metric.instrument
+
+trait UnitOfMeasurement {
+ def name: String
+ def label: String
+ def factor: Double
+}
+
+object UnitOfMeasurement {
+ case object Unknown extends UnitOfMeasurement {
+ val name = "unknown"
+ val label = "unknown"
+ val factor = 1D
+ }
+
+ def isUnknown(uom: UnitOfMeasurement): Boolean =
+ uom == Unknown
+
+ def isTime(uom: UnitOfMeasurement): Boolean =
+ uom.isInstanceOf[Time]
+
+}
+
+case class Time(factor: Double, label: String) extends UnitOfMeasurement {
+ val name = "time"
+
+ /**
+ * Scale a value from this time unit to a different time unit.
+ *
+ * @param toUnit Time unit of the expected result.
+ * @param value Value to scale, expressed in this unit.
+ * @return Equivalent of value in the target time unit.
+ */
+ def scale(toUnit: Time)(value: Long): Double =
+ (value * factor) / toUnit.factor
+}
+
+object Time {
+ val Nanoseconds = Time(1E-9, "ns")
+ val Microseconds = Time(1E-6, "µs")
+ val Milliseconds = Time(1E-3, "ms")
+ val Seconds = Time(1, "s")
+}
+
+case class Memory(factor: Double, label: String) extends UnitOfMeasurement {
+ val name = "bytes"
+}
+
+object Memory {
+ val Bytes = Memory(1, "b")
+ val KiloBytes = Memory(1024, "Kb")
+ val MegaBytes = Memory(1024 * 1024, "Mb")
+ val GigaBytes = Memory(1024 * 1024 * 1024, "Gb")
+}
+
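Worked examples of the scale formula above: multiply by this unit's factor to express the value in seconds, then divide by the target unit's factor:

    // 3,500,000 ns in milliseconds: (3500000 * 1E-9) / 1E-3 = 3.5
    Time.Nanoseconds.scale(Time.Milliseconds)(3500000L)

    // 2 s in microseconds: (2 * 1) / 1E-6 = 2000000.0
    Time.Seconds.scale(Time.Microseconds)(2L)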
diff --git a/kamon-core/src/main/scala/kamon/metric/package.scala b/kamon-core/src/main/scala/kamon/metric/package.scala
deleted file mode 100644
index 43166058..00000000
--- a/kamon-core/src/main/scala/kamon/metric/package.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon
-
-import scala.annotation.tailrec
-import com.typesafe.config.Config
-
-package object metric {
-
- @tailrec def combineMaps[K, V](left: Map[K, V], right: Map[K, V])(valueMerger: (V, V) ⇒ V): Map[K, V] = {
- if (right.isEmpty)
- left
- else {
- val (key, rightValue) = right.head
- val value = left.get(key).map(valueMerger(_, rightValue)).getOrElse(rightValue)
-
- combineMaps(left.updated(key, value), right.tail)(valueMerger)
- }
- }
-}
diff --git a/kamon-core/src/main/scala/kamon/standalone/KamonStandalone.scala b/kamon-core/src/main/scala/kamon/standalone/KamonStandalone.scala
deleted file mode 100644
index 490bc127..00000000
--- a/kamon-core/src/main/scala/kamon/standalone/KamonStandalone.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-package kamon.standalone
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.Kamon
-import kamon.metric.UserMetrics
-import kamon.metric.instrument.{ Gauge, MinMaxCounter, Counter, Histogram }
-
-import scala.concurrent.duration.FiniteDuration
-
-trait KamonStandalone {
- private[kamon] def system: ActorSystem
-
- def registerHistogram(name: String, precision: Histogram.Precision, highestTrackableValue: Long): Histogram =
- Kamon(UserMetrics)(system).registerHistogram(name, precision, highestTrackableValue)
-
- def registerHistogram(name: String): Histogram =
- Kamon(UserMetrics)(system).registerHistogram(name)
-
- def registerCounter(name: String): Counter =
- Kamon(UserMetrics)(system).registerCounter(name)
-
- def registerMinMaxCounter(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
- refreshInterval: FiniteDuration): MinMaxCounter =
- Kamon(UserMetrics)(system).registerMinMaxCounter(name, precision, highestTrackableValue, refreshInterval)
-
- def registerMinMaxCounter(name: String): MinMaxCounter =
- Kamon(UserMetrics)(system).registerMinMaxCounter(name)
-
- def registerGauge(name: String)(currentValueCollector: Gauge.CurrentValueCollector): Gauge =
- Kamon(UserMetrics)(system).registerGauge(name)(currentValueCollector)
-
- def registerGauge(name: String, precision: Histogram.Precision, highestTrackableValue: Long,
- refreshInterval: FiniteDuration)(currentValueCollector: Gauge.CurrentValueCollector): Gauge =
- Kamon(UserMetrics)(system).registerGauge(name, precision, highestTrackableValue, refreshInterval)(currentValueCollector)
-
- def removeHistogram(name: String): Unit =
- Kamon(UserMetrics)(system).removeHistogram(name)
-
- def removeCounter(name: String): Unit =
- Kamon(UserMetrics)(system).removeCounter(name)
-
- def removeMinMaxCounter(name: String): Unit =
- Kamon(UserMetrics)(system).removeMinMaxCounter(name)
-
- def removeGauge(name: String): Unit =
- Kamon(UserMetrics)(system).removeGauge(name)
-}
-
-object KamonStandalone {
-
- def buildFromConfig(config: Config): KamonStandalone = buildFromConfig(config, "kamon-standalone")
-
- def buildFromConfig(config: Config, actorSystemName: String): KamonStandalone = new KamonStandalone {
- val system: ActorSystem = ActorSystem(actorSystemName, config)
- }
-}
-
-object EmbeddedKamonStandalone extends KamonStandalone {
- private[kamon] lazy val system = ActorSystem("kamon-standalone")
-} \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/trace/Incubator.scala b/kamon-core/src/main/scala/kamon/trace/Incubator.scala
index c39a9984..3b2a3bf9 100644
--- a/kamon-core/src/main/scala/kamon/trace/Incubator.scala
+++ b/kamon-core/src/main/scala/kamon/trace/Incubator.scala
@@ -19,8 +19,8 @@ package kamon.trace
import java.util.concurrent.TimeUnit
import akka.actor.{ ActorLogging, Props, Actor, ActorRef }
-import kamon.{ NanoInterval, RelativeNanoTimestamp }
import kamon.trace.Incubator.{ CheckForCompletedTraces, IncubatingTrace }
+import kamon.util.{ NanoInterval, RelativeNanoTimestamp }
import scala.annotation.tailrec
import scala.collection.immutable.Queue
import scala.concurrent.duration._
diff --git a/kamon-core/src/main/scala/kamon/trace/MetricsOnlyContext.scala b/kamon-core/src/main/scala/kamon/trace/MetricsOnlyContext.scala
index 66c6633d..e62178dd 100644
--- a/kamon-core/src/main/scala/kamon/trace/MetricsOnlyContext.scala
+++ b/kamon-core/src/main/scala/kamon/trace/MetricsOnlyContext.scala
@@ -18,16 +18,16 @@ package kamon.trace
import java.util.concurrent.ConcurrentLinkedQueue
-import akka.actor.ActorSystem
+import akka.actor.{ ExtensionId, ActorSystem }
import akka.event.LoggingAdapter
-import kamon.{ RelativeNanoTimestamp, NanoInterval }
-import kamon.metric.TraceMetrics.TraceMetricRecorder
+import kamon.Kamon.Extension
import kamon.metric.{ MetricsExtension, TraceMetrics }
+import kamon.util.{ NanoInterval, RelativeNanoTimestamp }
import scala.annotation.tailrec
-private[trace] class MetricsOnlyContext(traceName: String, val token: String, izOpen: Boolean, val levelOfDetail: LevelOfDetail, val origin: TraceContextOrigin,
- val startRelativeTimestamp: RelativeNanoTimestamp, log: LoggingAdapter, metricsExtension: MetricsExtension, val system: ActorSystem)
+private[kamon] class MetricsOnlyContext(traceName: String, val token: String, izOpen: Boolean, val levelOfDetail: LevelOfDetail,
+ val startTimestamp: RelativeNanoTimestamp, log: LoggingAdapter, metricsExtension: MetricsExtension, val actorSystem: ActorSystem)
extends TraceContext {
@volatile private var _name = traceName
@@ -48,35 +48,36 @@ private[trace] class MetricsOnlyContext(traceName: String, val token: String, iz
def isOpen: Boolean = _isOpen
def addMetadata(key: String, value: String): Unit = {}
+ def lookupExtension[T <: Extension](id: ExtensionId[T]): T = id(actorSystem)
+
def finish(): Unit = {
_isOpen = false
- val traceElapsedTime = NanoInterval.since(startRelativeTimestamp)
+ val traceElapsedTime = NanoInterval.since(startTimestamp)
_elapsedTime = traceElapsedTime
- val metricRecorder = metricsExtension.register(TraceMetrics(name), TraceMetrics.Factory)
- metricRecorder.map { traceMetrics ⇒
- traceMetrics.elapsedTime.record(traceElapsedTime.nanos)
- drainFinishedSegments(traceMetrics)
+ metricsExtension.register(TraceMetrics, name).map { registration ⇒
+ registration.recorder.ElapsedTime.record(traceElapsedTime.nanos)
+ drainFinishedSegments(registration.recorder)
}
}
def startSegment(segmentName: String, category: String, library: String): Segment =
new MetricsOnlySegment(segmentName, category, library)
- @tailrec private def drainFinishedSegments(metricRecorder: TraceMetricRecorder): Unit = {
+ @tailrec private def drainFinishedSegments(recorder: TraceMetrics): Unit = {
val segment = _finishedSegments.poll()
if (segment != null) {
- metricRecorder.segmentRecorder(segment.identity).record(segment.duration.nanos)
- drainFinishedSegments(metricRecorder)
+ recorder.segment(segment.name, segment.category, segment.library).record(segment.duration.nanos)
+ drainFinishedSegments(recorder)
}
}
protected def finishSegment(segmentName: String, category: String, library: String, duration: NanoInterval): Unit = {
- _finishedSegments.add(SegmentLatencyData(SegmentMetricIdentity(segmentName, category, library), duration))
+ _finishedSegments.add(SegmentLatencyData(segmentName, category, library, duration))
if (isClosed) {
- metricsExtension.register(TraceMetrics(name), TraceMetrics.Factory).map { traceMetrics ⇒
- drainFinishedSegments(traceMetrics)
+ metricsExtension.register(TraceMetrics, name).map { registration ⇒
+ drainFinishedSegments(registration.recorder)
}
}
}
@@ -118,4 +119,6 @@ private[trace] class MetricsOnlyContext(traceName: String, val token: String, iz
def elapsedTime: NanoInterval = _elapsedTime
def startTimestamp: RelativeNanoTimestamp = _startTimestamp
}
-} \ No newline at end of file
+}
+
+case class SegmentLatencyData(name: String, category: String, library: String, duration: NanoInterval)
diff --git a/kamon-core/src/main/scala/kamon/trace/Sampler.scala b/kamon-core/src/main/scala/kamon/trace/Sampler.scala
index 2308d1d0..5abba221 100644
--- a/kamon-core/src/main/scala/kamon/trace/Sampler.scala
+++ b/kamon-core/src/main/scala/kamon/trace/Sampler.scala
@@ -16,8 +16,7 @@
package kamon.trace
-import kamon.NanoInterval
-import kamon.util.Sequencer
+import kamon.util.{ NanoInterval, Sequencer }
import scala.concurrent.forkjoin.ThreadLocalRandom
trait Sampler {
diff --git a/kamon-core/src/main/scala/kamon/trace/TraceContext.scala b/kamon-core/src/main/scala/kamon/trace/TraceContext.scala
index 60244eaa..ed8170a9 100644
--- a/kamon-core/src/main/scala/kamon/trace/TraceContext.scala
+++ b/kamon-core/src/main/scala/kamon/trace/TraceContext.scala
@@ -17,26 +17,60 @@
package kamon.trace
import java.io.ObjectStreamException
-import akka.actor.ActorSystem
+import akka.actor.{ ExtensionId, ActorSystem }
+import kamon.Kamon.Extension
import kamon._
import kamon.metric._
import kamon.trace.TraceContextAware.DefaultTraceContextAware
+import kamon.util.{ NanoInterval, RelativeNanoTimestamp }
trait TraceContext {
def name: String
def token: String
- def origin: TraceContextOrigin
def isEmpty: Boolean
def nonEmpty: Boolean = !isEmpty
def isOpen: Boolean
def isClosed: Boolean = !isOpen
- def system: ActorSystem
def finish(): Unit
def rename(newName: String): Unit
+
def startSegment(segmentName: String, category: String, library: String): Segment
def addMetadata(key: String, value: String)
- def startRelativeTimestamp: RelativeNanoTimestamp
+
+ def startTimestamp: RelativeNanoTimestamp
+
+ def lookupExtension[T <: Kamon.Extension](id: ExtensionId[T]): T
+}
+
+object TraceContext {
+ private[kamon] val _traceContextStorage = new ThreadLocal[TraceContext] {
+ override def initialValue(): TraceContext = EmptyTraceContext
+ }
+
+ def currentContext: TraceContext =
+ _traceContextStorage.get()
+
+ def setCurrentContext(context: TraceContext): Unit =
+ _traceContextStorage.set(context)
+
+ def clearCurrentContext: Unit =
+ _traceContextStorage.remove()
+
+ def withContext[T](context: TraceContext)(code: ⇒ T): T = {
+ val oldContext = _traceContextStorage.get()
+ _traceContextStorage.set(context)
+
+ try code finally _traceContextStorage.set(oldContext)
+ }
+
+ def map[T](f: TraceContext ⇒ T): Option[T] = {
+ val current = currentContext
+ if (current.nonEmpty)
+ Some(f(current))
+ else None
+ }
+
}
trait Segment {
@@ -56,16 +90,17 @@ trait Segment {
case object EmptyTraceContext extends TraceContext {
def name: String = "empty-trace"
def token: String = ""
- def origin: TraceContextOrigin = TraceContextOrigin.Local
def isEmpty: Boolean = true
def isOpen: Boolean = false
- def system: ActorSystem = sys.error("Can't obtain a ActorSystem from a EmptyTraceContext.")
def finish(): Unit = {}
def rename(name: String): Unit = {}
def startSegment(segmentName: String, category: String, library: String): Segment = EmptySegment
def addMetadata(key: String, value: String): Unit = {}
- def startRelativeTimestamp = new RelativeNanoTimestamp(0L)
+ def startTimestamp = new RelativeNanoTimestamp(0L)
+
+ override def lookupExtension[T <: Extension](id: ExtensionId[T]): T =
+ sys.error("Can't lookup extensions on a EmptyTraceContext.")
case object EmptySegment extends Segment {
val name: String = "empty-segment"
@@ -80,14 +115,17 @@ case object EmptyTraceContext extends TraceContext {
}
}
-case class SegmentMetricIdentity(name: String, category: String, library: String) extends MetricIdentity
-case class SegmentLatencyData(identity: SegmentMetricIdentity, duration: NanoInterval)
-
object SegmentCategory {
val HttpClient = "http-client"
val Database = "database"
}
+class LOD private[trace] (val level: Int) extends AnyVal
+object LOD {
+ val MetricsOnly = new LOD(1)
+ val SimpleTrace = new LOD(2)
+}
+
sealed trait LevelOfDetail
object LevelOfDetail {
case object MetricsOnly extends LevelOfDetail
@@ -95,12 +133,6 @@ object LevelOfDetail {
case object FullTrace extends LevelOfDetail
}
-sealed trait TraceContextOrigin
-object TraceContextOrigin {
- case object Local extends TraceContextOrigin
- case object Remote extends TraceContextOrigin
-}
-
trait TraceContextAware extends Serializable {
def traceContext: TraceContext
}
@@ -109,7 +141,7 @@ object TraceContextAware {
def default: TraceContextAware = new DefaultTraceContextAware
class DefaultTraceContextAware extends TraceContextAware {
- @transient val traceContext = TraceRecorder.currentContext
+ @transient val traceContext = TraceContext.currentContext
//
// Beware of this hack, it might bite us in the future!
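The TraceContext companion above takes over the thread-local bookkeeping of the removed TraceRecorder. A small usage sketch; the context itself is assumed to come from the Tracer extension introduced later in this diff:

    // `ctx` would be created elsewhere, e.g. Tracer.get(system).newContext("my-trace").
    def runTraced[T](ctx: TraceContext)(body: ⇒ T): T =
      TraceContext.withContext(ctx) {
        val segment = TraceContext.currentContext.startSegment("work", "business", "my-library")
        try body finally segment.finish()
      }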
diff --git a/kamon-core/src/main/scala/kamon/trace/TraceExtension.scala b/kamon-core/src/main/scala/kamon/trace/TraceExtension.scala
deleted file mode 100644
index 41f022d0..00000000
--- a/kamon-core/src/main/scala/kamon/trace/TraceExtension.scala
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.trace
-
-import java.util.concurrent.TimeUnit
-
-import akka.actor._
-import akka.actor
-import akka.event.Logging
-import kamon._
-import kamon.metric.Metrics
-import kamon.util.GlobPathFilter
-
-class TraceExtension(system: ExtendedActorSystem) extends Kamon.Extension {
- val config = system.settings.config.getConfig("kamon.trace")
- val dispatcher = system.dispatchers.lookup(config.getString("dispatcher"))
-
- val detailLevel: LevelOfDetail = config.getString("level") match {
- case "metrics-only" ⇒ LevelOfDetail.MetricsOnly
- case "simple-trace" ⇒ LevelOfDetail.SimpleTrace
- case other ⇒ sys.error(s"Unknown tracing level $other present in the configuration file.")
- }
-
- val sampler: Sampler =
- if (detailLevel == LevelOfDetail.MetricsOnly) NoSampling
- else config.getString("sampling") match {
- case "all" ⇒ SampleAll
- case "random" ⇒ new RandomSampler(config.getInt("random-sampler.chance"))
- case "ordered" ⇒ new OrderedSampler(config.getInt("ordered-sampler.interval"))
- case "threshold" ⇒ new ThresholdSampler(config.getDuration("threshold-sampler.minimum-elapsed-time", TimeUnit.NANOSECONDS))
- }
-
- val log = Logging(system, "TraceExtension")
- val subscriptions = system.actorOf(Props[TraceSubscriptions], "trace-subscriptions")
- val incubator = system.actorOf(Incubator.props(subscriptions))
- val metricsExtension = Kamon(Metrics)(system)
-
- def newTraceContext(traceName: String, token: String, origin: TraceContextOrigin, system: ActorSystem): TraceContext =
- newTraceContext(traceName, token, true, origin, RelativeNanoTimestamp.now, system)
-
- def newTraceContext(traceName: String, token: String, isOpen: Boolean, origin: TraceContextOrigin,
- startTimestamp: RelativeNanoTimestamp, system: ActorSystem): TraceContext = {
- def newMetricsOnlyContext = new MetricsOnlyContext(traceName, token, isOpen, detailLevel, origin, startTimestamp, log, metricsExtension, system)
-
- if (detailLevel == LevelOfDetail.MetricsOnly || origin == TraceContextOrigin.Remote)
- newMetricsOnlyContext
- else {
- if (!sampler.shouldTrace)
- newMetricsOnlyContext
- else
- new TracingContext(traceName, token, true, detailLevel, origin, startTimestamp, log, metricsExtension, this, system)
- }
- }
-
- def subscribe(subscriber: ActorRef): Unit = subscriptions ! TraceSubscriptions.Subscribe(subscriber)
- def unsubscribe(subscriber: ActorRef): Unit = subscriptions ! TraceSubscriptions.Unsubscribe(subscriber)
-
- private[kamon] def dispatchTracingContext(trace: TracingContext): Unit =
- if (sampler.shouldReport(trace.elapsedTime))
- if (trace.shouldIncubate)
- incubator ! trace
- else
- subscriptions ! trace.generateTraceInfo
-
-}
-
-object Trace extends ExtensionId[TraceExtension] with ExtensionIdProvider {
- def lookup(): ExtensionId[_ <: actor.Extension] = Trace
- def createExtension(system: ExtendedActorSystem): TraceExtension = new TraceExtension(system)
-
- case class MetricGroupFilter(includes: List[GlobPathFilter], excludes: List[GlobPathFilter]) {
- def accept(name: String): Boolean = includes.exists(_.accept(name)) && !excludes.exists(_.accept(name))
- }
-}
-
-case class TraceInfo(name: String, token: String, timestamp: NanoTimestamp, elapsedTime: NanoInterval, metadata: Map[String, String], segments: List[SegmentInfo])
-case class SegmentInfo(name: String, category: String, library: String, timestamp: NanoTimestamp, elapsedTime: NanoInterval, metadata: Map[String, String]) \ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/trace/TraceLocal.scala b/kamon-core/src/main/scala/kamon/trace/TraceLocal.scala
index 84e234f3..057f564e 100644
--- a/kamon-core/src/main/scala/kamon/trace/TraceLocal.scala
+++ b/kamon-core/src/main/scala/kamon/trace/TraceLocal.scala
@@ -42,12 +42,12 @@ object TraceLocal {
object HttpContextKey extends TraceLocal.TraceLocalKey { type ValueType = HttpContext }
- def store(key: TraceLocalKey)(value: key.ValueType): Unit = TraceRecorder.currentContext match {
+ def store(key: TraceLocalKey)(value: key.ValueType): Unit = TraceContext.currentContext match {
case ctx: MetricsOnlyContext ⇒ ctx.traceLocalStorage.store(key)(value)
case EmptyTraceContext ⇒ // Can't store in the empty context.
}
- def retrieve(key: TraceLocalKey): Option[key.ValueType] = TraceRecorder.currentContext match {
+ def retrieve(key: TraceLocalKey): Option[key.ValueType] = TraceContext.currentContext match {
case ctx: MetricsOnlyContext ⇒ ctx.traceLocalStorage.retrieve(key)
case EmptyTraceContext ⇒ None // Can't retrieve anything from the empty context.
}
diff --git a/kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala b/kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala
deleted file mode 100644
index 703896c3..00000000
--- a/kamon-core/src/main/scala/kamon/trace/TraceRecorder.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.trace
-
-import kamon.{ MilliTimestamp, RelativeNanoTimestamp, Kamon }
-
-import scala.language.experimental.macros
-import java.util.concurrent.atomic.AtomicLong
-import kamon.macros.InlineTraceContextMacro
-
-import scala.util.Try
-import java.net.InetAddress
-import akka.actor.ActorSystem
-
-object TraceRecorder {
- private val traceContextStorage = new ThreadLocal[TraceContext] {
- override def initialValue(): TraceContext = EmptyTraceContext
- }
-
- private val tokenCounter = new AtomicLong
- private val hostnamePrefix = Try(InetAddress.getLocalHost.getHostName).getOrElse("unknown-localhost")
-
- def newToken: String = hostnamePrefix + "-" + String.valueOf(tokenCounter.incrementAndGet())
-
- private def newTraceContext(name: String, token: Option[String], system: ActorSystem): TraceContext =
- Kamon(Trace)(system).newTraceContext(name, token.getOrElse(newToken), TraceContextOrigin.Local, system)
-
- def joinRemoteTraceContext(traceName: String, traceToken: String, startTimestamp: MilliTimestamp, isOpen: Boolean, system: ActorSystem): TraceContext = {
- val equivalentStartTimestamp = RelativeNanoTimestamp.relativeTo(startTimestamp)
- Kamon(Trace)(system).newTraceContext(traceName, traceToken, isOpen, TraceContextOrigin.Remote, equivalentStartTimestamp, system)
- }
-
- def setContext(context: TraceContext): Unit = traceContextStorage.set(context)
-
- def clearContext: Unit = traceContextStorage.set(EmptyTraceContext)
-
- def currentContext: TraceContext = traceContextStorage.get()
-
- def start(name: String, token: Option[String] = None)(implicit system: ActorSystem) = {
- val ctx = newTraceContext(name, token, system)
- traceContextStorage.set(ctx)
- }
-
- def rename(name: String): Unit = currentContext.rename(name)
-
- def withNewTraceContext[T](name: String, token: Option[String] = None)(thunk: ⇒ T)(implicit system: ActorSystem): T =
- withTraceContext(newTraceContext(name, token, system))(thunk)
-
- def withTraceContext[T](context: TraceContext)(thunk: ⇒ T): T = {
- val oldContext = currentContext
- setContext(context)
-
- try thunk finally setContext(oldContext)
- }
-
- def withTraceContextAndSystem[T](thunk: (TraceContext, ActorSystem) ⇒ T): Option[T] = currentContext match {
- case ctx: MetricsOnlyContext ⇒ Some(thunk(ctx, ctx.system))
- case EmptyTraceContext ⇒ None
- }
-
- def withInlineTraceContextReplacement[T](traceCtx: TraceContext)(thunk: ⇒ T): T = macro InlineTraceContextMacro.withInlineTraceContextImpl[T, TraceContext]
-
- def finish(): Unit = currentContext.finish()
-
-}
diff --git a/kamon-core/src/main/scala/kamon/trace/TracerExtension.scala b/kamon-core/src/main/scala/kamon/trace/TracerExtension.scala
new file mode 100644
index 00000000..41dcd6bc
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/trace/TracerExtension.scala
@@ -0,0 +1,94 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+
+package kamon.trace
+
+import java.net.InetAddress
+import java.util.concurrent.atomic.AtomicLong
+
+import akka.actor._
+import akka.actor
+import kamon.Kamon
+import kamon.metric.{ Metrics, MetricsExtension }
+import kamon.util.{ NanoInterval, RelativeNanoTimestamp, NanoTimestamp, GlobPathFilter }
+
+import scala.util.Try
+
+object Tracer extends ExtensionId[TracerExtension] with ExtensionIdProvider {
+ override def get(system: ActorSystem): TracerExtension = super.get(system)
+ def lookup(): ExtensionId[_ <: actor.Extension] = Tracer
+ def createExtension(system: ExtendedActorSystem): TracerExtension = new TracerExtensionImpl(system)
+}
+
+trait TracerExtension extends Kamon.Extension {
+ def newContext(name: String): TraceContext
+ def newContext(name: String, token: String): TraceContext
+ def newContext(name: String, token: String, timestamp: RelativeNanoTimestamp, isOpen: Boolean, isLocal: Boolean): TraceContext
+
+ def subscribe(subscriber: ActorRef): Unit
+ def unsubscribe(subscriber: ActorRef): Unit
+}
+
+class TracerExtensionImpl(system: ExtendedActorSystem) extends TracerExtension {
+ private val _settings = TraceSettings(system)
+ private val _metricsExtension = Metrics.get(system)
+
+ private val _hostnamePrefix = Try(InetAddress.getLocalHost.getHostName).getOrElse("unknown-localhost")
+ private val _tokenCounter = new AtomicLong
+ private val _subscriptions = system.actorOf(Props[TraceSubscriptions], "trace-subscriptions")
+ private val _incubator = system.actorOf(Incubator.props(_subscriptions))
+
+ private def newToken: String =
+ _hostnamePrefix + "-" + String.valueOf(_tokenCounter.incrementAndGet())
+
+ def newContext(name: String): TraceContext =
+ createTraceContext(name)
+
+ def newContext(name: String, token: String): TraceContext =
+ createTraceContext(name, token)
+
+ def newContext(name: String, token: String, timestamp: RelativeNanoTimestamp, isOpen: Boolean, isLocal: Boolean): TraceContext =
+ createTraceContext(name, token, timestamp, isOpen, isLocal)
+
+ private def createTraceContext(traceName: String, token: String = newToken, startTimestamp: RelativeNanoTimestamp = RelativeNanoTimestamp.now,
+ isOpen: Boolean = true, isLocal: Boolean = true): TraceContext = {
+
+ def newMetricsOnlyContext = new MetricsOnlyContext(traceName, token, isOpen, _settings.levelOfDetail, startTimestamp, null, _metricsExtension, system)
+
+ if (_settings.levelOfDetail == LevelOfDetail.MetricsOnly || !isLocal)
+ newMetricsOnlyContext
+ else {
+ if (!_settings.sampler.shouldTrace)
+ newMetricsOnlyContext
+ else
+ new TracingContext(traceName, token, true, _settings.levelOfDetail, isLocal, startTimestamp, null, _metricsExtension, this, system, dispatchTracingContext)
+ }
+ }
+
+ def subscribe(subscriber: ActorRef): Unit = _subscriptions ! TraceSubscriptions.Subscribe(subscriber)
+ def unsubscribe(subscriber: ActorRef): Unit = _subscriptions ! TraceSubscriptions.Unsubscribe(subscriber)
+
+ private[kamon] def dispatchTracingContext(trace: TracingContext): Unit =
+ if (_settings.sampler.shouldReport(trace.elapsedTime))
+ if (trace.shouldIncubate)
+ _incubator ! trace
+ else
+ _subscriptions ! trace.generateTraceInfo
+
+}
+
+case class TraceInfo(name: String, token: String, timestamp: NanoTimestamp, elapsedTime: NanoInterval, metadata: Map[String, String], segments: List[SegmentInfo])
+case class SegmentInfo(name: String, category: String, library: String, timestamp: NanoTimestamp, elapsedTime: NanoInterval, metadata: Map[String, String]) \ No newline at end of file
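A sketch of the new entry point, assuming an ActorSystem whose configuration includes the kamon.trace section:

    import akka.actor.ActorSystem

    val system = ActorSystem("tracer-example")
    val tracer = Tracer.get(system)

    val context = tracer.newContext("list-products")
    // ... handle the request ...
    context.finish() // records elapsed-time and, if sampled, dispatches a TraceInfo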
diff --git a/kamon-core/src/main/scala/kamon/trace/TracerExtensionSettings.scala b/kamon-core/src/main/scala/kamon/trace/TracerExtensionSettings.scala
new file mode 100644
index 00000000..e6c2d3ef
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/trace/TracerExtensionSettings.scala
@@ -0,0 +1,30 @@
+package kamon.trace
+
+import java.util.concurrent.TimeUnit
+
+import akka.actor.ActorSystem
+
+case class TraceSettings(levelOfDetail: LevelOfDetail, sampler: Sampler)
+
+object TraceSettings {
+ def apply(system: ActorSystem): TraceSettings = {
+ val tracerConfig = system.settings.config.getConfig("kamon.trace")
+
+ val detailLevel: LevelOfDetail = tracerConfig.getString("level-of-detail") match {
+ case "metrics-only" ⇒ LevelOfDetail.MetricsOnly
+ case "simple-trace" ⇒ LevelOfDetail.SimpleTrace
+ case other ⇒ sys.error(s"Unknown tracer level of detail [$other] present in the configuration file.")
+ }
+
+ val sampler: Sampler =
+ if (detailLevel == LevelOfDetail.MetricsOnly) NoSampling
+ else tracerConfig.getString("sampling") match {
+ case "all" ⇒ SampleAll
+ case "random" ⇒ new RandomSampler(tracerConfig.getInt("random-sampler.chance"))
+ case "ordered" ⇒ new OrderedSampler(tracerConfig.getInt("ordered-sampler.interval"))
+ case "threshold" ⇒ new ThresholdSampler(tracerConfig.getDuration("threshold-sampler.minimum-elapsed-time", TimeUnit.NANOSECONDS))
+ }
+
+ TraceSettings(detailLevel, sampler)
+ }
+} \ No newline at end of file
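The keys read above translate to configuration along these lines; the values are illustrative:

    import akka.actor.ActorSystem
    import com.typesafe.config.ConfigFactory

    val config = ConfigFactory.parseString(
      """
        |kamon.trace {
        |  level-of-detail = simple-trace
        |  sampling = random
        |  random-sampler.chance = 10
        |}
      """.stripMargin).withFallback(ConfigFactory.load())

    val settings = TraceSettings(ActorSystem("settings-example", config))
    // settings.levelOfDetail is LevelOfDetail.SimpleTrace; the sampler is a RandomSampler with 10% chance.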
diff --git a/kamon-core/src/main/scala/kamon/trace/TracingContext.scala b/kamon-core/src/main/scala/kamon/trace/TracingContext.scala
index 31ab282d..dd4c3c1a 100644
--- a/kamon-core/src/main/scala/kamon/trace/TracingContext.scala
+++ b/kamon-core/src/main/scala/kamon/trace/TracingContext.scala
@@ -21,14 +21,15 @@ import java.util.concurrent.atomic.AtomicInteger
import akka.actor.ActorSystem
import akka.event.LoggingAdapter
-import kamon.{ NanoInterval, NanoTimestamp, RelativeNanoTimestamp }
+import kamon.util.{ NanoInterval, RelativeNanoTimestamp, NanoTimestamp }
import kamon.metric.MetricsExtension
import scala.collection.concurrent.TrieMap
-private[trace] class TracingContext(traceName: String, token: String, izOpen: Boolean, levelOfDetail: LevelOfDetail, origin: TraceContextOrigin,
- startTimeztamp: RelativeNanoTimestamp, log: LoggingAdapter, metricsExtension: MetricsExtension, traceExtension: TraceExtension, system: ActorSystem)
- extends MetricsOnlyContext(traceName, token, izOpen, levelOfDetail, origin, startTimeztamp, log, metricsExtension, system) {
+private[trace] class TracingContext(traceName: String, token: String, izOpen: Boolean, levelOfDetail: LevelOfDetail,
+ isLocal: Boolean, startTimeztamp: RelativeNanoTimestamp, log: LoggingAdapter, metricsExtension: MetricsExtension,
+ traceExtension: TracerExtensionImpl, system: ActorSystem, traceInfoSink: TracingContext ⇒ Unit)
+ extends MetricsOnlyContext(traceName, token, izOpen, levelOfDetail, startTimeztamp, log, metricsExtension, system) {
private val _openSegments = new AtomicInteger(0)
private val _startTimestamp = NanoTimestamp.now
@@ -46,7 +47,7 @@ private[trace] class TracingContext(traceName: String, token: String, izOpen: Bo
override def finish(): Unit = {
super.finish()
- traceExtension.dispatchTracingContext(this)
+ traceInfoSink(this)
}
override def finishSegment(segmentName: String, category: String, library: String, duration: NanoInterval): Unit = {
@@ -66,7 +67,7 @@ private[trace] class TracingContext(traceName: String, token: String, izOpen: Bo
while (currentSegments.hasNext()) {
val segment = currentSegments.next()
if (segment.isClosed)
- segmentsInfo += segment.createSegmentInfo(_startTimestamp, startRelativeTimestamp)
+ segmentsInfo += segment.createSegmentInfo(_startTimestamp, startTimestamp)
else
log.warning("Segment [{}] will be left out of TraceInfo because it was still open.", segment.name)
}
diff --git a/kamon-core/src/main/scala/kamon/trace/logging/LogbackTraceTokenConverter.scala b/kamon-core/src/main/scala/kamon/trace/logging/LogbackTraceTokenConverter.scala
index f052f009..961c3099 100644
--- a/kamon-core/src/main/scala/kamon/trace/logging/LogbackTraceTokenConverter.scala
+++ b/kamon-core/src/main/scala/kamon/trace/logging/LogbackTraceTokenConverter.scala
@@ -17,11 +17,11 @@ package kamon.trace.logging
import ch.qos.logback.classic.pattern.ClassicConverter
import ch.qos.logback.classic.spi.ILoggingEvent
-import kamon.trace.TraceRecorder
+import kamon.trace.TraceContext
class LogbackTraceTokenConverter extends ClassicConverter {
def convert(event: ILoggingEvent): String = {
- val ctx = TraceRecorder.currentContext
+ val ctx = TraceContext.currentContext
if (ctx.isEmpty)
"undefined"
else
diff --git a/kamon-core/src/main/scala/kamon/trace/logging/MdcKeysSupport.scala b/kamon-core/src/main/scala/kamon/trace/logging/MdcKeysSupport.scala
index 4f4efa4d..4970d97e 100644
--- a/kamon-core/src/main/scala/kamon/trace/logging/MdcKeysSupport.scala
+++ b/kamon-core/src/main/scala/kamon/trace/logging/MdcKeysSupport.scala
@@ -17,14 +17,14 @@
package kamon.trace.logging
import kamon.trace.TraceLocal.AvailableToMdc
-import kamon.trace.{ EmptyTraceContext, MetricsOnlyContext, TraceContext, TraceRecorder }
+import kamon.trace.{ EmptyTraceContext, MetricsOnlyContext, TraceContext }
import org.slf4j.MDC
trait MdcKeysSupport {
def withMdc[A](thunk: ⇒ A): A = {
- val keys = copyToMdc(TraceRecorder.currentContext)
+ val keys = copyToMdc(TraceContext.currentContext)
try thunk finally keys.foreach(key ⇒ MDC.remove(key))
}
diff --git a/kamon-core/src/main/scala/kamon/util/ConfigTools.scala b/kamon-core/src/main/scala/kamon/util/ConfigTools.scala
new file mode 100644
index 00000000..9810428e
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/util/ConfigTools.scala
@@ -0,0 +1,26 @@
+package kamon.util
+
+import java.util.concurrent.TimeUnit
+
+import com.typesafe.config.Config
+
+import scala.concurrent.duration.FiniteDuration
+
+object ConfigTools {
+ implicit class Syntax(val config: Config) extends AnyVal {
+ // We are using the deprecated .getNanoseconds option to keep Kamon source code compatible with
+ // versions of Akka using older typesafe-config versions.
+
+ def getFiniteDuration(path: String): FiniteDuration =
+ FiniteDuration(config.getNanoseconds(path), TimeUnit.NANOSECONDS)
+
+ def firstLevelKeys: Set[String] = {
+ import scala.collection.JavaConverters._
+
+ config.entrySet().asScala.map {
+ entry ⇒ entry.getKey.takeWhile(_ != '.')
+ }.toSet
+ }
+ }
+
+}
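Usage sketch for the two helpers:

    import com.typesafe.config.ConfigFactory
    import kamon.util.ConfigTools.Syntax

    val config = ConfigFactory.parseString(
      """
        |kamon.metric.tick-interval = 10 seconds
        |kamon.trace.level-of-detail = metrics-only
      """.stripMargin)

    config.getFiniteDuration("kamon.metric.tick-interval") // 10 seconds as a FiniteDuration
    config.getConfig("kamon").firstLevelKeys               // Set("metric", "trace")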
diff --git a/kamon-core/src/main/scala/kamon/util/FastDispatch.scala b/kamon-core/src/main/scala/kamon/util/FastDispatch.scala
new file mode 100644
index 00000000..8f23164a
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/util/FastDispatch.scala
@@ -0,0 +1,22 @@
+package kamon.util
+
+import akka.actor.ActorRef
+
+import scala.concurrent.{ ExecutionContext, Future }
+
+/**
+ * Extension for Future[ActorRef]. Dispatches a message on the calling thread when the future has already completed,
+ * and falls back to scheduling a regular callback otherwise. Especially useful when using the ModuleSupervisor
+ * extension to create actors.
+ */
+object FastDispatch {
+ implicit class Syntax(val target: Future[ActorRef]) extends AnyVal {
+
+ def fastDispatch(message: Any)(implicit ec: ExecutionContext): Unit =
+ if (target.isCompleted)
+ target.value.get.map(_ ! message)
+ else
+ target.map(_ ! message)
+ }
+
+}
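Usage sketch; the Props.empty actor stands in for one created asynchronously, e.g. through the ModuleSupervisor:

    import akka.actor.{ ActorRef, ActorSystem, Props }
    import kamon.util.FastDispatch.Syntax
    import scala.concurrent.Future

    val system = ActorSystem("fast-dispatch-example")
    import system.dispatcher

    val eventualRef: Future[ActorRef] = Future(system.actorOf(Props.empty))
    eventualRef.fastDispatch("collect") // same-thread send if the future already completed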
diff --git a/kamon-core/src/main/scala/kamon/util/MapMerge.scala b/kamon-core/src/main/scala/kamon/util/MapMerge.scala
new file mode 100644
index 00000000..b7f18788
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/util/MapMerge.scala
@@ -0,0 +1,27 @@
+package kamon.util
+
+object MapMerge {
+
+ /**
+ * Merge two immutable maps with the same key and value types, using the provided valueMerge function.
+ */
+ implicit class Syntax[K, V](val map: Map[K, V]) extends AnyVal {
+ def merge(that: Map[K, V], valueMerge: (V, V) ⇒ V): Map[K, V] = {
+ val merged = Map.newBuilder[K, V]
+
+ map.foreach {
+ case (key, value) ⇒
+ val mergedValue = that.get(key).map(v ⇒ valueMerge(value, v)).getOrElse(value)
+ merged += key -> mergedValue
+ }
+
+ that.foreach {
+ case kv @ (key, _) if !map.contains(key) ⇒ merged += kv
+ case other ⇒ // ignore, already included.
+ }
+
+ merged.result()
+ }
+ }
+
+}
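For example:

    import kamon.util.MapMerge.Syntax

    val left = Map("requests" -> 10L, "errors" -> 1L)
    val right = Map("requests" -> 5L, "timeouts" -> 2L)

    left.merge(right, _ + _)
    // Map("requests" -> 15, "errors" -> 1, "timeouts" -> 2)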
diff --git a/kamon-core/src/main/scala/kamon/TimeUnits.scala b/kamon-core/src/main/scala/kamon/util/Timestamp.scala
index f2933a11..4ff031a6 100644
--- a/kamon-core/src/main/scala/kamon/TimeUnits.scala
+++ b/kamon-core/src/main/scala/kamon/util/Timestamp.scala
@@ -1,7 +1,7 @@
-package kamon
+package kamon.util
/**
- * Epoch time stamp in seconds.
+ * Epoch time stamp.
*/
class Timestamp(val seconds: Long) extends AnyVal {
def <(that: Timestamp): Boolean = this.seconds < that.seconds
@@ -24,7 +24,12 @@ object Timestamp {
*/
class MilliTimestamp(val millis: Long) extends AnyVal {
override def toString: String = String.valueOf(millis) + ".millis"
+
def toTimestamp: Timestamp = new Timestamp(millis / 1000)
+ def toRelativeNanoTimestamp: RelativeNanoTimestamp = {
+ val diff = (System.currentTimeMillis() - millis) * 1000000
+ new RelativeNanoTimestamp(System.nanoTime() - diff)
+ }
}
object MilliTimestamp {
@@ -50,6 +55,9 @@ object NanoTimestamp {
*/
class RelativeNanoTimestamp(val nanos: Long) extends AnyVal {
override def toString: String = String.valueOf(nanos) + ".nanos"
+
+ def toMilliTimestamp: MilliTimestamp =
+ new MilliTimestamp(System.currentTimeMillis - ((System.nanoTime - nanos) / 1000000))
}
object RelativeNanoTimestamp {
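Both new conversions anchor the wall-clock value against the current System.nanoTime reading, so a round trip is only approximate (millisecond resolution, and the two "now" readings happen at slightly different instants):

    val wallClock = new MilliTimestamp(System.currentTimeMillis() - 1500) // 1.5 s ago
    val relative = wallClock.toRelativeNanoTimestamp
    // relative.nanos is approximately System.nanoTime() - 1500000000

    relative.toMilliTimestamp.millis // approximately wallClock.millis, within a few milliseconds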
diff --git a/kamon-core/src/main/scala/kamon/util/TriemapAtomicGetOrElseUpdate.scala b/kamon-core/src/main/scala/kamon/util/TriemapAtomicGetOrElseUpdate.scala
new file mode 100644
index 00000000..cd457cdc
--- /dev/null
+++ b/kamon-core/src/main/scala/kamon/util/TriemapAtomicGetOrElseUpdate.scala
@@ -0,0 +1,18 @@
+package kamon.util
+
+import scala.collection.concurrent.TrieMap
+
+object TriemapAtomicGetOrElseUpdate {
+
+ /**
+ * Workaround for the non-thread-safe [[scala.collection.concurrent.TrieMap#getOrElseUpdate]] method. More details on
+ * why this is necessary can be found at [[https://issues.scala-lang.org/browse/SI-7943]].
+ */
+ implicit class Syntax[K, V](val trieMap: TrieMap[K, V]) extends AnyVal {
+ def atomicGetOrElseUpdate(key: K, op: ⇒ V): V =
+ trieMap.get(key) match {
+ case Some(v) ⇒ v
+ case None ⇒ val d = op; trieMap.putIfAbsent(key, d).getOrElse(d)
+ }
+ }
+}
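Usage sketch; under contention op may be evaluated more than once, but every caller observes the single value that won the putIfAbsent race:

    import scala.collection.concurrent.TrieMap
    import kamon.util.TriemapAtomicGetOrElseUpdate.Syntax

    val recorders = TrieMap.empty[String, Long]
    recorders.atomicGetOrElseUpdate("requests", 0L)  // creates and stores 0L
    recorders.atomicGetOrElseUpdate("requests", 99L) // returns the stored 0L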
diff --git a/kamon-core/src/test/resources/logback.xml b/kamon-core/src/test/resources/logback.xml
index eb578346..dd623d61 100644
--- a/kamon-core/src/test/resources/logback.xml
+++ b/kamon-core/src/test/resources/logback.xml
@@ -1,17 +1,17 @@
<configuration scan="true">
+ <contextListener class="ch.qos.logback.classic.jul.LevelChangePropagator">
+ <resetJUL>true</resetJUL>
+ </contextListener>
- <contextListener class="ch.qos.logback.classic.jul.LevelChangePropagator">
- <resetJUL>true</resetJUL>
- </contextListener>
+ <conversionRule conversionWord="traceToken" converterClass="kamon.trace.logging.LogbackTraceTokenConverter"/>
- <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <pattern>%date{HH:mm:ss.SSS} %-5level [%X{uow}][%X{requestId}] [%thread] %logger{55} - %msg%n</pattern>
- </encoder>
- </appender>
-
- <root level="debug">
- <appender-ref ref="STDOUT" />
- </root>
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%date{HH:mm:ss.SSS} %-5level [%traceToken][%thread] %logger{55} - %msg%n</pattern>
+ </encoder>
+ </appender>
+ <root level="error">
+ <appender-ref ref="STDOUT"/>
+ </root>
</configuration>
diff --git a/kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala b/kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala
index 9144725e..40200685 100644
--- a/kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/SubscriptionsProtocolSpec.scala
@@ -1,128 +1,110 @@
package kamon.metric
import akka.actor._
-import akka.testkit.{ TestProbe, ImplicitSender, TestKitBase }
+import akka.testkit.{ TestProbe, ImplicitSender }
import com.typesafe.config.ConfigFactory
-import kamon.Kamon
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
+import kamon.testkit.BaseKamonSpec
import scala.concurrent.duration._
-class SubscriptionsProtocolSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender {
- implicit lazy val system: ActorSystem = ActorSystem("subscriptions-protocol-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 hour
- |}
- """.stripMargin))
+class SubscriptionsProtocolSpec extends BaseKamonSpec("subscriptions-protocol-spec") with ImplicitSender {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ |}
+ """.stripMargin)
- val metricsExtension = Kamon(Metrics)(system)
- import metricsExtension.{ register, subscribe, unsubscribe }
+ val metricsModule = kamon.metrics
+ import metricsModule.{ register, subscribe, unsubscribe }
"the Subscriptions messaging protocol" should {
"allow subscribing for a single tick" in {
val subscriber = TestProbe()
- register(TraceMetrics("one-shot"), TraceMetrics.Factory)
- subscribe(TraceMetrics, "one-shot", subscriber.ref, permanently = false)
+ register(TraceMetrics, "one-shot")
+ subscribe("trace", "one-shot", subscriber.ref, permanently = false)
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
tickSnapshot.metrics.size should be(1)
- tickSnapshot.metrics.keys should contain(TraceMetrics("one-shot"))
+ tickSnapshot.metrics.keys should contain(Entity("one-shot", "trace"))
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
subscriber.expectNoMsg(1 second)
}
"allow subscribing permanently to a metric" in {
val subscriber = TestProbe()
- register(TraceMetrics("permanent"), TraceMetrics.Factory)
- subscribe(TraceMetrics, "permanent", subscriber.ref, permanently = true)
+ register(TraceMetrics, "permanent")
+ subscribe("trace", "permanent", subscriber.ref, permanently = true)
for (repetition ← 1 to 5) {
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
tickSnapshot.metrics.size should be(1)
- tickSnapshot.metrics.keys should contain(TraceMetrics("permanent"))
- subscriber.expectNoMsg(1 second)
+ tickSnapshot.metrics.keys should contain(Entity("permanent", "trace"))
}
}
"allow subscribing to metrics matching a glob pattern" in {
val subscriber = TestProbe()
- register(TraceMetrics("include-one"), TraceMetrics.Factory)
- register(TraceMetrics("exclude-two"), TraceMetrics.Factory)
- register(TraceMetrics("include-three"), TraceMetrics.Factory)
- subscribe(TraceMetrics, "include-*", subscriber.ref, permanently = true)
+ register(TraceMetrics, "include-one")
+ register(TraceMetrics, "exclude-two")
+ register(TraceMetrics, "include-three")
+ subscribe("trace", "include-*", subscriber.ref, permanently = true)
for (repetition ← 1 to 5) {
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
tickSnapshot.metrics.size should be(2)
- tickSnapshot.metrics.keys should contain(TraceMetrics("include-one"))
- tickSnapshot.metrics.keys should contain(TraceMetrics("include-three"))
- subscriber.expectNoMsg(1 second)
+ tickSnapshot.metrics.keys should contain(Entity("include-one", "trace"))
+ tickSnapshot.metrics.keys should contain(Entity("include-three", "trace"))
}
}
"send a single TickMetricSnapshot to each subscriber, even if subscribed multiple times" in {
val subscriber = TestProbe()
- register(TraceMetrics("include-one"), TraceMetrics.Factory)
- register(TraceMetrics("exclude-two"), TraceMetrics.Factory)
- register(TraceMetrics("include-three"), TraceMetrics.Factory)
- subscribe(TraceMetrics, "include-one", subscriber.ref, permanently = true)
- subscribe(TraceMetrics, "include-three", subscriber.ref, permanently = true)
+ register(TraceMetrics, "include-one")
+ register(TraceMetrics, "exclude-two")
+ register(TraceMetrics, "include-three")
+ subscribe("trace", "include-one", subscriber.ref, permanently = true)
+ subscribe("trace", "include-three", subscriber.ref, permanently = true)
for (repetition ← 1 to 5) {
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
tickSnapshot.metrics.size should be(2)
- tickSnapshot.metrics.keys should contain(TraceMetrics("include-one"))
- tickSnapshot.metrics.keys should contain(TraceMetrics("include-three"))
+ tickSnapshot.metrics.keys should contain(Entity("include-one", "trace"))
+ tickSnapshot.metrics.keys should contain(Entity("include-three", "trace"))
}
}
"allow un-subscribing a subscriber" in {
val subscriber = TestProbe()
- register(TraceMetrics("one-shot"), TraceMetrics.Factory)
- subscribe(TraceMetrics, "one-shot", subscriber.ref, permanently = true)
+ register(TraceMetrics, "one-shot")
+ subscribe("trace", "one-shot", subscriber.ref, permanently = true)
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
tickSnapshot.metrics.size should be(1)
- tickSnapshot.metrics.keys should contain(TraceMetrics("one-shot"))
+ tickSnapshot.metrics.keys should contain(Entity("one-shot", "trace"))
unsubscribe(subscriber.ref)
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
+ flushSubscriptions()
subscriber.expectNoMsg(1 second)
}
+ }
- "watch all subscribers and un-subscribe them if they die" in {
- val subscriber = TestProbe()
- val forwarderSubscriber = system.actorOf(Props(new ForwarderSubscriber(subscriber.ref)))
- watch(forwarderSubscriber)
- register(TraceMetrics("one-shot"), TraceMetrics.Factory)
- subscribe(TraceMetrics, "one-shot", forwarderSubscriber, permanently = true)
-
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
- val tickSnapshot = subscriber.expectMsgType[TickMetricSnapshot]
- tickSnapshot.metrics.size should be(1)
- tickSnapshot.metrics.keys should contain(TraceMetrics("one-shot"))
-
- forwarderSubscriber ! PoisonPill
- expectTerminated(forwarderSubscriber)
-
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
- metricsExtension.subscriptions ! Subscriptions.FlushMetrics
- subscriber.expectNoMsg(2 seconds)
- }
+ def subscriptionsActor: ActorRef = {
+ val listener = TestProbe()
+ system.actorSelection("/user/kamon/kamon-metrics").tell(Identify(1), listener.ref)
+ listener.expectMsgType[ActorIdentity].ref.get
}
}
diff --git a/kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala b/kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala
index a9197ab5..2e1f246d 100644
--- a/kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/TickMetricSnapshotBufferSpec.scala
@@ -17,32 +17,29 @@
package kamon.metric
import com.typesafe.config.ConfigFactory
-import kamon.{ MilliTimestamp, Kamon }
-import kamon.metric.instrument.Histogram
import kamon.metric.instrument.Histogram.MutableRecord
-import org.scalatest.{ Matchers, WordSpecLike }
-import akka.testkit.{ ImplicitSender, TestKitBase }
-import akka.actor.ActorSystem
+import kamon.testkit.BaseKamonSpec
+import kamon.util.MilliTimestamp
+import akka.testkit.ImplicitSender
import scala.concurrent.duration._
-import kamon.metric.Subscriptions.TickMetricSnapshot
-
-class TickMetricSnapshotBufferSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender {
- implicit lazy val system: ActorSystem = ActorSystem("trace-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 hour
- | default-collection-context-buffer-size = 10
- |
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = [ "non-tracked-trace"]
- | }
- | }
- | ]
- |}
- """.stripMargin))
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
+
+class TickMetricSnapshotBufferSpec extends BaseKamonSpec("trace-metrics-spec") with ImplicitSender {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |
+ | filters {
+ | trace {
+ | includes = [ "*" ]
+ | excludes = [ "non-tracked-trace" ]
+ | }
+ | }
+ |}
+ """.stripMargin)
"the TickMetricSnapshotBuffer" should {
"merge TickMetricSnapshots received until the flush timeout is reached and fix the from/to fields" in new SnapshotFixtures {
@@ -74,7 +71,7 @@ class TickMetricSnapshotBufferSpec extends TestKitBase with WordSpecLike with Ma
mergedSnapshot.to.millis should equal(4000)
mergedSnapshot.metrics should not be ('empty)
- val testMetricSnapshot = mergedSnapshot.metrics(testTraceIdentity).metrics(TraceMetrics.ElapsedTime).asInstanceOf[Histogram.Snapshot]
+ val testMetricSnapshot = mergedSnapshot.metrics(testTraceIdentity).histogram("elapsed-time").get
testMetricSnapshot.min should equal(10)
testMetricSnapshot.max should equal(300)
testMetricSnapshot.numberOfMeasurements should equal(6)
@@ -88,23 +85,23 @@ class TickMetricSnapshotBufferSpec extends TestKitBase with WordSpecLike with Ma
}
trait SnapshotFixtures {
- val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
- val testTraceIdentity = TraceMetrics("buffer-spec-test-trace")
- val traceRecorder = Kamon(Metrics).register(testTraceIdentity, TraceMetrics.Factory).get
+ val collectionContext = kamon.metrics.buildDefaultCollectionContext
+ val testTraceIdentity = Entity("buffer-spec-test-trace", "trace")
+ val traceRecorder = kamon.metrics.register(TraceMetrics, "buffer-spec-test-trace").get.recorder
val firstEmpty = TickMetricSnapshot(new MilliTimestamp(1000), new MilliTimestamp(2000), Map.empty)
val secondEmpty = TickMetricSnapshot(new MilliTimestamp(2000), new MilliTimestamp(3000), Map.empty)
val thirdEmpty = TickMetricSnapshot(new MilliTimestamp(3000), new MilliTimestamp(4000), Map.empty)
- traceRecorder.elapsedTime.record(10L)
- traceRecorder.elapsedTime.record(20L)
- traceRecorder.elapsedTime.record(30L)
+ traceRecorder.ElapsedTime.record(10L)
+ traceRecorder.ElapsedTime.record(20L)
+ traceRecorder.ElapsedTime.record(30L)
val firstNonEmpty = TickMetricSnapshot(new MilliTimestamp(1000), new MilliTimestamp(2000), Map(
(testTraceIdentity -> traceRecorder.collect(collectionContext))))
- traceRecorder.elapsedTime.record(10L)
- traceRecorder.elapsedTime.record(10L)
- traceRecorder.elapsedTime.record(300L)
+ traceRecorder.ElapsedTime.record(10L)
+ traceRecorder.ElapsedTime.record(10L)
+ traceRecorder.ElapsedTime.record(300L)
val secondNonEmpty = TickMetricSnapshot(new MilliTimestamp(1000), new MilliTimestamp(2000), Map(
(testTraceIdentity -> traceRecorder.collect(collectionContext))))
}
diff --git a/kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala b/kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala
index cd10f2d3..793c0112 100644
--- a/kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/TraceMetricsSpec.scala
@@ -1,92 +1,83 @@
package kamon.metric
-import akka.actor.ActorSystem
-import akka.testkit.{ ImplicitSender, TestKitBase }
+import akka.testkit.ImplicitSender
import com.typesafe.config.ConfigFactory
-import kamon.Kamon
-import kamon.metric.TraceMetrics.TraceMetricsSnapshot
-import kamon.trace.{ SegmentMetricIdentity, TraceRecorder }
-import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.TraceContext
+import kamon.metric.instrument.Histogram
-class TraceMetricsSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender {
- implicit lazy val system: ActorSystem = ActorSystem("trace-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 hour
- | default-collection-context-buffer-size = 10
- |
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = [ "non-tracked-trace"]
- | }
- | }
- | ]
- | precision {
- | default-histogram-precision {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- |
- | default-min-max-counter-precision {
- | refresh-interval = 1 second
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | }
- |}
- """.stripMargin))
+class TraceMetricsSpec extends BaseKamonSpec("trace-metrics-spec") with ImplicitSender {
+ import TraceMetricsSpec.SegmentSyntax
+
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |
+ | filters {
+ | trace {
+ | includes = [ "*" ]
+ | excludes = [ "non-tracked-trace"]
+ | }
+ | }
+ |}
+ """.stripMargin)
"the TraceMetrics" should {
"record the elapsed time between a trace creation and finish" in {
for (repetitions ← 1 to 10) {
- TraceRecorder.withNewTraceContext("record-elapsed-time") {
- TraceRecorder.finish()
+ TraceContext.withContext(newContext("record-elapsed-time")) {
+ TraceContext.currentContext.finish()
}
}
- val snapshot = takeSnapshotOf("record-elapsed-time")
- snapshot.elapsedTime.numberOfMeasurements should be(10)
- snapshot.segments shouldBe empty
+ val snapshot = takeSnapshotOf("record-elapsed-time", "trace")
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(10)
}
"record the elapsed time for segments that occur inside a given trace" in {
- TraceRecorder.withNewTraceContext("trace-with-segments") {
- val segment = TraceRecorder.currentContext.startSegment("test-segment", "test-category", "test-library")
+ TraceContext.withContext(newContext("trace-with-segments")) {
+ val segment = TraceContext.currentContext.startSegment("test-segment", "test-category", "test-library")
segment.finish()
- TraceRecorder.finish()
+ TraceContext.currentContext.finish()
}
- val snapshot = takeSnapshotOf("trace-with-segments")
- snapshot.elapsedTime.numberOfMeasurements should be(1)
+ val snapshot = takeSnapshotOf("trace-with-segments", "trace")
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
snapshot.segments.size should be(1)
- snapshot.segments(SegmentMetricIdentity("test-segment", "test-category", "test-library")).numberOfMeasurements should be(1)
+ snapshot.segment("test-segment", "test-category", "test-library").numberOfMeasurements should be(1)
}
"record the elapsed time for segments that finish after their correspondent trace has finished" in {
- val segment = TraceRecorder.withNewTraceContext("closing-segment-after-trace") {
- val s = TraceRecorder.currentContext.startSegment("test-segment", "test-category", "test-library")
- TraceRecorder.finish()
+ val segment = TraceContext.withContext(newContext("closing-segment-after-trace")) {
+ val s = TraceContext.currentContext.startSegment("test-segment", "test-category", "test-library")
+ TraceContext.currentContext.finish()
s
}
- val beforeFinishSegmentSnapshot = takeSnapshotOf("closing-segment-after-trace")
- beforeFinishSegmentSnapshot.elapsedTime.numberOfMeasurements should be(1)
+ val beforeFinishSegmentSnapshot = takeSnapshotOf("closing-segment-after-trace", "trace")
+ beforeFinishSegmentSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
beforeFinishSegmentSnapshot.segments.size should be(0)
segment.finish()
- val afterFinishSegmentSnapshot = takeSnapshotOf("closing-segment-after-trace")
- afterFinishSegmentSnapshot.elapsedTime.numberOfMeasurements should be(0)
+ val afterFinishSegmentSnapshot = takeSnapshotOf("closing-segment-after-trace", "trace")
+ afterFinishSegmentSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(0)
afterFinishSegmentSnapshot.segments.size should be(1)
- afterFinishSegmentSnapshot.segments(SegmentMetricIdentity("test-segment", "test-category", "test-library")).numberOfMeasurements should be(1)
+ afterFinishSegmentSnapshot.segment("test-segment", "test-category", "test-library").numberOfMeasurements should be(1)
}
}
+}
+
+object TraceMetricsSpec {
+ implicit class SegmentSyntax(val entitySnapshot: EntitySnapshot) extends AnyVal {
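+ // Segment metrics are histograms whose keys carry metadata (name, category, library); filtering on the
+ // "category" tag separates them from plain trace metrics such as elapsed-time.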
+ def segments: Map[HistogramKey, Histogram.Snapshot] = {
+ entitySnapshot.histograms.filterKeys(_.metadata.contains("category"))
+ }
- def takeSnapshotOf(traceName: String): TraceMetricsSnapshot = {
- val recorder = Kamon(Metrics).register(TraceMetrics(traceName), TraceMetrics.Factory)
- val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
- recorder.get.collect(collectionContext)
+ def segment(name: String, category: String, library: String): Histogram.Snapshot =
+ segments(TraceMetrics.segmentKey(name, category, library))
}
}
diff --git a/kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala b/kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala
index 6c4fe3fb..a345c6a9 100644
--- a/kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/UserMetricsSpec.scala
@@ -1,310 +1,110 @@
package kamon.metric
-import akka.actor.{ Props, ActorSystem }
-import akka.testkit.{ ImplicitSender, TestKitBase }
import com.typesafe.config.ConfigFactory
-import kamon.Kamon
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import kamon.metric.UserMetrics._
-import kamon.metric.instrument.{ Histogram, Counter, MinMaxCounter, Gauge }
-import kamon.metric.instrument.Histogram.MutableRecord
-import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.metric.instrument.Histogram.DynamicRange
+import kamon.testkit.BaseKamonSpec
import scala.concurrent.duration._
-class UserMetricsSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender {
- implicit lazy val system: ActorSystem = ActorSystem("actor-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 hour
- | default-collection-context-buffer-size = 10
- |
- | precision {
- | default-histogram-precision {
- | highest-trackable-value = 10000
- | significant-value-digits = 2
- | }
- |
- | default-min-max-counter-precision {
- | refresh-interval = 1 hour
- | highest-trackable-value = 1000
- | significant-value-digits = 2
- | }
- |
- | default-gauge-precision {
- | refresh-interval = 1 hour
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | }
- |}
- """.stripMargin))
+class UserMetricsSpec extends BaseKamonSpec("user-metrics-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ | default-collection-context-buffer-size = 10
+ |}
+ """.stripMargin)
"the UserMetrics extension" should {
+
"allow registering a fully configured Histogram and get the same Histogram if registering again" in {
- val histogramA = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Normal, 10000L)
- val histogramB = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Normal, 10000L)
+ val histogramA = kamon.userMetrics.histogram("histogram-with-settings", DynamicRange(1, 10000, 2))
+ val histogramB = kamon.userMetrics.histogram("histogram-with-settings", DynamicRange(1, 10000, 2))
histogramA shouldBe theSameInstanceAs(histogramB)
}
"return the original Histogram when registering a fully configured Histogram for second time but with different settings" in {
- val histogramA = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Normal, 10000L)
- val histogramB = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Fine, 50000L)
+ val histogramA = kamon.userMetrics.histogram("histogram-with-settings", DynamicRange(1, 10000, 2))
+ val histogramB = kamon.userMetrics.histogram("histogram-with-settings", DynamicRange(1, 50000, 2))
histogramA shouldBe theSameInstanceAs(histogramB)
}
"allow registering a Histogram that takes the default configuration from the kamon.metrics.precision settings" in {
- Kamon(UserMetrics).registerHistogram("histogram-with-default-configuration")
+ kamon.userMetrics.histogram("histogram-with-default-configuration")
}
"allow registering a Counter and get the same Counter if registering again" in {
- val counterA = Kamon(UserMetrics).registerCounter("counter")
- val counterB = Kamon(UserMetrics).registerCounter("counter")
+ val counterA = kamon.userMetrics.counter("counter")
+ val counterB = kamon.userMetrics.counter("counter")
counterA shouldBe theSameInstanceAs(counterB)
}
"allow registering a fully configured MinMaxCounter and get the same MinMaxCounter if registering again" in {
- val minMaxCounterA = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Normal, 1000L, 1 second)
- val minMaxCounterB = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Normal, 1000L, 1 second)
+ val minMaxCounterA = kamon.userMetrics.minMaxCounter("min-max-counter-with-settings", DynamicRange(1, 10000, 2), 1 second)
+ val minMaxCounterB = kamon.userMetrics.minMaxCounter("min-max-counter-with-settings", DynamicRange(1, 10000, 2), 1 second)
minMaxCounterA shouldBe theSameInstanceAs(minMaxCounterB)
}
"return the original MinMaxCounter when registering a fully configured MinMaxCounter for second time but with different settings" in {
- val minMaxCounterA = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Normal, 1000L, 1 second)
- val minMaxCounterB = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Fine, 5000L, 1 second)
+ val minMaxCounterA = kamon.userMetrics.minMaxCounter("min-max-counter-with-settings", DynamicRange(1, 10000, 2), 1 second)
+ val minMaxCounterB = kamon.userMetrics.minMaxCounter("min-max-counter-with-settings", DynamicRange(1, 50000, 2), 1 second)
minMaxCounterA shouldBe theSameInstanceAs(minMaxCounterB)
}
"allow registering a MinMaxCounter that takes the default configuration from the kamon.metrics.precision settings" in {
- Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-default-configuration")
+ kamon.userMetrics.minMaxCounter("min-max-counter-with-default-configuration")
}
"allow registering a fully configured Gauge and get the same Gauge if registering again" in {
- val gaugeA = Kamon(UserMetrics).registerGauge("gauge-with-settings", Histogram.Precision.Normal, 1000L, 1 second) {
+ val gaugeA = kamon.userMetrics.gauge("gauge-with-settings", DynamicRange(1, 10000, 2), 1 second, {
() ⇒ 1L
- }
+ })
- val gaugeB = Kamon(UserMetrics).registerGauge("gauge-with-settings", Histogram.Precision.Normal, 1000L, 1 second) {
+ val gaugeB = kamon.userMetrics.gauge("gauge-with-settings", DynamicRange(1, 10000, 2), 1 second, {
() ⇒ 1L
- }
+ })
gaugeA shouldBe theSameInstanceAs(gaugeB)
}
"return the original Gauge when registering a fully configured Gauge for second time but with different settings" in {
- val gaugeA = Kamon(UserMetrics).registerGauge("gauge-with-settings", Histogram.Precision.Normal, 1000L, 1 second) {
+ val gaugeA = kamon.userMetrics.gauge("gauge-with-settings", DynamicRange(1, 10000, 2), 1 second, {
() ⇒ 1L
- }
+ })
- val gaugeB = Kamon(UserMetrics).registerGauge("gauge-with-settings", Histogram.Precision.Fine, 5000L, 1 second) {
+ val gaugeB = kamon.userMetrics.gauge("gauge-with-settings", DynamicRange(1, 10000, 2), 1 second, {
() ⇒ 1L
- }
+ })
gaugeA shouldBe theSameInstanceAs(gaugeB)
}
"allow registering a Gauge that takes the default configuration from the kamon.metrics.precision settings" in {
- Kamon(UserMetrics).registerGauge("gauge-with-default-configuration") {
+ kamon.userMetrics.gauge("gauge-with-default-configuration", {
() ⇒ 2L
- }
+ })
}
"allow un-registering user metrics" in {
- val metricsExtension = Kamon(Metrics)
- Kamon(UserMetrics).registerCounter("counter-for-remove")
- Kamon(UserMetrics).registerHistogram("histogram-for-remove")
- Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-for-remove")
- Kamon(UserMetrics).registerGauge("gauge-for-remove") { () ⇒ 2L }
-
- metricsExtension.storage.keys should contain(UserCounter("counter-for-remove"))
- metricsExtension.storage.keys should contain(UserHistogram("histogram-for-remove"))
- metricsExtension.storage.keys should contain(UserMinMaxCounter("min-max-counter-for-remove"))
- metricsExtension.storage.keys should contain(UserGauge("gauge-for-remove"))
-
- Kamon(UserMetrics).removeCounter("counter-for-remove")
- Kamon(UserMetrics).removeHistogram("histogram-for-remove")
- Kamon(UserMetrics).removeMinMaxCounter("min-max-counter-for-remove")
- Kamon(UserMetrics).removeGauge("gauge-for-remove")
-
- metricsExtension.storage.keys should not contain (UserCounter("counter-for-remove"))
- metricsExtension.storage.keys should not contain (UserHistogram("histogram-for-remove"))
- metricsExtension.storage.keys should not contain (UserMinMaxCounter("min-max-counter-for-remove"))
- metricsExtension.storage.keys should not contain (UserGauge("gauge-for-remove"))
- }
-
- "include all the registered metrics in the a tick snapshot and reset all recorders" in {
- Kamon(Metrics).subscribe(UserHistograms, "*", testActor, permanently = true)
- Kamon(Metrics).subscribe(UserCounters, "*", testActor, permanently = true)
- Kamon(Metrics).subscribe(UserMinMaxCounters, "*", testActor, permanently = true)
- Kamon(Metrics).subscribe(UserGauges, "*", testActor, permanently = true)
-
- val histogramWithSettings = Kamon(UserMetrics).registerHistogram("histogram-with-settings", Histogram.Precision.Normal, 10000L)
- val histogramWithDefaultConfiguration = Kamon(UserMetrics).registerHistogram("histogram-with-default-configuration")
- val counter = Kamon(UserMetrics).registerCounter("counter")
- val minMaxCounterWithSettings = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-with-settings", Histogram.Precision.Normal, 1000L, 1 second)
- val gauge = Kamon(UserMetrics).registerGauge("gauge-with-default-configuration") { () ⇒ 2L }
-
- // lets put some values on those metrics
- histogramWithSettings.record(10)
- histogramWithSettings.record(20, 100)
- histogramWithDefaultConfiguration.record(40)
-
- counter.increment()
- counter.increment(16)
-
- minMaxCounterWithSettings.increment(43)
- minMaxCounterWithSettings.decrement()
-
- gauge.record(15)
-
- Kamon(Metrics).subscriptions ! Subscriptions.FlushMetrics
- val firstSnapshot = expectMsgType[TickMetricSnapshot].metrics
-
- firstSnapshot.keys should contain allOf (
- UserHistogram("histogram-with-settings"),
- UserHistogram("histogram-with-default-configuration"))
-
- firstSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (10)
- firstSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (20)
- firstSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(101)
- firstSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain allOf (
- MutableRecord(10, 1),
- MutableRecord(20, 100))
-
- firstSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (40)
- firstSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (40)
- firstSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(1)
- firstSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain only (
- MutableRecord(40, 1))
-
- firstSnapshot(UserCounter("counter")).metrics(Count).asInstanceOf[Counter.Snapshot].count should be(17)
-
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (43)
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(3)
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain allOf (
- MutableRecord(0, 1), // min
- MutableRecord(42, 1), // current
- MutableRecord(43, 1)) // max
-
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (0)
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(3)
- firstSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain only (
- MutableRecord(0, 3)) // min, max and current
-
- firstSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (15)
- firstSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (15)
- firstSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(1)
- firstSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain only (
- MutableRecord(15, 1)) // only the manually recorded value
-
- Kamon(Metrics).subscriptions ! Subscriptions.FlushMetrics
- val secondSnapshot = expectMsgType[TickMetricSnapshot].metrics
-
- secondSnapshot.keys should contain allOf (
- UserHistogram("histogram-with-settings"),
- UserHistogram("histogram-with-default-configuration"))
-
- secondSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- secondSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (0)
- secondSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(0)
- secondSnapshot(UserHistogram("histogram-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream shouldBe empty
-
- secondSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- secondSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (0)
- secondSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(0)
- secondSnapshot(UserHistogram("histogram-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream shouldBe empty
-
- secondSnapshot(UserCounter("counter")).metrics(Count).asInstanceOf[Counter.Snapshot].count should be(0)
-
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (42)
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (42)
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(3)
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-settings")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain only (
- MutableRecord(42, 3)) // max
-
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (0)
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(3)
- secondSnapshot(UserMinMaxCounter("min-max-counter-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain only (
- MutableRecord(0, 3)) // min, max and current
-
- secondSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- secondSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (0)
- secondSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(0)
- secondSnapshot(UserGauge("gauge-with-default-configuration")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream shouldBe empty
-
- Kamon(Metrics).unsubscribe(testActor)
- }
-
- "generate a snapshot that can be merged with another" in {
- val buffer = system.actorOf(TickMetricSnapshotBuffer.props(1 hours, testActor))
- Kamon(Metrics).subscribe(UserHistograms, "*", buffer, permanently = true)
- Kamon(Metrics).subscribe(UserCounters, "*", buffer, permanently = true)
- Kamon(Metrics).subscribe(UserMinMaxCounters, "*", buffer, permanently = true)
- Kamon(Metrics).subscribe(UserGauges, "*", buffer, permanently = true)
-
- val histogram = Kamon(UserMetrics).registerHistogram("histogram-for-merge")
- val counter = Kamon(UserMetrics).registerCounter("counter-for-merge")
- val minMaxCounter = Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-for-merge")
- val gauge = Kamon(UserMetrics).registerGauge("gauge-for-merge") { () ⇒ 10L }
-
- histogram.record(100)
- counter.increment(10)
- minMaxCounter.increment(50)
- minMaxCounter.decrement(10)
- gauge.record(50)
-
- Kamon(Metrics).subscriptions ! Subscriptions.FlushMetrics
- Thread.sleep(2000) // Make sure that the snapshots are taken before proceeding
-
- val extraCounter = Kamon(UserMetrics).registerCounter("extra-counter")
- histogram.record(200)
- extraCounter.increment(20)
- minMaxCounter.increment(40)
- minMaxCounter.decrement(50)
- gauge.record(70)
-
- Kamon(Metrics).subscriptions ! Subscriptions.FlushMetrics
- Thread.sleep(2000) // Make sure that the metrics are buffered.
- buffer ! TickMetricSnapshotBuffer.FlushBuffer
- val snapshot = expectMsgType[TickMetricSnapshot].metrics
-
- snapshot.keys should contain(UserHistogram("histogram-for-merge"))
-
- snapshot(UserHistogram("histogram-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (100)
- snapshot(UserHistogram("histogram-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (200)
- snapshot(UserHistogram("histogram-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(2)
- snapshot(UserHistogram("histogram-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain allOf (
- MutableRecord(100, 1),
- MutableRecord(200, 1))
-
- snapshot(UserCounter("counter-for-merge")).metrics(Count).asInstanceOf[Counter.Snapshot].count should be(10)
- snapshot(UserCounter("extra-counter")).metrics(Count).asInstanceOf[Counter.Snapshot].count should be(20)
-
- snapshot(UserMinMaxCounter("min-max-counter-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (0)
- snapshot(UserMinMaxCounter("min-max-counter-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (80)
- snapshot(UserMinMaxCounter("min-max-counter-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(6)
- snapshot(UserMinMaxCounter("min-max-counter-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain allOf (
- MutableRecord(0, 1), // min in first snapshot
- MutableRecord(30, 2), // min and current in second snapshot
- MutableRecord(40, 1), // current in first snapshot
- MutableRecord(50, 1), // max in first snapshot
- MutableRecord(80, 1)) // max in second snapshot
-
- snapshot(UserGauge("gauge-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].min shouldBe (50)
- snapshot(UserGauge("gauge-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].max shouldBe (70)
- snapshot(UserGauge("gauge-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].numberOfMeasurements should be(2)
- snapshot(UserGauge("gauge-for-merge")).metrics(RecordedValues).asInstanceOf[Histogram.Snapshot].recordsIterator.toStream should contain allOf (
- MutableRecord(50, 1),
- MutableRecord(70, 1))
-
- Kamon(Metrics).unsubscribe(testActor)
+ val counter = kamon.userMetrics.counter("counter-for-remove")
+ val histogram = kamon.userMetrics.histogram("histogram-for-remove")
+ val minMaxCounter = kamon.userMetrics.minMaxCounter("min-max-counter-for-remove")
+ val gauge = kamon.userMetrics.gauge("gauge-for-remove", { () ⇒ 2L })
+
+ kamon.userMetrics.removeCounter("counter-for-remove")
+ kamon.userMetrics.removeHistogram("histogram-for-remove")
+ kamon.userMetrics.removeMinMaxCounter("min-max-counter-for-remove")
+ kamon.userMetrics.removeGauge("gauge-for-remove")
+
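+ // Re-registering under the same names must yield fresh instances, proving the originals were removed.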
+ counter should not be (theSameInstanceAs(kamon.userMetrics.counter("counter-for-remove")))
+ histogram should not be (theSameInstanceAs(kamon.userMetrics.histogram("histogram-for-remove")))
+ minMaxCounter should not be (theSameInstanceAs(kamon.userMetrics.minMaxCounter("min-max-counter-for-remove")))
+ gauge should not be (theSameInstanceAs(kamon.userMetrics.gauge("gauge-for-remove", { () ⇒ 2L })))
}
}
}
diff --git a/kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala b/kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala
index 1a93e1f6..500a69c5 100644
--- a/kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/instrument/CounterSpec.scala
@@ -2,7 +2,6 @@ package kamon.metric.instrument
import java.nio.LongBuffer
-import kamon.metric.CollectionContext
import org.scalatest.{ Matchers, WordSpec }
class CounterSpec extends WordSpec with Matchers {
diff --git a/kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala b/kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala
index 9192d999..bd39652c 100644
--- a/kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/instrument/GaugeSpec.scala
@@ -1,72 +1,62 @@
package kamon.metric.instrument
import java.util.concurrent.atomic.AtomicLong
-
-import akka.actor.ActorSystem
-import com.typesafe.config.ConfigFactory
-import kamon.Kamon
-import kamon.metric.{ Metrics, Scale, CollectionContext }
-import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.metric.instrument.Histogram.DynamicRange
+import kamon.testkit.BaseKamonSpec
import scala.concurrent.duration._
-class GaugeSpec extends WordSpecLike with Matchers {
- implicit val system = ActorSystem("gauge-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | flush-interval = 1 hour
- | default-collection-context-buffer-size = 10
- | precision {
- | default-gauge-precision {
- | refresh-interval = 100 milliseconds
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | }
- |}
- """.stripMargin))
+class GaugeSpec extends BaseKamonSpec("gauge-spec") {
"a Gauge" should {
- "automatically record the current value using the configured refresh-interval" in {
- val numberOfValuesRecorded = new AtomicLong(0)
- val gauge = Gauge.fromDefaultConfig(system) { () ⇒ numberOfValuesRecorded.addAndGet(1) }
-
+ "automatically record the current value using the configured refresh-interval" in new GaugeFixture {
+ val (numberOfValuesRecorded, gauge) = createGauge()
Thread.sleep(1.second.toMillis)
+
numberOfValuesRecorded.get() should be(10L +- 1L)
gauge.cleanup
}
- "stop automatically recording after a call to cleanup" in {
- val numberOfValuesRecorded = new AtomicLong(0)
- val gauge = Gauge.fromDefaultConfig(system) { () ⇒ numberOfValuesRecorded.addAndGet(1) }
-
+ "stop automatically recording after a call to cleanup" in new GaugeFixture {
+ val (numberOfValuesRecorded, gauge) = createGauge()
Thread.sleep(1.second.toMillis)
+
gauge.cleanup
numberOfValuesRecorded.get() should be(10L +- 1L)
Thread.sleep(1.second.toMillis)
+
numberOfValuesRecorded.get() should be(10L +- 1L)
}
- "produce a Histogram snapshot including all the recorded values" in {
- val numberOfValuesRecorded = new AtomicLong(0)
- val gauge = Gauge.fromDefaultConfig(system) { () ⇒ numberOfValuesRecorded.addAndGet(1) }
+ "produce a Histogram snapshot including all the recorded values" in new GaugeFixture {
+ val (numberOfValuesRecorded, gauge) = createGauge()
Thread.sleep(1.second.toMillis)
gauge.cleanup
- val snapshot = gauge.collect(Kamon(Metrics).buildDefaultCollectionContext)
+ val snapshot = gauge.collect(kamon.metrics.buildDefaultCollectionContext)
snapshot.numberOfMeasurements should be(10L +- 1L)
snapshot.min should be(1)
snapshot.max should be(10L +- 1L)
}
- "not record the current value when doing a collection" in {
- val numberOfValuesRecorded = new AtomicLong(0)
- val gauge = Gauge(Histogram.Precision.Normal, 10000L, Scale.Unit, 1 hour, system)(() ⇒ numberOfValuesRecorded.addAndGet(1))
-
- val snapshot = gauge.collect(Kamon(Metrics).buildDefaultCollectionContext)
+ "not record the current value when doing a collection" in new GaugeFixture {
+ val (numberOfValuesRecorded, gauge) = createGauge(10 seconds)
+ val snapshot = gauge.collect(kamon.metrics.buildDefaultCollectionContext)
snapshot.numberOfMeasurements should be(0)
numberOfValuesRecorded.get() should be(0)
}
}
+
+ trait GaugeFixture {
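+ // The default 100ms refresh interval records roughly 10 values per second, which the (10L +- 1L) assertions above rely on.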
+ def createGauge(refreshInterval: FiniteDuration = 100 millis): (AtomicLong, Gauge) = {
+ val recordedValuesCounter = new AtomicLong(0)
+ val gauge = Gauge(DynamicRange(1, 100, 2), refreshInterval, kamon.metrics.settings.refreshScheduler, {
+ () ⇒ recordedValuesCounter.addAndGet(1)
+ })
+
+ (recordedValuesCounter, gauge)
+ }
+
+ }
}
diff --git a/kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala b/kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala
index c3060d4a..9a50e149 100644
--- a/kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/instrument/HistogramSpec.scala
@@ -18,22 +18,13 @@ package kamon.metric.instrument
import java.nio.LongBuffer
-import com.typesafe.config.ConfigFactory
-import kamon.metric.CollectionContext
+import kamon.metric.instrument.Histogram.DynamicRange
import org.scalatest.{ Matchers, WordSpec }
import scala.util.Random
class HistogramSpec extends WordSpec with Matchers {
- val histogramConfig = ConfigFactory.parseString(
- """
- |
- |highest-trackable-value = 100000
- |significant-value-digits = 2
- |
- """.stripMargin)
-
"a Histogram" should {
"allow record values within the configured range" in new HistogramFixture {
histogram.record(1000)
@@ -109,7 +100,7 @@ class HistogramSpec extends WordSpec with Matchers {
val buffer: LongBuffer = LongBuffer.allocate(10000)
}
- val histogram = Histogram.fromConfig(histogramConfig)
+ val histogram = Histogram(DynamicRange(1, 100000, 2))
def takeSnapshot(): Histogram.Snapshot = histogram.collect(collectionContext)
}
@@ -119,17 +110,20 @@ class HistogramSpec extends WordSpec with Matchers {
val buffer: LongBuffer = LongBuffer.allocate(10000)
}
- val controlHistogram = Histogram.fromConfig(histogramConfig)
- val histogramA = Histogram.fromConfig(histogramConfig)
- val histogramB = Histogram.fromConfig(histogramConfig)
+ val controlHistogram = Histogram(DynamicRange(1, 100000, 2))
+ val histogramA = Histogram(DynamicRange(1, 100000, 2))
+ val histogramB = Histogram(DynamicRange(1, 100000, 2))
+
+ def takeSnapshotFrom(histogram: Histogram): InstrumentSnapshot = histogram.collect(collectionContext)
- def takeSnapshotFrom(histogram: Histogram): Histogram.Snapshot = histogram.collect(collectionContext)
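+ // Snapshots are now handled through the InstrumentSnapshot supertype, so downcast to Histogram.Snapshot before comparing field by field.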
+ def assertEquals(left: InstrumentSnapshot, right: InstrumentSnapshot): Unit = {
+ val leftSnapshot = left.asInstanceOf[Histogram.Snapshot]
+ val rightSnapshot = right.asInstanceOf[Histogram.Snapshot]
- def assertEquals(left: Histogram.Snapshot, right: Histogram.Snapshot): Unit = {
- left.numberOfMeasurements should equal(right.numberOfMeasurements)
- left.min should equal(right.min)
- left.max should equal(right.max)
- left.recordsIterator.toStream should contain theSameElementsAs (right.recordsIterator.toStream)
+ leftSnapshot.numberOfMeasurements should equal(rightSnapshot.numberOfMeasurements)
+ leftSnapshot.min should equal(rightSnapshot.min)
+ leftSnapshot.max should equal(rightSnapshot.max)
+ leftSnapshot.recordsIterator.toStream should contain theSameElementsAs (rightSnapshot.recordsIterator.toStream)
}
}
}
diff --git a/kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala b/kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala
index 2c11adc3..7a3d7aa3 100644
--- a/kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala
+++ b/kamon-core/src/test/scala/kamon/metric/instrument/MinMaxCounterSpec.scala
@@ -19,19 +19,11 @@ import java.nio.LongBuffer
import akka.actor._
import akka.testkit.TestProbe
-import com.typesafe.config.ConfigFactory
-import kamon.metric.CollectionContext
-import kamon.metric.instrument.Histogram.MutableRecord
-import org.scalatest.{ Matchers, WordSpecLike }
-
-class MinMaxCounterSpec extends WordSpecLike with Matchers {
- implicit val system = ActorSystem("min-max-counter-spec")
- val minMaxCounterConfig = ConfigFactory.parseString(
- """
- |refresh-interval = 1 hour
- |highest-trackable-value = 1000
- |significant-value-digits = 2
- """.stripMargin)
+import kamon.metric.instrument.Histogram.{ DynamicRange, MutableRecord }
+import kamon.testkit.BaseKamonSpec
+import scala.concurrent.duration._
+
+class MinMaxCounterSpec extends BaseKamonSpec("min-max-counter-spec") {
"the MinMaxCounter" should {
"track ascending tendencies" in new MinMaxCounterFixture {
@@ -104,7 +96,7 @@ class MinMaxCounterSpec extends WordSpecLike with Matchers {
workers foreach (_ ! "increment")
for (refresh ← 1 to 1000) {
collectCounterSnapshot()
- Thread.sleep(10)
+ Thread.sleep(1)
}
monitor.expectNoMsg()
@@ -117,7 +109,7 @@ class MinMaxCounterSpec extends WordSpecLike with Matchers {
val buffer: LongBuffer = LongBuffer.allocate(64)
}
- val mmCounter = MinMaxCounter.fromConfig(minMaxCounterConfig, system).asInstanceOf[PaddedMinMaxCounter]
+ val mmCounter = MinMaxCounter(DynamicRange(1, 1000, 2), 1 hour, kamon.metrics.settings.refreshScheduler)
mmCounter.cleanup // cancel the refresh schedule
def collectCounterSnapshot(): Histogram.Snapshot = mmCounter.collect(collectionContext)
diff --git a/kamon-core/src/test/scala/kamon/testkit/BaseKamonSpec.scala b/kamon-core/src/test/scala/kamon/testkit/BaseKamonSpec.scala
new file mode 100644
index 00000000..20fc3ed5
--- /dev/null
+++ b/kamon-core/src/test/scala/kamon/testkit/BaseKamonSpec.scala
@@ -0,0 +1,34 @@
+package kamon.testkit
+
+import akka.testkit.{ ImplicitSender, TestKitBase }
+import akka.actor.ActorSystem
+import com.typesafe.config.{ Config, ConfigFactory }
+import kamon.Kamon
+import kamon.metric.{ SubscriptionsDispatcher, EntitySnapshot, MetricsExtensionImpl }
+import kamon.trace.TraceContext
+import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }
+
+abstract class BaseKamonSpec(actorSystemName: String) extends TestKitBase with WordSpecLike with Matchers with ImplicitSender with BeforeAndAfterAll {
+ lazy val kamon = Kamon(actorSystemName, config)
+ lazy val collectionContext = kamon.metrics.buildDefaultCollectionContext
+ implicit lazy val system: ActorSystem = kamon.actorSystem
+
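+ // Specs override this to supply their own settings; by default the plain reference configuration is loaded.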
+ def config: Config =
+ ConfigFactory.load()
+
+ def newContext(name: String): TraceContext =
+ kamon.tracer.newContext(name)
+
+ def newContext(name: String, token: String): TraceContext =
+ kamon.tracer.newContext(name, token)
+
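+ // Looks up the entity recorder registered under (name, category) and collects a snapshot, which also resets the recorder's instruments.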
+ def takeSnapshotOf(name: String, category: String): EntitySnapshot = {
+ val recorder = kamon.metrics.find(name, category).get
+ recorder.collect(collectionContext)
+ }
+
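+ // Triggers an immediate metrics tick; specs keep tick-interval at 1 hour so snapshots are only produced on demand.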
+ def flushSubscriptions(): Unit =
+ system.actorSelection("/user/kamon/subscriptions-dispatcher") ! SubscriptionsDispatcher.Tick
+
+ override protected def afterAll(): Unit = system.shutdown()
+}
diff --git a/kamon-core/src/test/scala/kamon/trace/SimpleTraceSpec.scala b/kamon-core/src/test/scala/kamon/trace/SimpleTraceSpec.scala
index cda9cad7..0cb4ce34 100644
--- a/kamon-core/src/test/scala/kamon/trace/SimpleTraceSpec.scala
+++ b/kamon-core/src/test/scala/kamon/trace/SimpleTraceSpec.scala
@@ -16,58 +16,40 @@
package kamon.trace
-import akka.actor.ActorSystem
-import akka.testkit.{ ImplicitSender, TestKitBase }
import com.typesafe.config.ConfigFactory
import kamon.Kamon
-import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.testkit.BaseKamonSpec
import scala.concurrent.duration._
-class SimpleTraceSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender {
- implicit lazy val system: ActorSystem = ActorSystem("simple-trace-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 hour
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = [ "non-tracked-trace"]
- | }
- | }
- | ]
- | precision {
- | default-histogram-precision {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- |
- | default-min-max-counter-precision {
- | refresh-interval = 1 second
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | }
- |}
- |
- |kamon.trace {
- | level = simple-trace
- | sampling = all
- |}
- """.stripMargin))
+class SimpleTraceSpec extends BaseKamonSpec("simple-trace-spec") {
+
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon {
+ | metric {
+ | tick-interval = 1 hour
+ | }
+ |
+ | trace {
+ | level-of-detail = simple-trace
+ | sampling = all
+ | }
+ |}
+ """.stripMargin)
"the simple tracing" should {
"send a TraceInfo when the trace has finished and all segments are finished" in {
- Kamon(Trace)(system).subscribe(testActor)
+ Kamon(Tracer)(system).subscribe(testActor)
- TraceRecorder.withNewTraceContext("simple-trace-without-segments") {
- TraceRecorder.currentContext.startSegment("segment-one", "test-segment", "test").finish()
- TraceRecorder.currentContext.startSegment("segment-two", "test-segment", "test").finish()
- TraceRecorder.finish()
+ TraceContext.withContext(newContext("simple-trace-without-segments")) {
+ TraceContext.currentContext.startSegment("segment-one", "test-segment", "test").finish()
+ TraceContext.currentContext.startSegment("segment-two", "test-segment", "test").finish()
+ TraceContext.currentContext.finish()
}
val traceInfo = expectMsgType[TraceInfo]
- Kamon(Trace)(system).unsubscribe(testActor)
+ Kamon(Tracer)(system).unsubscribe(testActor)
traceInfo.name should be("simple-trace-without-segments")
traceInfo.segments.size should be(2)
@@ -76,12 +58,12 @@ class SimpleTraceSpec extends TestKitBase with WordSpecLike with Matchers with I
}
"incubate the tracing context if there are open segments after finishing" in {
- Kamon(Trace)(system).subscribe(testActor)
+ Kamon(Tracer)(system).subscribe(testActor)
- val secondSegment = TraceRecorder.withNewTraceContext("simple-trace-without-segments") {
- TraceRecorder.currentContext.startSegment("segment-one", "test-segment", "test").finish()
- val segment = TraceRecorder.currentContext.startSegment("segment-two", "test-segment", "test")
- TraceRecorder.finish()
+ val secondSegment = TraceContext.withContext(newContext("simple-trace-without-segments")) {
+ TraceContext.currentContext.startSegment("segment-one", "test-segment", "test").finish()
+ val segment = TraceContext.currentContext.startSegment("segment-two", "test-segment", "test")
+ TraceContext.currentContext.finish()
segment
}
@@ -90,7 +72,7 @@ class SimpleTraceSpec extends TestKitBase with WordSpecLike with Matchers with I
within(10 seconds) {
val traceInfo = expectMsgType[TraceInfo]
- Kamon(Trace)(system).unsubscribe(testActor)
+ Kamon(Tracer)(system).unsubscribe(testActor)
traceInfo.name should be("simple-trace-without-segments")
traceInfo.segments.size should be(2)
diff --git a/kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala b/kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala
index 0875deff..9d7725b7 100644
--- a/kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala
+++ b/kamon-core/src/test/scala/kamon/trace/TraceContextManipulationSpec.scala
@@ -1,94 +1,80 @@
package kamon.trace
-import akka.actor.ActorSystem
-import akka.testkit.{ ImplicitSender, TestKitBase }
import com.typesafe.config.ConfigFactory
-import org.scalatest.{ Matchers, WordSpecLike }
+import kamon.testkit.BaseKamonSpec
-class TraceContextManipulationSpec extends TestKitBase with WordSpecLike with Matchers with ImplicitSender {
- implicit lazy val system: ActorSystem = ActorSystem("trace-metrics-spec", ConfigFactory.parseString(
- """
- |kamon.metrics {
- | tick-interval = 1 hour
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = [ "non-tracked-trace"]
- | }
- | }
- | ]
- | precision {
- | default-histogram-precision {
- | highest-trackable-value = 3600000000000
- | significant-value-digits = 2
- | }
- |
- | default-min-max-counter-precision {
- | refresh-interval = 1 second
- | highest-trackable-value = 999999999
- | significant-value-digits = 2
- | }
- | }
- |}
- """.stripMargin))
+class TraceContextManipulationSpec extends BaseKamonSpec("trace-metrics-spec") {
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ |
+ | filters {
+ | trace {
+ | includes = [ "*" ]
+ | excludes = [ "non-tracked-trace"]
+ | }
+ | }
+ |}
+ """.stripMargin)
- "the TraceRecorder api" should {
+ "the TraceContext api" should {
"allow starting a trace within a specified block of code, and only within that block of code" in {
- val createdContext = TraceRecorder.withNewTraceContext("start-context") {
- TraceRecorder.currentContext should not be empty
- TraceRecorder.currentContext
+ val createdContext = TraceContext.withContext(newContext("start-context")) {
+ TraceContext.currentContext should not be empty
+ TraceContext.currentContext
}
- TraceRecorder.currentContext shouldBe empty
+ TraceContext.currentContext shouldBe empty
createdContext.name shouldBe ("start-context")
}
"allow starting a trace within a specified block of code, providing a trace-token and only within that block of code" in {
- val createdContext = TraceRecorder.withNewTraceContext("start-context-with-token", Some("token-1")) {
- TraceRecorder.currentContext should not be empty
- TraceRecorder.currentContext
+ val createdContext = TraceContext.withContext(newContext("start-context-with-token", "token-1")) {
+ TraceContext.currentContext should not be empty
+ TraceContext.currentContext
}
- TraceRecorder.currentContext shouldBe empty
+ TraceContext.currentContext shouldBe empty
createdContext.name shouldBe ("start-context-with-token")
createdContext.token should be("token-1")
}
"allow providing a TraceContext and make it available within a block of code" in {
- val createdContext = TraceRecorder.withNewTraceContext("manually-provided-trace-context") { TraceRecorder.currentContext }
+ val createdContext = newContext("manually-provided-trace-context")
- TraceRecorder.currentContext shouldBe empty
- TraceRecorder.withTraceContext(createdContext) {
- TraceRecorder.currentContext should be(createdContext)
+ TraceContext.currentContext shouldBe empty
+ TraceContext.withContext(createdContext) {
+ TraceContext.currentContext should be(createdContext)
}
- TraceRecorder.currentContext shouldBe empty
+ TraceContext.currentContext shouldBe empty
}
"allow renaming a trace" in {
- val createdContext = TraceRecorder.withNewTraceContext("trace-before-rename") {
- TraceRecorder.rename("renamed-trace")
- TraceRecorder.currentContext
+ val createdContext = TraceContext.withContext(newContext("trace-before-rename")) {
+ TraceContext.currentContext.rename("renamed-trace")
+ TraceContext.currentContext
}
- TraceRecorder.currentContext shouldBe empty
+ TraceContext.currentContext shouldBe empty
createdContext.name shouldBe ("renamed-trace")
}
"allow creating a segment within a trace" in {
- val createdContext = TraceRecorder.withNewTraceContext("trace-with-segments") {
- val segment = TraceRecorder.currentContext.startSegment("segment-1", "segment-1-category", "segment-library")
- TraceRecorder.currentContext
+ val createdContext = TraceContext.withContext(newContext("trace-with-segments")) {
+ val segment = TraceContext.currentContext.startSegment("segment-1", "segment-1-category", "segment-library")
+ TraceContext.currentContext
}
- TraceRecorder.currentContext shouldBe empty
+ TraceContext.currentContext shouldBe empty
createdContext.name shouldBe ("trace-with-segments")
}
"allow renaming a segment" in {
- TraceRecorder.withNewTraceContext("trace-with-renamed-segment") {
- val segment = TraceRecorder.currentContext.startSegment("original-segment-name", "segment-label", "segment-library")
+ TraceContext.withContext(newContext("trace-with-renamed-segment")) {
+ val segment = TraceContext.currentContext.startSegment("original-segment-name", "segment-label", "segment-library")
segment.name should be("original-segment-name")
segment.rename("new-segment-name")
diff --git a/kamon-core/src/test/scala/kamon/trace/TraceLocalSpec.scala b/kamon-core/src/test/scala/kamon/trace/TraceLocalSpec.scala
index f2b25820..8bacca83 100644
--- a/kamon-core/src/test/scala/kamon/trace/TraceLocalSpec.scala
+++ b/kamon-core/src/test/scala/kamon/trace/TraceLocalSpec.scala
@@ -16,24 +16,21 @@
package kamon.trace
-import akka.actor.ActorSystem
-import akka.testkit.TestKit
+import kamon.testkit.BaseKamonSpec
import kamon.trace.TraceLocal.AvailableToMdc
import kamon.trace.logging.MdcKeysSupport
import org.scalatest.concurrent.PatienceConfiguration
-import org.scalatest.{ Matchers, OptionValues, WordSpecLike }
+import org.scalatest.OptionValues
import org.slf4j.MDC
-class TraceLocalSpec extends TestKit(ActorSystem("trace-local-spec")) with WordSpecLike with Matchers
- with PatienceConfiguration with OptionValues with MdcKeysSupport {
-
+class TraceLocalSpec extends BaseKamonSpec("trace-local-spec") with PatienceConfiguration with OptionValues with MdcKeysSupport {
val SampleTraceLocalKeyAvailableToMDC = AvailableToMdc("someKey")
object SampleTraceLocalKey extends TraceLocal.TraceLocalKey { type ValueType = String }
"the TraceLocal storage" should {
"allow storing and retrieving values" in {
- TraceRecorder.withNewTraceContext("store-and-retrieve-trace-local") {
+ TraceContext.withContext(newContext("store-and-retrieve-trace-local")) {
val testString = "Hello World"
TraceLocal.store(SampleTraceLocalKey)(testString)
@@ -42,7 +39,7 @@ class TraceLocalSpec extends TestKit(ActorSystem("trace-local-spec")) with WordS
}
"return None when retrieving a non existent key" in {
- TraceRecorder.withNewTraceContext("non-existent-key") {
+ TraceContext.withContext(newContext("non-existent-key")) {
TraceLocal.retrieve(SampleTraceLocalKey) should equal(None)
}
}
@@ -53,22 +50,22 @@ class TraceLocalSpec extends TestKit(ActorSystem("trace-local-spec")) with WordS
"be attached to the TraceContext when it is propagated" in {
val testString = "Hello World"
- val testContext = TraceRecorder.withNewTraceContext("manually-propagated-trace-local") {
+ val testContext = TraceContext.withContext(newContext("manually-propagated-trace-local")) {
TraceLocal.store(SampleTraceLocalKey)(testString)
TraceLocal.retrieve(SampleTraceLocalKey).value should equal(testString)
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
/** No TraceLocal should be available here */
TraceLocal.retrieve(SampleTraceLocalKey) should equal(None)
- TraceRecorder.withTraceContext(testContext) {
+ TraceContext.withContext(testContext) {
TraceLocal.retrieve(SampleTraceLocalKey).value should equal(testString)
}
}
"allow retrieve a value from the MDC when was created a key with AvailableToMdc(cool-key)" in {
- TraceRecorder.withNewTraceContext("store-and-retrieve-trace-local-and-copy-to-mdc") {
+ TraceContext.withContext(newContext("store-and-retrieve-trace-local-and-copy-to-mdc")) {
val testString = "Hello MDC"
TraceLocal.store(SampleTraceLocalKeyAvailableToMDC)(testString)
@@ -81,7 +78,7 @@ class TraceLocalSpec extends TestKit(ActorSystem("trace-local-spec")) with WordS
}
"allow retrieve a value from the MDC when was created a key with AvailableToMdc.storeForMdc(String, String)" in {
- TraceRecorder.withNewTraceContext("store-and-retrieve-trace-local-and-copy-to-mdc") {
+ TraceContext.withContext(newContext("store-and-retrieve-trace-local-and-copy-to-mdc")) {
val testString = "Hello MDC"
TraceLocal.storeForMdc("someKey", testString)
diff --git a/kamon-core/src/test/scala/kamon/util/GlobPathFilterSpec.scala b/kamon-core/src/test/scala/kamon/util/GlobPathFilterSpec.scala
index 83992e61..ab98d0ac 100644
--- a/kamon-core/src/test/scala/kamon/util/GlobPathFilterSpec.scala
+++ b/kamon-core/src/test/scala/kamon/util/GlobPathFilterSpec.scala
@@ -40,6 +40,13 @@ class GlobPathFilterSpec extends WordSpecLike with Matchers {
filter.accept("/user/something/otherActor") shouldBe false
}
+ "match all expressions in the same levelss" in {
+ val filter = new GlobPathFilter("**")
+
+ filter.accept("GET: /ping") shouldBe true
+ filter.accept("GET: /ping/pong") shouldBe true
+ }
+
"match all expressions and crosses the path boundaries" in {
val filter = new GlobPathFilter("/user/actor-**")
@@ -51,7 +58,7 @@ class GlobPathFilterSpec extends WordSpecLike with Matchers {
filter.accept("/user/something/otherActor") shouldBe false
}
- "match exactly one characterr" in {
+ "match exactly one character" in {
val filter = new GlobPathFilter("/user/actor-?")
filter.accept("/user/actor-1") shouldBe true
diff --git a/kamon-datadog/src/main/resources/reference.conf b/kamon-datadog/src/main/resources/reference.conf
index de318820..4d0639c0 100644
--- a/kamon-datadog/src/main/resources/reference.conf
+++ b/kamon-datadog/src/main/resources/reference.conf
@@ -18,11 +18,12 @@ kamon {
# Subscription patterns used to select which metrics will be pushed to Datadog. Note that metrics
# collection for your desired entities must first be activated under the kamon.metrics.filters settings.
- includes {
- actor = [ "*" ]
- trace = [ "*" ]
- dispatcher = [ "*" ]
- router = [ "*" ]
+ subscriptions {
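+ # The "**" glob crosses path boundaries, unlike "*", so every entity in each category is included.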
+ trace = [ "**" ]
+ actor = [ "**" ]
+ dispatcher = [ "**" ]
+ user-metric = [ "**" ]
+ system-metric = [ "**" ]
}
# Enable system metrics
diff --git a/kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala b/kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala
index 596a6765..2648d6ef 100644
--- a/kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala
+++ b/kamon-datadog/src/main/scala/kamon/datadog/Datadog.scala
@@ -22,11 +22,8 @@ import java.util.concurrent.TimeUnit.MILLISECONDS
import akka.actor._
import akka.event.Logging
import kamon.Kamon
-import kamon.akka.{RouterMetrics, DispatcherMetrics, ActorMetrics}
-import kamon.http.HttpServerMetrics
-import kamon.metric.UserMetrics.{ UserGauges, UserMinMaxCounters, UserCounters, UserHistograms }
+import kamon.util.ConfigTools.Syntax
import kamon.metric._
-import kamon.metrics._
import scala.collection.JavaConverters._
import scala.concurrent.duration._
@@ -34,13 +31,10 @@ import scala.concurrent.duration._
object Datadog extends ExtensionId[DatadogExtension] with ExtensionIdProvider {
override def lookup(): ExtensionId[_ <: Extension] = Datadog
override def createExtension(system: ExtendedActorSystem): DatadogExtension = new DatadogExtension(system)
-
- trait MetricKeyGenerator {
- def generateKey(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String
- }
}
class DatadogExtension(system: ExtendedActorSystem) extends Kamon.Extension {
+ implicit val as = system
val log = Logging(system, classOf[DatadogExtension])
log.info("Starting the Kamon(Datadog) extension")
@@ -53,57 +47,11 @@ class DatadogExtension(system: ExtendedActorSystem) extends Kamon.Extension {
val datadogMetricsListener = buildMetricsListener(tickInterval, flushInterval)
- // Subscribe to all user metrics
- Kamon(Metrics)(system).subscribe(UserHistograms, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(UserCounters, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(UserMinMaxCounters, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(UserGauges, "*", datadogMetricsListener, permanently = true)
-
- // Subscribe to server metrics
- Kamon(Metrics)(system).subscribe(HttpServerMetrics.category, "*", datadogMetricsListener, permanently = true)
-
- // Subscribe to Actors
- val includedActors = datadogConfig.getStringList("includes.actor").asScala
- for (actorPathPattern ← includedActors) {
- Kamon(Metrics)(system).subscribe(ActorMetrics, actorPathPattern, datadogMetricsListener, permanently = true)
- }
-
- // Subscribe to Routers
- val includedRouters = datadogConfig.getStringList("includes.router").asScala
- for (routerPathPattern ← includedRouters) {
- Kamon(Metrics)(system).subscribe(RouterMetrics, routerPathPattern, datadogMetricsListener, permanently = true)
- }
-
- // Subscribe to Traces
- val includedTraces = datadogConfig.getStringList("includes.trace").asScala
- for (tracePathPattern ← includedTraces) {
- Kamon(Metrics)(system).subscribe(TraceMetrics, tracePathPattern, datadogMetricsListener, permanently = true)
- }
-
- // Subscribe to Dispatchers
- val includedDispatchers = datadogConfig.getStringList("includes.dispatcher").asScala
- for (dispatcherPathPattern ← includedDispatchers) {
- Kamon(Metrics)(system).subscribe(DispatcherMetrics, dispatcherPathPattern, datadogMetricsListener, permanently = true)
- }
-
- // Subscribe to SystemMetrics
- val includeSystemMetrics = datadogConfig.getBoolean("report-system-metrics")
- if (includeSystemMetrics) {
- //OS
- Kamon(Metrics)(system).subscribe(CPUMetrics, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(ProcessCPUMetrics, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(MemoryMetrics, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(NetworkMetrics, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(DiskMetrics, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(ContextSwitchesMetrics, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(LoadAverageMetrics, "*", datadogMetricsListener, permanently = true)
-
- //JVM
- Kamon(Metrics)(system).subscribe(HeapMetrics, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(NonHeapMetrics, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(ThreadMetrics, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(ClassLoadingMetrics, "*", datadogMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(GCMetrics, "*", datadogMetricsListener, permanently = true)
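+ // Subscribe the listener to every glob pattern configured under each subscription category (trace, actor, user-metric, etc.).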
+ val subscriptions = datadogConfig.getConfig("subscriptions")
+ subscriptions.firstLevelKeys.map { subscriptionCategory ⇒
+ subscriptions.getStringList(subscriptionCategory).asScala.foreach { pattern ⇒
+ Kamon(Metrics).subscribe(subscriptionCategory, pattern, datadogMetricsListener, permanently = true)
+ }
}
def buildMetricsListener(tickInterval: Long, flushInterval: Long): ActorRef = {
diff --git a/kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala b/kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala
index 195798fe..80d4f098 100644
--- a/kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala
+++ b/kamon-datadog/src/main/scala/kamon/datadog/DatadogMetricsSender.scala
@@ -20,11 +20,10 @@ import akka.actor.{ ActorSystem, Props, ActorRef, Actor }
import akka.io.{ Udp, IO }
import java.net.InetSocketAddress
import akka.util.ByteString
-import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import java.text.{ DecimalFormatSymbols, DecimalFormat }
-import kamon.metric.UserMetrics.UserMetricGroup
import kamon.metric.instrument.{ Counter, Histogram }
-import kamon.metric.{ MetricIdentity, MetricGroupIdentity }
+import kamon.metric.{ MetricKey, Entity }
import java.util.Locale
class DatadogMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long) extends Actor with UdpExtensionProvider {
@@ -68,17 +67,19 @@ class DatadogMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long
}
case cs: Counter.Snapshot ⇒
- val measurementData = formatMeasurement(groupIdentity, metricIdentity, encodeDatadogCounter(cs.count))
- packetBuilder.appendMeasurement(key, measurementData)
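+ // Skip counters with no increments during the tick so empty measurements are never sent.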
+ if (cs.count > 0) {
+ val measurementData = formatMeasurement(groupIdentity, metricIdentity, encodeDatadogCounter(cs.count))
+ packetBuilder.appendMeasurement(key, measurementData)
+ }
}
}
packetBuilder.flush()
}
- def formatMeasurement(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity, measurementData: String): String =
+ def formatMeasurement(entity: Entity, metricKey: MetricKey, measurementData: String): String =
StringBuilder.newBuilder
.append(measurementData)
- .append(buildIdentificationTag(groupIdentity, metricIdentity))
+ .append(buildIdentificationTag(entity, metricKey))
.result()
def encodeDatadogTimer(level: Long, count: Long): String = {
@@ -88,23 +89,12 @@ class DatadogMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long
def encodeDatadogCounter(count: Long): String = count.toString + "|c"
- def buildMetricName(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String =
- if (isUserMetric(groupIdentity))
- s"$appName.${groupIdentity.category.name}.${groupIdentity.name}"
- else
- s"$appName.${groupIdentity.category.name}.${metricIdentity.name}"
-
- def buildIdentificationTag(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String = {
- if (isUserMetric(groupIdentity)) "" else {
- // Make the automatic HTTP trace names a bit more friendly
- val normalizedEntityName = groupIdentity.name.replace(": ", ":")
- s"|#${groupIdentity.category.name}:${normalizedEntityName}"
- }
- }
+ def buildMetricName(entity: Entity, metricKey: MetricKey): String =
+ s"$appName.${entity.category}.${metricKey.name}"
- def isUserMetric(groupIdentity: MetricGroupIdentity): Boolean = groupIdentity match {
- case someUserMetric: UserMetricGroup ⇒ true
- case everythingElse ⇒ false
+ def buildIdentificationTag(entity: Entity, metricKey: MetricKey): String = {
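+ // Make the automatic HTTP trace names a bit more friendly (e.g. "GET: /path" becomes "GET:/path") before using them as tags.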
+ val normalizedEntityName = entity.name.replace(": ", ":")
+ s"|#${entity.category}:${normalizedEntityName}"
}
}
diff --git a/kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala b/kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala
index f910489a..1fcc0c5d 100644
--- a/kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala
+++ b/kamon-datadog/src/test/scala/kamon/datadog/DatadogMetricSenderSpec.scala
@@ -18,13 +18,13 @@ package kamon.datadog
import akka.testkit.{ TestKitBase, TestProbe }
import akka.actor.{ Props, ActorRef, ActorSystem }
-import kamon.{ MilliTimestamp, Kamon }
-import kamon.metric.instrument.Histogram.Precision
-import kamon.metric.instrument.{ Counter, Histogram, HdrHistogram, LongAdderCounter }
+import kamon.Kamon
+import kamon.metric.instrument._
+import kamon.util.MilliTimestamp
import org.scalatest.{ Matchers, WordSpecLike }
import kamon.metric._
import akka.io.Udp
-import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import java.lang.management.ManagementFactory
import java.net.InetSocketAddress
import com.typesafe.config.ConfigFactory
@@ -48,79 +48,69 @@ class DatadogMetricSenderSpec extends TestKitBase with WordSpecLike with Matcher
"the DataDogMetricSender" should {
"send latency measurements" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- testRecorder.record(10L)
+ val (entity, testRecorder) = buildRecorder("datadog")
+ testRecorder.metricOne.record(10L)
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val udp = setup(Map(entity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
- data.utf8String should be(s"kamon.actor.processing-time:10|ms|#actor:user/kamon")
+ data.utf8String should be(s"kamon.category.metric-one:10|ms|#category:datadog")
}
"include the sampling rate in case of multiple measurements of the same value" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- testRecorder.record(10L)
- testRecorder.record(10L)
+ val (entity, testRecorder) = buildRecorder("datadog")
+ testRecorder.metricTwo.record(10L, 2)
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val udp = setup(Map(entity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
- data.utf8String should be(s"kamon.actor.processing-time:10|ms|@0.5|#actor:user/kamon")
+ data.utf8String should be(s"kamon.category.metric-two:10|ms|@0.5|#category:datadog")
}
"flush the packet when the max-packet-size is reached" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testRecorder = Histogram(10000L, Precision.Normal, Scale.Unit)
+ val (entity, testRecorder) = buildRecorder("datadog")
var bytes = 0
var level = 0
while (bytes <= testMaxPacketSize) {
level += 1
- testRecorder.record(level)
- bytes += s"kamon.actor.$testMetricName:$level|ms|#actor:user/kamon".length
+ testRecorder.metricOne.record(level)
+ bytes += s"kamon.category.metric-one:$level|ms|#category:datadog".length
}
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val udp = setup(Map(entity -> testRecorder.collect(collectionContext)))
udp.expectMsgType[Udp.Send] // let the first flush pass
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
- data.utf8String should be(s"kamon.actor.$testMetricName:$level|ms|#actor:user/kamon")
+ data.utf8String should be(s"kamon.category.metric-one:$level|ms|#category:datadog")
}
"render multiple keys in the same packet using newline as separator" in new UdpListenerFixture {
- val firstTestMetricName = "processing-time-1"
- val secondTestMetricName = "processing-time-2"
- val thirdTestMetricName = "counter"
+ val (entity, testRecorder) = buildRecorder("datadog")
- val firstTestRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- val secondTestRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- val thirdTestRecorder = Counter()
+ testRecorder.metricOne.record(10L, 2)
+ testRecorder.metricTwo.record(21L)
+ testRecorder.counterOne.increment(4L)
- firstTestRecorder.record(10L)
- firstTestRecorder.record(10L)
-
- secondTestRecorder.record(21L)
-
- thirdTestRecorder.increment(4L)
-
- val udp = setup(Map(
- firstTestMetricName -> firstTestRecorder.collect(collectionContext),
- secondTestMetricName -> secondTestRecorder.collect(collectionContext),
- thirdTestMetricName -> thirdTestRecorder.collect(collectionContext)))
+ val udp = setup(Map(entity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
- data.utf8String should be("kamon.actor.processing-time-1:10|ms|@0.5|#actor:user/kamon\nkamon.actor.processing-time-2:21|ms|#actor:user/kamon\nkamon.actor.counter:4|c|#actor:user/kamon")
+ data.utf8String should be("kamon.category.metric-one:10|ms|@0.5|#category:datadog\nkamon.category.counter:4|c|#category:datadog\nkamon.category.metric-two:21|ms|#category:datadog")
}
+
}
trait UdpListenerFixture {
val localhostName = ManagementFactory.getRuntimeMXBean.getName.split('@')(1)
val testMaxPacketSize = system.settings.config.getBytes("kamon.datadog.max-packet-size")
- def setup(metrics: Map[String, MetricSnapshot]): TestProbe = {
+ def buildRecorder(name: String): (Entity, TestEntityRecorder) = {
+ val registration = Kamon(Metrics).register(TestEntityRecorder, name).get
+ (registration.entity, registration.recorder)
+ }
+
+ def setup(metrics: Map[Entity, EntitySnapshot]): TestProbe = {
val udp = TestProbe()
val metricsSender = system.actorOf(Props(new DatadogMetricsSender(new InetSocketAddress(localhostName, 0), testMaxPacketSize) {
override def udpExtension(implicit system: ActorSystem): ActorRef = udp.ref
@@ -130,31 +120,21 @@ class DatadogMetricSenderSpec extends TestKitBase with WordSpecLike with Matcher
udp.expectMsgType[Udp.SimpleSender]
udp.reply(Udp.SimpleSenderReady)
-    // These names are not intended to match the real actor metrics, it's just about seeing more familiar data in tests.
- val testGroupIdentity = new MetricGroupIdentity {
- val name: String = "user/kamon"
- val category: MetricGroupCategory = new MetricGroupCategory {
- val name: String = "actor"
- }
- }
-
- val testMetrics = for ((metricName, snapshot) ← metrics) yield {
- val testMetricIdentity = new MetricIdentity {
- val name: String = metricName
- val tag: String = ""
- }
-
- (testMetricIdentity, snapshot)
- }
-
- metricsSender ! TickMetricSnapshot(new MilliTimestamp(0), new MilliTimestamp(0), Map(testGroupIdentity -> new MetricGroupSnapshot {
- type GroupSnapshotType = Histogram.Snapshot
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = ???
-
- val metrics: Map[MetricIdentity, MetricSnapshot] = testMetrics.toMap
- }))
+ val fakeSnapshot = TickMetricSnapshot(MilliTimestamp.now, MilliTimestamp.now, metrics)
+ metricsSender ! fakeSnapshot
udp
}
}
}
+
+class TestEntityRecorder(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val metricOne = histogram("metric-one")
+ val metricTwo = histogram("metric-two")
+ val counterOne = counter("counter")
+}
+
+object TestEntityRecorder extends EntityRecorderFactory[TestEntityRecorder] {
+ def category: String = "category"
+ def createRecorder(instrumentFactory: InstrumentFactory): TestEntityRecorder = new TestEntityRecorder(instrumentFactory)
+}
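
Note: the fixture above reduces the snapshot plumbing to four steps. A condensed sketch of that flow inside UdpListenerFixture; `collectionContext` and the actor system come from the spec's implicits:

    val (entity, testRecorder) = buildRecorder("datadog") // registers TestEntityRecorder under category "category"
    testRecorder.metricOne.record(10L)

    val snapshot = testRecorder.collect(collectionContext)
    val tick = TickMetricSnapshot(MilliTimestamp.now, MilliTimestamp.now, Map(entity -> snapshot))
    // `setup` sends an equivalent tick straight to the DatadogMetricsSender under test.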
diff --git a/kamon-jdbc/src/main/resources/reference.conf b/kamon-jdbc/src/main/resources/reference.conf
index 7c905f0b..e697d58c 100644
--- a/kamon-jdbc/src/main/resources/reference.conf
+++ b/kamon-jdbc/src/main/resources/reference.conf
@@ -15,15 +15,4 @@ kamon {
# Fully qualified name of the implementation of kamon.jdbc.JdbcNameGenerator that will be used for assigning names to segments.
name-generator = kamon.jdbc.DefaultJdbcNameGenerator
}
-
- metrics {
- precision {
- jdbc {
- statements {
- reads = ${kamon.metrics.precision.default-histogram-precision}
- writes = ${kamon.metrics.precision.default-histogram-precision}
- }
- }
- }
- }
} \ No newline at end of file
diff --git a/kamon-jdbc/src/main/scala/kamon/jdbc/instrumentation/StatementInstrumentation.scala b/kamon-jdbc/src/main/scala/kamon/jdbc/instrumentation/StatementInstrumentation.scala
index b2f35c6c..386cf019 100644
--- a/kamon-jdbc/src/main/scala/kamon/jdbc/instrumentation/StatementInstrumentation.scala
+++ b/kamon-jdbc/src/main/scala/kamon/jdbc/instrumentation/StatementInstrumentation.scala
@@ -17,13 +17,10 @@ package kamon.jdbc.instrumentation
import java.util.concurrent.TimeUnit.{ NANOSECONDS ⇒ nanos }
-import akka.actor.ActorSystem
-import kamon.Kamon
-import kamon.jdbc.Jdbc
+import kamon.jdbc.{ JdbcExtension, Jdbc }
import kamon.jdbc.metric.StatementsMetrics
-import kamon.jdbc.metric.StatementsMetricsGroupFactory.GroupRecorder
import kamon.metric.Metrics
-import kamon.trace.{ TraceContext, SegmentCategory, TraceRecorder }
+import kamon.trace.{ TraceContext, SegmentCategory }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation.{ Around, Aspect, Pointcut }
import org.slf4j.LoggerFactory
@@ -46,15 +43,16 @@ class StatementInstrumentation {
@Around("onExecuteStatement(sql) || onExecutePreparedStatement(sql) || onExecutePreparedCall(sql)")
def aroundExecuteStatement(pjp: ProceedingJoinPoint, sql: String): Any = {
- TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
-
- implicit val statementRecorder: Option[GroupRecorder] = Kamon(Metrics)(system).register(StatementsMetrics(Statements), StatementsMetrics.Factory)
+ TraceContext.map { ctx ⇒
+ val metricsExtension = ctx.lookupExtension(Metrics)
+ val jdbcExtension = ctx.lookupExtension(Jdbc)
+ implicit val statementRecorder = metricsExtension.register(StatementsMetrics, "jdbc-statements").map(_.recorder)
sql.replaceAll(CommentPattern, Empty) match {
- case SelectStatement(_) ⇒ withSegment(ctx, system, Select)(recordRead(pjp, sql, system))
- case InsertStatement(_) ⇒ withSegment(ctx, system, Insert)(recordWrite(pjp, sql, system))
- case UpdateStatement(_) ⇒ withSegment(ctx, system, Update)(recordWrite(pjp, sql, system))
- case DeleteStatement(_) ⇒ withSegment(ctx, system, Delete)(recordWrite(pjp, sql, system))
+ case SelectStatement(_) ⇒ withSegment(ctx, Select, jdbcExtension)(recordRead(pjp, sql, jdbcExtension))
+ case InsertStatement(_) ⇒ withSegment(ctx, Insert, jdbcExtension)(recordWrite(pjp, sql, jdbcExtension))
+ case UpdateStatement(_) ⇒ withSegment(ctx, Update, jdbcExtension)(recordWrite(pjp, sql, jdbcExtension))
+ case DeleteStatement(_) ⇒ withSegment(ctx, Delete, jdbcExtension)(recordWrite(pjp, sql, jdbcExtension))
case anythingElse ⇒
log.debug(s"Unable to parse sql [$sql]")
pjp.proceed()
@@ -67,27 +65,27 @@ class StatementInstrumentation {
try thunk finally timeSpent(System.nanoTime() - start)
}
- def withSegment[A](ctx: TraceContext, system: ActorSystem, statement: String)(thunk: ⇒ A): A = {
- val segmentName = Jdbc(system).generateJdbcSegmentName(statement)
+ def withSegment[A](ctx: TraceContext, statement: String, jdbcExtension: JdbcExtension)(thunk: ⇒ A): A = {
+ val segmentName = jdbcExtension.generateJdbcSegmentName(statement)
val segment = ctx.startSegment(segmentName, SegmentCategory.Database, Jdbc.SegmentLibraryName)
try thunk finally segment.finish()
}
- def recordRead(pjp: ProceedingJoinPoint, sql: String, system: ActorSystem)(implicit statementRecorder: Option[GroupRecorder]): Any = {
- withTimeSpent(pjp.proceedWithErrorHandler(sql, system)) { timeSpent ⇒
+ def recordRead(pjp: ProceedingJoinPoint, sql: String, jdbcExtension: JdbcExtension)(implicit statementRecorder: Option[StatementsMetrics]): Any = {
+ withTimeSpent(pjp.proceedWithErrorHandler(sql, jdbcExtension)) { timeSpent ⇒
statementRecorder.map(stmr ⇒ stmr.reads.record(timeSpent))
val timeSpentInMillis = nanos.toMillis(timeSpent)
- if (timeSpentInMillis >= Jdbc(system).slowQueryThreshold) {
- statementRecorder.map(stmr ⇒ stmr.slow.increment())
- Jdbc(system).processSlowQuery(sql, timeSpentInMillis)
+ if (timeSpentInMillis >= jdbcExtension.slowQueryThreshold) {
+ statementRecorder.map(stmr ⇒ stmr.slows.increment())
+ jdbcExtension.processSlowQuery(sql, timeSpentInMillis)
}
}
}
- def recordWrite(pjp: ProceedingJoinPoint, sql: String, system: ActorSystem)(implicit statementRecorder: Option[GroupRecorder]): Any = {
- withTimeSpent(pjp.proceedWithErrorHandler(sql, system)) { timeSpent ⇒
+ def recordWrite(pjp: ProceedingJoinPoint, sql: String, jdbcExtension: JdbcExtension)(implicit statementRecorder: Option[StatementsMetrics]): Any = {
+ withTimeSpent(pjp.proceedWithErrorHandler(sql, jdbcExtension)) { timeSpent ⇒
statementRecorder.map(stmr ⇒ stmr.writes.record(timeSpent))
}
}
@@ -109,12 +107,12 @@ object StatementInstrumentation {
val Delete = "Delete"
implicit class PimpedProceedingJoinPoint(pjp: ProceedingJoinPoint) {
- def proceedWithErrorHandler(sql: String, system: ActorSystem)(implicit statementRecorder: Option[GroupRecorder]): Any = {
+ def proceedWithErrorHandler(sql: String, jdbcExtension: JdbcExtension)(implicit statementRecorder: Option[StatementsMetrics]): Any = {
try {
pjp.proceed()
} catch {
case NonFatal(cause) ⇒
- Jdbc(system).processSqlError(sql, cause)
+ jdbcExtension.processSqlError(sql, cause)
statementRecorder.map(stmr ⇒ stmr.errors.increment())
throw cause
}
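
Note: the new shape of the instrumentation is TraceContext.map plus a segment wrapped around the statement. A condensed sketch of that pattern, assuming TraceContext.map yields an Option that is empty when no context is active; the segment name and library are illustrative:

    import kamon.trace.{ SegmentCategory, TraceContext }

    // Runs thunk inside a database segment when a trace context is present.
    def withDatabaseSegment[A](segmentName: String, library: String)(thunk: ⇒ A): Option[A] =
      TraceContext.map { ctx ⇒
        val segment = ctx.startSegment(segmentName, SegmentCategory.Database, library)
        try thunk finally segment.finish()
      }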
diff --git a/kamon-jdbc/src/main/scala/kamon/jdbc/metric/StatementsMetrics.scala b/kamon-jdbc/src/main/scala/kamon/jdbc/metric/StatementsMetrics.scala
index 7ba8b105..e1d6689c 100644
--- a/kamon-jdbc/src/main/scala/kamon/jdbc/metric/StatementsMetrics.scala
+++ b/kamon-jdbc/src/main/scala/kamon/jdbc/metric/StatementsMetrics.scala
@@ -16,68 +16,17 @@
package kamon.jdbc.metric
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric.{ CollectionContext, MetricGroupCategory, MetricGroupFactory, MetricGroupIdentity, MetricGroupRecorder, MetricGroupSnapshot, MetricIdentity, MetricSnapshot }
-import kamon.metric.instrument.{ Counter, Histogram }
-
-case class StatementsMetrics(name: String) extends MetricGroupIdentity {
- val category = StatementsMetrics
-}
-
-object StatementsMetrics extends MetricGroupCategory {
- val name = "jdbc-statements"
-
- case object Writes extends MetricIdentity { val name = "writes" }
- case object Reads extends MetricIdentity { val name = "reads" }
- case object Slows extends MetricIdentity { val name = "slow-queries" }
- case object Errors extends MetricIdentity { val name = "errors" }
-
- case class StatementsMetricsRecorder(writes: Histogram, reads: Histogram, slow: Counter, errors: Counter)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- StatementsMetricsSnapshot(writes.collect(context), reads.collect(context), slow.collect(context), errors.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class StatementsMetricsSnapshot(writes: Histogram.Snapshot, reads: Histogram.Snapshot, slows: Counter.Snapshot, errors: Counter.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = StatementsMetricsSnapshot
-
- def merge(that: StatementsMetricsSnapshot, context: CollectionContext): GroupSnapshotType = {
- StatementsMetricsSnapshot(writes.merge(that.writes, context), reads.merge(that.reads, context), slows.merge(that.slows, context), errors.merge(that.errors, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- Writes -> writes,
- Reads -> reads,
- Slows -> slows,
- Reads -> errors)
- }
-
- val Factory = StatementsMetricsGroupFactory
-}
-
-case object StatementsMetricsGroupFactory extends MetricGroupFactory {
- import kamon.jdbc.metric.StatementsMetrics._
-
- type GroupRecorder = StatementsMetricsRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.jdbc.statements")
-
- val writesConfig = settings.getConfig("writes")
- val readsConfig = settings.getConfig("reads")
-
- new StatementsMetricsRecorder(
- Histogram.fromConfig(writesConfig),
- Histogram.fromConfig(readsConfig),
- Counter(),
- Counter())
- }
+import kamon.metric._
+import kamon.metric.instrument.{ Time, InstrumentFactory }
+
+class StatementsMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val reads = histogram("reads", Time.Nanoseconds)
+ val writes = histogram("writes", Time.Nanoseconds)
+ val slows = counter("slows")
+ val errors = counter("errors")
}
+object StatementsMetrics extends EntityRecorderFactory[StatementsMetrics] {
+ def category: String = "jdbc-statements"
+ def createRecorder(instrumentFactory: InstrumentFactory): StatementsMetrics = new StatementsMetrics(instrumentFactory)
+} \ No newline at end of file
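
Note: call sites now obtain the recorder through the metrics extension instead of a group factory. A sketch of recording against it, assuming an ActorSystem as in the instrumentation above; the sample values are illustrative:

    import akka.actor.ActorSystem
    import kamon.Kamon
    import kamon.metric.Metrics

    def recordSampleRead(system: ActorSystem): Unit = {
      val statements = Kamon(Metrics)(system).register(StatementsMetrics, "jdbc-statements").map(_.recorder)
      statements.foreach { stmt ⇒
        stmt.reads.record(1200000L) // nanoseconds, matching the Time.Nanoseconds unit above
        stmt.slows.increment()
      }
    }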
diff --git a/kamon-jdbc/src/test/scala/kamon/jdbc/instrumentation/StatementInstrumentationSpec.scala b/kamon-jdbc/src/test/scala/kamon/jdbc/instrumentation/StatementInstrumentationSpec.scala
index 534edd57..e150d967 100644
--- a/kamon-jdbc/src/test/scala/kamon/jdbc/instrumentation/StatementInstrumentationSpec.scala
+++ b/kamon-jdbc/src/test/scala/kamon/jdbc/instrumentation/StatementInstrumentationSpec.scala
@@ -17,40 +17,33 @@ package kamon.jdbc.instrumentation
import java.sql.{ DriverManager, SQLException }
-import akka.actor.ActorSystem
-import akka.testkit.{ TestKitBase, TestProbe }
import com.typesafe.config.ConfigFactory
-import kamon.Kamon
import kamon.jdbc.{ Jdbc, JdbcNameGenerator, SqlErrorProcessor, SlowQueryProcessor }
-import kamon.jdbc.metric.StatementsMetrics
-import kamon.jdbc.metric.StatementsMetrics.StatementsMetricsSnapshot
-import kamon.metric.{ TraceMetrics, Metrics }
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import kamon.metric.TraceMetrics.TraceMetricsSnapshot
-import kamon.trace.{ SegmentCategory, SegmentMetricIdentity, TraceRecorder }
-import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }
-
-import scala.concurrent.duration._
-
-class StatementInstrumentationSpec extends TestKitBase with WordSpecLike with Matchers with BeforeAndAfterAll {
-
- implicit lazy val system: ActorSystem = ActorSystem("jdbc-spec", ConfigFactory.parseString(
- """
- |kamon {
- | jdbc {
- | slow-query-threshold = 100 milliseconds
- |
- | # Fully qualified name of the implementation of kamon.jdbc.SlowQueryProcessor.
- | slow-query-processor = kamon.jdbc.instrumentation.NoOpSlowQueryProcessor
- |
- | # Fully qualified name of the implementation of kamon.jdbc.SqlErrorProcessor.
- | sql-error-processor = kamon.jdbc.instrumentation.NoOpSqlErrorProcessor
- |
- | # Fully qualified name of the implementation of kamon.jdbc.JdbcNameGenerator
- | name-generator = kamon.jdbc.instrumentation.NoOpJdbcNameGenerator
- | }
- |}
- """.stripMargin))
+import kamon.metric.TraceMetricsSpec
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.{ SegmentCategory, TraceContext }
+
+class StatementInstrumentationSpec extends BaseKamonSpec("jdbc-spec") {
+ import TraceMetricsSpec.SegmentSyntax
+
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon {
+ | jdbc {
+ | slow-query-threshold = 100 milliseconds
+ |
+ | # Fully qualified name of the implementation of kamon.jdbc.SlowQueryProcessor.
+ | slow-query-processor = kamon.jdbc.instrumentation.NoOpSlowQueryProcessor
+ |
+ | # Fully qualified name of the implementation of kamon.jdbc.SqlErrorProcessor.
+ | sql-error-processor = kamon.jdbc.instrumentation.NoOpSqlErrorProcessor
+ |
+ | # Fully qualified name of the implementation of kamon.jdbc.JdbcNameGenerator
+ | name-generator = kamon.jdbc.instrumentation.NoOpJdbcNameGenerator
+ | }
+ |}
+ """.stripMargin)
val connection = DriverManager.getConnection("jdbc:h2:mem:jdbc-spec", "SA", "")
@@ -67,117 +60,105 @@ class StatementInstrumentationSpec extends TestKitBase with WordSpecLike with Ma
}
"the StatementInstrumentation" should {
- "record the execution time of INSERT operation" in new StatementsMetricsListenerFixture {
- TraceRecorder.withNewTraceContext("jdbc-trace-insert") {
-
- val metricsListener = subscribeToMetrics()
-
+ "record the execution time of INSERT operation" in {
+ TraceContext.withContext(newContext("jdbc-trace-insert")) {
for (id ← 1 to 100) {
val insert = s"INSERT INTO Address (Nr, Name) VALUES($id, 'foo')"
val insertStatement = connection.prepareStatement(insert)
insertStatement.execute()
}
- val StatementMetrics = expectStatementsMetrics(metricsListener, 3 seconds)
- StatementMetrics.writes.numberOfMeasurements should be(100)
-
- TraceRecorder.finish()
+ TraceContext.currentContext.finish()
}
- val snapshot = takeSnapshotOf("jdbc-trace-insert")
- snapshot.elapsedTime.numberOfMeasurements should be(1)
- snapshot.segments.size should be(1)
- snapshot.segments(SegmentMetricIdentity("Jdbc[Insert]", SegmentCategory.Database, Jdbc.SegmentLibraryName)).numberOfMeasurements should be(100)
- }
+ val jdbcSnapshot = takeSnapshotOf("jdbc-statements", "jdbc-statements")
+ jdbcSnapshot.histogram("writes").get.numberOfMeasurements should be(100)
- "record the execution time of SELECT operation" in new StatementsMetricsListenerFixture {
- TraceRecorder.withNewTraceContext("jdbc-trace-select") {
-
- val metricsListener = subscribeToMetrics()
+ val traceSnapshot = takeSnapshotOf("jdbc-trace-insert", "trace")
+ traceSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceSnapshot.segments.size should be(1)
+ traceSnapshot.segment("Jdbc[Insert]", SegmentCategory.Database, Jdbc.SegmentLibraryName).numberOfMeasurements should be(100)
+ }
+ "record the execution time of SELECT operation" in {
+ TraceContext.withContext(newContext("jdbc-trace-select")) {
for (id ← 1 to 100) {
val select = s"SELECT * FROM Address where Nr = $id"
val selectStatement = connection.createStatement()
selectStatement.execute(select)
}
- val StatementMetrics = expectStatementsMetrics(metricsListener, 3 seconds)
- StatementMetrics.reads.numberOfMeasurements should be(100)
-
- TraceRecorder.finish()
+ TraceContext.currentContext.finish()
}
- val snapshot = takeSnapshotOf("jdbc-trace-select")
- snapshot.elapsedTime.numberOfMeasurements should be(1)
- snapshot.segments.size should be(1)
- snapshot.segments(SegmentMetricIdentity("Jdbc[Select]", SegmentCategory.Database, Jdbc.SegmentLibraryName)).numberOfMeasurements should be(100)
- }
-
- "record the execution time of UPDATE operation" in new StatementsMetricsListenerFixture {
- TraceRecorder.withNewTraceContext("jdbc-trace-update") {
+ val jdbcSnapshot = takeSnapshotOf("jdbc-statements", "jdbc-statements")
+ jdbcSnapshot.histogram("reads").get.numberOfMeasurements should be(100)
- val metricsListener = subscribeToMetrics()
+ val traceSnapshot = takeSnapshotOf("jdbc-trace-select", "trace")
+ traceSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceSnapshot.segments.size should be(1)
+ traceSnapshot.segment("Jdbc[Select]", SegmentCategory.Database, Jdbc.SegmentLibraryName).numberOfMeasurements should be(100)
+ }
+ "record the execution time of UPDATE operation" in {
+ TraceContext.withContext(newContext("jdbc-trace-update")) {
for (id ← 1 to 100) {
val update = s"UPDATE Address SET Name = 'bar$id' where Nr = $id"
val updateStatement = connection.prepareStatement(update)
updateStatement.execute()
}
- val StatementMetrics = expectStatementsMetrics(metricsListener, 3 seconds)
- StatementMetrics.writes.numberOfMeasurements should be(100)
- TraceRecorder.finish()
+ TraceContext.currentContext.finish()
}
- val snapshot = takeSnapshotOf("jdbc-trace-update")
- snapshot.elapsedTime.numberOfMeasurements should be(1)
- snapshot.segments.size should be(1)
- snapshot.segments(SegmentMetricIdentity("Jdbc[Update]", SegmentCategory.Database, Jdbc.SegmentLibraryName)).numberOfMeasurements should be(100)
- }
-
- "record the execution time of DELETE operation" in new StatementsMetricsListenerFixture {
- TraceRecorder.withNewTraceContext("jdbc-trace-delete") {
+ val jdbcSnapshot = takeSnapshotOf("jdbc-statements", "jdbc-statements")
+ jdbcSnapshot.histogram("writes").get.numberOfMeasurements should be(100)
- val metricsListener = subscribeToMetrics()
+ val traceSnapshot = takeSnapshotOf("jdbc-trace-update", "trace")
+ traceSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceSnapshot.segments.size should be(1)
+ traceSnapshot.segment("Jdbc[Update]", SegmentCategory.Database, Jdbc.SegmentLibraryName).numberOfMeasurements should be(100)
+ }
+ "record the execution time of DELETE operation" in {
+ TraceContext.withContext(newContext("jdbc-trace-delete")) {
for (id ← 1 to 100) {
val delete = s"DELETE FROM Address where Nr = $id"
val deleteStatement = connection.createStatement()
deleteStatement.execute(delete)
}
- val StatementMetrics = expectStatementsMetrics(metricsListener, 3 seconds)
- StatementMetrics.writes.numberOfMeasurements should be(100)
- TraceRecorder.finish()
+ TraceContext.currentContext.finish()
}
- val snapshot = takeSnapshotOf("jdbc-trace-delete")
- snapshot.elapsedTime.numberOfMeasurements should be(1)
- snapshot.segments.size should be(1)
- snapshot.segments(SegmentMetricIdentity("Jdbc[Delete]", SegmentCategory.Database, Jdbc.SegmentLibraryName)).numberOfMeasurements should be(100)
- }
+ val jdbcSnapshot = takeSnapshotOf("jdbc-statements", "jdbc-statements")
+ jdbcSnapshot.histogram("writes").get.numberOfMeasurements should be(100)
- "record the execution time of SLOW QUERIES based on the kamon.jdbc.slow-query-threshold" in new StatementsMetricsListenerFixture {
- TraceRecorder.withNewTraceContext("jdbc-trace-slow") {
+ val traceSnapshot = takeSnapshotOf("jdbc-trace-delete", "trace")
+ traceSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceSnapshot.segments.size should be(1)
+ traceSnapshot.segment("Jdbc[Delete]", SegmentCategory.Database, Jdbc.SegmentLibraryName).numberOfMeasurements should be(100)
- val metricsListener = subscribeToMetrics()
+ }
+ "record the execution time of SLOW QUERIES based on the kamon.jdbc.slow-query-threshold" in {
+ TraceContext.withContext(newContext("jdbc-trace-slow")) {
for (id ← 1 to 2) {
val select = s"SELECT * FROM Address; CALL SLEEP(100)"
val selectStatement = connection.createStatement()
selectStatement.execute(select)
}
- val StatementMetrics = expectStatementsMetrics(metricsListener, 3 seconds)
- StatementMetrics.slows.count should be(2)
+ TraceContext.currentContext.finish()
}
- }
- "count all SQL ERRORS" in new StatementsMetricsListenerFixture {
- TraceRecorder.withNewTraceContext("jdbc-trace-errors") {
+ val jdbcSnapshot = takeSnapshotOf("jdbc-statements", "jdbc-statements")
+ jdbcSnapshot.counter("slows").get.count should be(2)
- val metricsListener = subscribeToMetrics()
+ }
+ "count all SQL ERRORS" in {
+ TraceContext.withContext(newContext("jdbc-trace-errors")) {
for (_ ← 1 to 10) {
intercept[SQLException] {
val error = "SELECT * FROM NO_EXISTENT_TABLE"
@@ -185,35 +166,13 @@ class StatementInstrumentationSpec extends TestKitBase with WordSpecLike with Ma
errorStatement.execute(error)
}
}
- val StatementMetrics = expectStatementsMetrics(metricsListener, 3 seconds)
- StatementMetrics.errors.count should be(10)
- }
- }
- }
- trait StatementsMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(StatementsMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
- }
- }
+ TraceContext.currentContext.finish()
+ }
- def expectStatementsMetrics(listener: TestProbe, waitTime: FiniteDuration): StatementsMetricsSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
+ val jdbcSnapshot = takeSnapshotOf("jdbc-statements", "jdbc-statements")
+ jdbcSnapshot.counter("errors").get.count should be(10)
}
- val statementsMetricsOption = tickSnapshot.metrics.get(StatementsMetrics(StatementInstrumentation.Statements))
- statementsMetricsOption should not be empty
- statementsMetricsOption.get.asInstanceOf[StatementsMetricsSnapshot]
- }
-
- def takeSnapshotOf(traceName: String): TraceMetricsSnapshot = {
- val recorder = Kamon(Metrics)(system).register(TraceMetrics(traceName), TraceMetrics.Factory)
- val collectionContext = Kamon(Metrics)(system).buildDefaultCollectionContext
- recorder.get.collect(collectionContext)
}
}
diff --git a/kamon-log-reporter/src/main/resources/reference.conf b/kamon-log-reporter/src/main/resources/reference.conf
index dea218eb..0f7e4e9a 100644
--- a/kamon-log-reporter/src/main/resources/reference.conf
+++ b/kamon-log-reporter/src/main/resources/reference.conf
@@ -5,8 +5,7 @@
kamon {
log-reporter {
- # Enable system metrics
-    # In order to not get a ClassNotFoundException, we must register the kamon-system-metrics module
+ # Decide whether the log reporter should log system metrics, if available.
report-system-metrics = false
}
}
diff --git a/kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala b/kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala
index 61d87793..dc977a52 100644
--- a/kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala
+++ b/kamon-log-reporter/src/main/scala/kamon/logreporter/LogReporter.scala
@@ -19,28 +19,13 @@ package kamon.logreporter
import akka.actor._
import akka.event.Logging
import kamon.Kamon
-import kamon.akka.ActorMetrics
-import ActorMetrics.ActorMetricSnapshot
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import kamon.metric.TraceMetrics.TraceMetricsSnapshot
-import kamon.metric.UserMetrics._
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import kamon.metric._
import kamon.metric.instrument.{ Counter, Histogram }
-import kamon.metrics.ContextSwitchesMetrics.ContextSwitchesMetricsSnapshot
-import kamon.metrics.NetworkMetrics.NetworkMetricSnapshot
-import kamon.metrics.ProcessCPUMetrics.ProcessCPUMetricsSnapshot
-import kamon.metrics._
-import kamon.metrics.CPUMetrics.CPUMetricSnapshot
object LogReporter extends ExtensionId[LogReporterExtension] with ExtensionIdProvider {
override def lookup(): ExtensionId[_ <: Extension] = LogReporter
override def createExtension(system: ExtendedActorSystem): LogReporterExtension = new LogReporterExtension(system)
-
- trait MetricKeyGenerator {
- def localhostName: String
- def normalizedLocalhostName: String
- def generateKey(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String
- }
}
class LogReporterExtension(system: ExtendedActorSystem) extends Kamon.Extension {
@@ -48,25 +33,15 @@ class LogReporterExtension(system: ExtendedActorSystem) extends Kamon.Extension
log.info("Starting the Kamon(LogReporter) extension")
val logReporterConfig = system.settings.config.getConfig("kamon.log-reporter")
-
val subscriber = system.actorOf(Props[LogReporterSubscriber], "kamon-log-reporter")
- Kamon(Metrics)(system).subscribe(TraceMetrics, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(ActorMetrics, "*", subscriber, permanently = true)
- // Subscribe to all user metrics
- Kamon(Metrics)(system).subscribe(UserHistograms, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(UserCounters, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(UserMinMaxCounters, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(UserGauges, "*", subscriber, permanently = true)
+ Kamon(Metrics)(system).subscribe("trace", "**", subscriber, permanently = true)
+ Kamon(Metrics)(system).subscribe("actor", "**", subscriber, permanently = true)
+ Kamon(Metrics)(system).subscribe("user-metrics", "**", subscriber, permanently = true)
val includeSystemMetrics = logReporterConfig.getBoolean("report-system-metrics")
-
if (includeSystemMetrics) {
- // Subscribe to SystemMetrics
- Kamon(Metrics)(system).subscribe(CPUMetrics, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(ProcessCPUMetrics, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(NetworkMetrics, "*", subscriber, permanently = true)
- Kamon(Metrics)(system).subscribe(ContextSwitchesMetrics, "*", subscriber, permanently = true)
+ Kamon(Metrics)(system).subscribe("system-metric", "**", subscriber, permanently = true)
}
}
@@ -79,32 +54,25 @@ class LogReporterSubscriber extends Actor with ActorLogging {
}
def printMetricSnapshot(tick: TickMetricSnapshot): Unit = {
- // Group all the user metrics together.
- val histograms = Map.newBuilder[MetricGroupIdentity, Histogram.Snapshot]
- val counters = Map.newBuilder[MetricGroupIdentity, Counter.Snapshot]
- val minMaxCounters = Map.newBuilder[MetricGroupIdentity, Histogram.Snapshot]
- val gauges = Map.newBuilder[MetricGroupIdentity, Histogram.Snapshot]
-
tick.metrics foreach {
- case (identity, ams: ActorMetricSnapshot) ⇒ logActorMetrics(identity.name, ams)
- case (identity, tms: TraceMetricsSnapshot) ⇒ logTraceMetrics(identity.name, tms)
- case (h: UserHistogram, s: UserHistogramSnapshot) ⇒ histograms += (h -> s.histogramSnapshot)
- case (c: UserCounter, s: UserCounterSnapshot) ⇒ counters += (c -> s.counterSnapshot)
- case (m: UserMinMaxCounter, s: UserMinMaxCounterSnapshot) ⇒ minMaxCounters += (m -> s.minMaxCounterSnapshot)
- case (g: UserGauge, s: UserGaugeSnapshot) ⇒ gauges += (g -> s.gaugeSnapshot)
- case (_, cms: CPUMetricSnapshot) ⇒ logCpuMetrics(cms)
- case (_, pcms: ProcessCPUMetricsSnapshot) ⇒ logProcessCpuMetrics(pcms)
- case (_, nms: NetworkMetricSnapshot) ⇒ logNetworkMetrics(nms)
- case (_, csms: ContextSwitchesMetricsSnapshot) ⇒ logContextSwitchesMetrics(csms)
- case ignoreEverythingElse ⇒
+ case (entity, snapshot) if entity.category == "actor" ⇒ logActorMetrics(entity.name, snapshot)
+ case (entity, snapshot) if entity.category == "trace" ⇒ logTraceMetrics(entity.name, snapshot)
+ case (entity, snapshot) if entity.category == "user-metric" ⇒ logUserMetrics(snapshot)
+ case (entity, snapshot) if entity.category == "system-metric" ⇒ logSystemMetrics(entity.name, snapshot)
+ case ignoreEverythingElse ⇒
}
-
- logUserMetrics(histograms.result(), counters.result(), minMaxCounters.result(), gauges.result())
}
- def logActorMetrics(name: String, ams: ActorMetricSnapshot): Unit = {
- log.info(
- """
+ def logActorMetrics(name: String, actorSnapshot: EntitySnapshot): Unit = {
+ for {
+ processingTime ← actorSnapshot.histogram("processing-time")
+ timeInMailbox ← actorSnapshot.histogram("time-in-mailbox")
+ mailboxSize ← actorSnapshot.minMaxCounter("mailbox-size")
+ errors ← actorSnapshot.counter("errors")
+ } {
+
+ log.info(
+ """
|+--------------------------------------------------------------------------------------------------+
|| |
|| Actor: %-83s |
@@ -120,46 +88,67 @@ class LogReporterSubscriber extends Actor with ActorLogging {
|| Max: %-12s Max: %-12s |
|| |
|+--------------------------------------------------------------------------------------------------+"""
- .stripMargin.format(
- name,
- ams.processingTime.numberOfMeasurements, ams.timeInMailbox.numberOfMeasurements, ams.mailboxSize.min,
- ams.processingTime.min, ams.timeInMailbox.min, ams.mailboxSize.average,
- ams.processingTime.percentile(50.0D), ams.timeInMailbox.percentile(50.0D), ams.mailboxSize.max,
- ams.processingTime.percentile(90.0D), ams.timeInMailbox.percentile(90.0D),
- ams.processingTime.percentile(95.0D), ams.timeInMailbox.percentile(95.0D),
- ams.processingTime.percentile(99.0D), ams.timeInMailbox.percentile(99.0D), ams.errors.count,
- ams.processingTime.percentile(99.9D), ams.timeInMailbox.percentile(99.9D),
- ams.processingTime.max, ams.timeInMailbox.max))
+ .stripMargin.format(
+ name,
+ processingTime.numberOfMeasurements, timeInMailbox.numberOfMeasurements, mailboxSize.min,
+ processingTime.min, timeInMailbox.min, mailboxSize.average,
+ processingTime.percentile(50.0D), timeInMailbox.percentile(50.0D), mailboxSize.max,
+ processingTime.percentile(90.0D), timeInMailbox.percentile(90.0D),
+ processingTime.percentile(95.0D), timeInMailbox.percentile(95.0D),
+ processingTime.percentile(99.0D), timeInMailbox.percentile(99.0D), errors.count,
+ processingTime.percentile(99.9D), timeInMailbox.percentile(99.9D),
+ processingTime.max, timeInMailbox.max))
+ }
+
}
- def logCpuMetrics(cms: CPUMetricSnapshot): Unit = {
- import cms._
+ def logSystemMetrics(metric: String, snapshot: EntitySnapshot): Unit = metric match {
+ case "cpu" ⇒ logCpuMetrics(snapshot)
+ case "network" ⇒ logNetworkMetrics(snapshot)
+ case "process-cpu" ⇒ logProcessCpuMetrics(snapshot)
+ case "context-switches" ⇒ logContextSwitchesMetrics(snapshot)
+ case ignoreOthers ⇒
+ }
- log.info(
- """
+ def logCpuMetrics(cpuMetrics: EntitySnapshot): Unit = {
+ for {
+ user ← cpuMetrics.histogram("cpu-user")
+ system ← cpuMetrics.histogram("cpu-system")
+ cpuWait ← cpuMetrics.histogram("cpu-wait")
+ idle ← cpuMetrics.histogram("cpu-idle")
+ } {
+
+ log.info(
+ """
|+--------------------------------------------------------------------------------------------------+
|| |
|| CPU (ALL) |
|| |
|| User (percentage) System (percentage) Wait (percentage) Idle (percentage) |
|| Min: %-3s Min: %-3s Min: %-3s Min: %-3s |
- || Avg: %-3s Avg: %-3s Avg: %-3s Avg: %-3s |
+ || Avg: %-3s Avg: %-3s Avg: %-3s Avg: %-3s |
|| Max: %-3s Max: %-3s Max: %-3s Max: %-3s |
|| |
|| |
|+--------------------------------------------------------------------------------------------------+"""
- .stripMargin.format(
- user.min, system.min, cpuWait.min, idle.min,
- user.average, system.average, cpuWait.average, idle.average,
- user.max, system.max, cpuWait.max, idle.max))
+ .stripMargin.format(
+ user.min, system.min, cpuWait.min, idle.min,
+ user.average, system.average, cpuWait.average, idle.average,
+ user.max, system.max, cpuWait.max, idle.max))
+ }
}
- def logNetworkMetrics(nms: NetworkMetricSnapshot): Unit = {
- import nms._
+ def logNetworkMetrics(networkMetrics: EntitySnapshot): Unit = {
+ for {
+ rxBytes ← networkMetrics.histogram("rx-bytes")
+ txBytes ← networkMetrics.histogram("tx-bytes")
+ rxErrors ← networkMetrics.histogram("rx-errors")
+ txErrors ← networkMetrics.histogram("tx-errors")
+ } {
- log.info(
- """
+ log.info(
+ """
|+--------------------------------------------------------------------------------------------------+
|| |
|| Network (ALL) |
@@ -170,38 +159,50 @@ class LogReporterSubscriber extends Actor with ActorLogging {
|| Max: %-4s Max: %-4s |
|| |
|+--------------------------------------------------------------------------------------------------+"""
- .stripMargin.format(
- rxBytes.min, txBytes.min, rxErrors.sum, txErrors.sum,
- rxBytes.average, txBytes.average,
- rxBytes.max, txBytes.max))
+ .stripMargin.
+ format(
+ rxBytes.min, txBytes.min, rxErrors.sum, txErrors.sum,
+ rxBytes.average, txBytes.average,
+ rxBytes.max, txBytes.max))
+ }
}
- def logProcessCpuMetrics(pcms: ProcessCPUMetricsSnapshot): Unit = {
- import pcms._
+ def logProcessCpuMetrics(processCpuMetrics: EntitySnapshot): Unit = {
+ for {
+ user ← processCpuMetrics.histogram("process-user-cpu")
+ total ← processCpuMetrics.histogram("process-cpu")
+ } {
- log.info(
- """
+ log.info(
+ """
|+--------------------------------------------------------------------------------------------------+
|| |
|| Process-CPU |
|| |
- || Cpu-Percentage Total-Process-Time |
+ || User-Percentage Total-Percentage |
|| Min: %-12s Min: %-12s |
|| Avg: %-12s Avg: %-12s |
|| Max: %-12s Max: %-12s |
|| |
|+--------------------------------------------------------------------------------------------------+"""
- .stripMargin.format(
- (cpuPercent.min / 100), totalProcessTime.min,
- (cpuPercent.average / 100), totalProcessTime.average,
- (cpuPercent.max / 100), totalProcessTime.max))
+ .stripMargin.
+ format(
+              user.min, total.min,
+              user.average, total.average,
+              user.max, total.max))
+ }
+
}
- def logContextSwitchesMetrics(csms: ContextSwitchesMetricsSnapshot): Unit = {
- import csms._
+ def logContextSwitchesMetrics(contextSwitchMetrics: EntitySnapshot): Unit = {
+ for {
+ perProcessVoluntary ← contextSwitchMetrics.histogram("context-switches-process-voluntary")
+ perProcessNonVoluntary ← contextSwitchMetrics.histogram("context-switches-process-non-voluntary")
+ global ← contextSwitchMetrics.histogram("context-switches-global")
+ } {
- log.info(
- """
+ log.info(
+ """
|+--------------------------------------------------------------------------------------------------+
|| |
|| Context-Switches |
@@ -212,18 +213,24 @@ class LogReporterSubscriber extends Actor with ActorLogging {
|| Max: %-12s Max: %-12s Max: %-12s |
|| |
|+--------------------------------------------------------------------------------------------------+"""
- .stripMargin.format(
- global.min, perProcessNonVoluntary.min, perProcessVoluntary.min,
- global.average, perProcessNonVoluntary.average, perProcessVoluntary.average,
- global.max, perProcessNonVoluntary.max, perProcessVoluntary.max))
+ .stripMargin.
+ format(
+ global.min, perProcessNonVoluntary.min, perProcessVoluntary.min,
+ global.average, perProcessNonVoluntary.average, perProcessVoluntary.average,
+ global.max, perProcessNonVoluntary.max, perProcessVoluntary.max))
+ }
}
- def logTraceMetrics(name: String, tms: TraceMetricsSnapshot): Unit = {
+ def logTraceMetrics(name: String, traceSnapshot: EntitySnapshot): Unit = {
val traceMetricsData = StringBuilder.newBuilder
- traceMetricsData.append(
- """
+ for {
+ elapsedTime ← traceSnapshot.histogram("elapsed-time")
+ } {
+
+ traceMetricsData.append(
+ """
|+--------------------------------------------------------------------------------------------------+
|| |
|| Trace: %-83s |
@@ -231,22 +238,26 @@ class LogReporterSubscriber extends Actor with ActorLogging {
|| |
|| Elapsed Time (nanoseconds): |
|"""
- .stripMargin.format(
- name, tms.elapsedTime.numberOfMeasurements))
-
- traceMetricsData.append(compactHistogramView(tms.elapsedTime))
- traceMetricsData.append(
- """
- || |
- |+--------------------------------------------------------------------------------------------------+"""
- .stripMargin)
-
- log.info(traceMetricsData.toString())
+ .stripMargin.format(
+ name, elapsedTime.numberOfMeasurements))
+
+ traceMetricsData.append(compactHistogramView(elapsedTime))
+ traceMetricsData.append(
+ """
+ || |
+ |+--------------------------------------------------------------------------------------------------+"""
+          .stripMargin)
+
+ log.info(traceMetricsData.toString())
+ }
}
- def logUserMetrics(histograms: Map[MetricGroupIdentity, Histogram.Snapshot],
- counters: Map[MetricGroupIdentity, Counter.Snapshot], minMaxCounters: Map[MetricGroupIdentity, Histogram.Snapshot],
- gauges: Map[MetricGroupIdentity, Histogram.Snapshot]): Unit = {
+ def logUserMetrics(userMetrics: EntitySnapshot): Unit = {
+ val histograms = userMetrics.histograms
+ val minMaxCounters = userMetrics.minMaxCounters
+ val gauges = userMetrics.gauges
+ val counters = userMetrics.counters
if (histograms.isEmpty && counters.isEmpty && minMaxCounters.isEmpty && gauges.isEmpty) {
log.info("No user metrics reported")
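
Note: any actor can hook into the same category/pattern subscription API the extension now uses. A minimal sketch; the subscriber actor and its output are illustrative:

    import akka.actor._
    import kamon.Kamon
    import kamon.metric.Metrics
    import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot

    class ConsoleSubscriber extends Actor {
      def receive = {
        case tick: TickMetricSnapshot ⇒
          tick.metrics.foreach {
            case (entity, snapshot) ⇒
              println(s"${entity.category}/${entity.name}: ${snapshot.metrics.size} instruments")
          }
      }
    }

    def wire(system: ActorSystem): Unit = {
      val subscriber = system.actorOf(Props[ConsoleSubscriber], "console-subscriber")
      Kamon(Metrics)(system).subscribe("trace", "**", subscriber, permanently = true)
    }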
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetricExtractor.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetricExtractor.scala
index e97c24dc..551bb546 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetricExtractor.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/CustomMetricExtractor.scala
@@ -16,17 +16,17 @@
package kamon.newrelic
-import kamon.metric.UserMetrics.UserMetricGroup
-import kamon.metric._
+import kamon.metric.{ UserMetrics, EntitySnapshot, Entity }
+import kamon.metric.instrument.CollectionContext
object CustomMetricExtractor extends MetricExtractor {
- def extract(settings: AgentSettings, collectionContext: CollectionContext, metrics: Map[MetricGroupIdentity, MetricGroupSnapshot]): Map[MetricID, MetricData] = {
- metrics.collect {
- case (mg: UserMetricGroup, groupSnapshot) ⇒
- groupSnapshot.metrics collect {
- case (name, snapshot) ⇒ Metric.fromKamonMetricSnapshot(snapshot, s"Custom/${mg.name}", None, Scale.Unit)
- }
- }.flatten.toMap
+ def extract(settings: AgentSettings, collectionContext: CollectionContext, metrics: Map[Entity, EntitySnapshot]): Map[MetricID, MetricData] = {
+ metrics.get(UserMetrics.entity).map { allUserMetrics ⇒
+ allUserMetrics.metrics.map {
+ case (key, snapshot) ⇒ Metric(snapshot, key.unitOfMeasurement, s"Custom/${key.name}", None)
+ }
+
+ } getOrElse (Map.empty)
}
}
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/JsonProtocol.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/JsonProtocol.scala
index 0e53be0b..6e16b975 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/JsonProtocol.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/JsonProtocol.scala
@@ -15,7 +15,7 @@
* ========================================================== */
package kamon.newrelic
-import kamon.Timestamp
+import kamon.util.Timestamp
import spray.json._
object JsonProtocol extends DefaultJsonProtocol {
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/Metric.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/Metric.scala
index 52d21f31..20204b79 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/Metric.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/Metric.scala
@@ -1,8 +1,8 @@
package kamon.newrelic
-import kamon.Timestamp
-import kamon.metric.instrument.{ Counter, Histogram }
-import kamon.metric.{ MetricSnapshot, Scale }
+import kamon.metric.instrument._
+import kamon.metric.MetricKey
+import kamon.util.{ MapMerge, Timestamp }
case class MetricID(name: String, scope: Option[String])
case class MetricData(callCount: Long, total: Double, totalExclusive: Double, min: Double, max: Double, sumOfSquares: Double) {
@@ -18,16 +18,23 @@ case class MetricData(callCount: Long, total: Double, totalExclusive: Double, mi
object Metric {
- def fromKamonMetricSnapshot(snapshot: MetricSnapshot, name: String, scope: Option[String], targetScale: Scale): Metric = {
+ def scaleFunction(uom: UnitOfMeasurement): Long ⇒ Double = uom match {
+ case time: Time ⇒ time.scale(Time.Seconds)
+ case other ⇒ _.toDouble
+ }
+
+ def apply(snapshot: InstrumentSnapshot, snapshotUnit: UnitOfMeasurement, name: String, scope: Option[String]): Metric = {
snapshot match {
case hs: Histogram.Snapshot ⇒
var total: Double = 0D
var sumOfSquares: Double = 0D
- val scaledMin = Scale.convert(hs.scale, targetScale, hs.min)
- val scaledMax = Scale.convert(hs.scale, targetScale, hs.max)
+ val scaler = scaleFunction(snapshotUnit)
+
+ val scaledMin = scaler(hs.min)
+ val scaledMax = scaler(hs.max)
hs.recordsIterator.foreach { record ⇒
- val scaledValue = Scale.convert(hs.scale, targetScale, record.level)
+ val scaledValue = scaler(record.level)
total += scaledValue * record.count
sumOfSquares += (scaledValue * scaledValue) * record.count
@@ -42,12 +49,12 @@ object Metric {
}
case class TimeSliceMetrics(from: Timestamp, to: Timestamp, metrics: Map[MetricID, MetricData]) {
- import kamon.metric.combineMaps
+ import MapMerge.Syntax
def merge(that: TimeSliceMetrics): TimeSliceMetrics = {
val mergedFrom = Timestamp.earlier(from, that.from)
val mergedTo = Timestamp.later(to, that.to)
- val mergedMetrics = combineMaps(metrics, that.metrics)((l, r) ⇒ l.merge(r))
+ val mergedMetrics = metrics.merge(that.metrics, (l, r) ⇒ l.merge(r))
TimeSliceMetrics(mergedFrom, mergedTo, mergedMetrics)
}
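
Note: a small worked example of the new unit scaling. The curried call shown above converts raw histogram levels directly, so no Scale bookkeeping survives in the snapshots themselves:

    import kamon.metric.instrument.Time

    // 1.5e9 nanoseconds scaled to seconds, exactly as record levels are
    // scaled before entering MetricData above.
    val seconds: Double = Time.Nanoseconds.scale(Time.Seconds)(1500000000L)
    // seconds == 1.5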
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/MetricReporter.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/MetricReporter.scala
index 286b0a77..51c1ad21 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/MetricReporter.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/MetricReporter.scala
@@ -6,9 +6,9 @@ import akka.actor.{ Props, ActorLogging, Actor }
import akka.pattern.pipe
import akka.io.IO
import kamon.Kamon
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import kamon.metric.UserMetrics.{ UserGauges, UserMinMaxCounters, UserCounters, UserHistograms }
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import kamon.metric._
+import kamon.metric.instrument.CollectionContext
import kamon.newrelic.ApiMethodClient.{ AgentShutdownRequiredException, AgentRestartRequiredException }
import kamon.newrelic.MetricReporter.{ PostFailed, PostSucceeded }
import spray.can.Http
@@ -22,7 +22,7 @@ class MetricReporter(settings: AgentSettings) extends Actor with ActorLogging wi
val metricsExtension = Kamon(Metrics)(context.system)
val collectionContext = metricsExtension.buildDefaultCollectionContext
val metricsSubscriber = {
- val tickInterval = context.system.settings.config.getDuration("kamon.metrics.tick-interval", TimeUnit.MILLISECONDS)
+ val tickInterval = context.system.settings.config.getDuration("kamon.metric.tick-interval", TimeUnit.MILLISECONDS)
    // Metrics are always sent to New Relic in 60-second intervals.
if (tickInterval == 60000) self
@@ -91,14 +91,8 @@ class MetricReporter(settings: AgentSettings) extends Actor with ActorLogging wi
}
def subscribeToMetrics(): Unit = {
- // Subscribe to Trace Metrics
- metricsExtension.subscribe(TraceMetrics, "*", metricsSubscriber, permanently = true)
-
- // Subscribe to all User Metrics
- metricsExtension.subscribe(UserHistograms, "*", metricsSubscriber, permanently = true)
- metricsExtension.subscribe(UserCounters, "*", metricsSubscriber, permanently = true)
- metricsExtension.subscribe(UserMinMaxCounters, "*", metricsSubscriber, permanently = true)
- metricsExtension.subscribe(UserGauges, "*", metricsSubscriber, permanently = true)
+ metricsExtension.subscribe("trace", "*", metricsSubscriber, permanently = true)
+ metricsExtension.subscribe("user-metrics", "*", metricsSubscriber, permanently = true)
}
}
@@ -113,5 +107,5 @@ object MetricReporter {
}
trait MetricExtractor {
- def extract(settings: AgentSettings, collectionContext: CollectionContext, metrics: Map[MetricGroupIdentity, MetricGroupSnapshot]): Map[MetricID, MetricData]
+ def extract(settings: AgentSettings, collectionContext: CollectionContext, metrics: Map[Entity, EntitySnapshot]): Map[MetricID, MetricData]
}
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/NewRelicErrorLogger.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/NewRelicErrorLogger.scala
index 56b29aff..7f56d931 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/NewRelicErrorLogger.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/NewRelicErrorLogger.scala
@@ -21,8 +21,8 @@ import java.util
import akka.actor.{ Actor, ActorLogging }
import akka.event.Logging.{ Error, InitializeLogger, LoggerInitialized }
import com.newrelic.api.agent.{ NewRelic ⇒ NR }
-import kamon.trace.TraceLocal.{ HttpContext, HttpContextKey }
-import kamon.trace.{ TraceLocal, TraceRecorder, TraceContextAware }
+import kamon.trace.TraceLocal.HttpContextKey
+import kamon.trace.{ TraceContext, TraceLocal, TraceContextAware }
trait CustomParamsSupport {
this: NewRelicErrorLogger ⇒
@@ -64,7 +64,7 @@ class NewRelicErrorLogger extends Actor with ActorLogging with CustomParamsSuppo
  // Really ugly, but temporary hack until next release...
def runInFakeTransaction[T](thunk: ⇒ T): T = {
val oldName = Thread.currentThread.getName
- Thread.currentThread.setName(TraceRecorder.currentContext.name)
+ Thread.currentThread.setName(TraceContext.currentContext.name)
try thunk finally Thread.currentThread.setName(oldName)
}
} \ No newline at end of file
diff --git a/kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetricExtractor.scala b/kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetricExtractor.scala
index baf20434..d0144f4b 100644
--- a/kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetricExtractor.scala
+++ b/kamon-newrelic/src/main/scala/kamon/newrelic/WebTransactionMetricExtractor.scala
@@ -16,78 +16,81 @@
package kamon.newrelic
+import kamon.metric.{ EntitySnapshot, Entity }
+
import scala.collection.mutable
-import kamon.metric._
-import kamon.metric.TraceMetrics.TraceMetricsSnapshot
-import kamon.metric.instrument.Histogram
-import kamon.trace.SegmentCategory.HttpClient
-import kamon.trace.SegmentMetricIdentity
+import kamon.metric.instrument.{ Time, CollectionContext, Histogram }
object WebTransactionMetricExtractor extends MetricExtractor {
- def extract(settings: AgentSettings, collectionContext: CollectionContext, metrics: Map[MetricGroupIdentity, MetricGroupSnapshot]): Map[MetricID, MetricData] = {
+ def extract(settings: AgentSettings, collectionContext: CollectionContext, metrics: Map[Entity, EntitySnapshot]): Map[MetricID, MetricData] = {
val apdexBuilder = new ApdexBuilder("Apdex", None, settings.apdexT)
// Trace metrics are recorded in nanoseconds.
- var accumulatedHttpDispatcher: Histogram.Snapshot = Histogram.Snapshot.empty(Scale.Nano)
- var accumulatedExternalServices: Histogram.Snapshot = Histogram.Snapshot.empty(Scale.Nano)
+ var accumulatedHttpDispatcher: Histogram.Snapshot = Histogram.Snapshot.empty
+ var accumulatedExternalServices: Histogram.Snapshot = Histogram.Snapshot.empty
val externalByHostSnapshots = mutable.Map.empty[String, List[Histogram.Snapshot]]
val externalByHostAndLibrarySnapshots = mutable.Map.empty[(String, String), List[Histogram.Snapshot]]
val externalScopedByHostAndLibrarySnapshots = mutable.Map.empty[(String, String, String), List[Histogram.Snapshot]]
- val transactionMetrics = metrics.collect {
- case (TraceMetrics(traceName), tms: TraceMetricsSnapshot) ⇒
-
- tms.segments.foreach {
- case (SegmentMetricIdentity(segmentName, category, library), snapshot: Histogram.Snapshot) if category.equals(HttpClient) ⇒
- accumulatedExternalServices = accumulatedExternalServices.merge(snapshot, collectionContext)
+ val transactionMetrics = metrics.filterKeys(_.category == "trace").map {
+ case (entity: Entity, es: EntitySnapshot) ⇒
+          // Trace metrics only have elapsed-time and segments, and all of them are Histograms.
+ es.histograms.foreach {
+ case (key, segmentSnapshot) if key.metadata.get("category").filter(_ == "http-client").nonEmpty ⇒
+ val library = key.metadata("library")
+ accumulatedExternalServices = accumulatedExternalServices.merge(segmentSnapshot, collectionContext)
// Accumulate externals by host
- externalByHostSnapshots.update(segmentName, snapshot :: externalByHostSnapshots.getOrElse(segmentName, Nil))
+ externalByHostSnapshots.update(key.name, segmentSnapshot :: externalByHostSnapshots.getOrElse(key.name, Nil))
// Accumulate externals by host and library
- externalByHostAndLibrarySnapshots.update((segmentName, library),
- snapshot :: externalByHostAndLibrarySnapshots.getOrElse((segmentName, library), Nil))
+ externalByHostAndLibrarySnapshots.update((key.name, library),
+ segmentSnapshot :: externalByHostAndLibrarySnapshots.getOrElse((key.name, library), Nil))
// Accumulate externals by host and library, including the transaction as scope.
- externalScopedByHostAndLibrarySnapshots.update((segmentName, library, traceName),
- snapshot :: externalScopedByHostAndLibrarySnapshots.getOrElse((segmentName, library, traceName), Nil))
+ externalScopedByHostAndLibrarySnapshots.update((key.name, library, entity.name),
+ segmentSnapshot :: externalScopedByHostAndLibrarySnapshots.getOrElse((key.name, library, entity.name), Nil))
- case otherSegments ⇒ // Ignore other kinds of segments.
- }
+ case otherSegments ⇒
- accumulatedHttpDispatcher = accumulatedHttpDispatcher.merge(tms.elapsedTime, collectionContext)
- tms.elapsedTime.recordsIterator.foreach { record ⇒
- apdexBuilder.record(Scale.convert(tms.elapsedTime.scale, Scale.Unit, record.level), record.count)
}
- Metric.fromKamonMetricSnapshot(tms.elapsedTime, "WebTransaction/Custom/" + traceName, None, Scale.Unit)
- }
+ es.histograms.collect {
+ case (key, elapsedTime) if key.name == "elapsed-time" ⇒
+ accumulatedHttpDispatcher = accumulatedHttpDispatcher.merge(elapsedTime, collectionContext)
+ elapsedTime.recordsIterator.foreach { record ⇒
+ apdexBuilder.record(Time.Nanoseconds.scale(Time.Seconds)(record.level), record.count)
+ }
+
+ Metric(elapsedTime, key.unitOfMeasurement, "WebTransaction/Custom/" + entity.name, None)
+ }
+ } flatten
- val httpDispatcher = Metric.fromKamonMetricSnapshot(accumulatedHttpDispatcher, "HttpDispatcher", None, Scale.Unit)
+ val httpDispatcher = Metric(accumulatedHttpDispatcher, Time.Seconds, "HttpDispatcher", None)
val webTransaction = httpDispatcher.copy(MetricID("WebTransaction", None))
val webTransactionTotal = httpDispatcher.copy(MetricID("WebTransactionTotalTime", None))
- val externalAllWeb = Metric.fromKamonMetricSnapshot(accumulatedExternalServices, "External/allWeb", None, Scale.Unit)
+ val externalAllWeb = Metric(accumulatedExternalServices, Time.Seconds, "External/allWeb", None)
val externalAll = externalAllWeb.copy(MetricID("External/all", None))
val externalByHost = externalByHostSnapshots.map {
case (host, snapshots) ⇒
- val mergedSnapshots = snapshots.foldLeft(Histogram.Snapshot.empty(Scale.Nano))(_.merge(_, collectionContext))
- Metric.fromKamonMetricSnapshot(mergedSnapshots, s"External/$host/all", None, Scale.Unit)
+ val mergedSnapshots = snapshots.foldLeft(Histogram.Snapshot.empty)(_.merge(_, collectionContext))
+ Metric(mergedSnapshots, Time.Seconds, s"External/$host/all", None)
}
val externalByHostAndLibrary = externalByHostAndLibrarySnapshots.map {
case ((host, library), snapshots) ⇒
- val mergedSnapshots = snapshots.foldLeft(Histogram.Snapshot.empty(Scale.Nano))(_.merge(_, collectionContext))
- Metric.fromKamonMetricSnapshot(mergedSnapshots, s"External/$host/$library", None, Scale.Unit)
+ val mergedSnapshots = snapshots.foldLeft(Histogram.Snapshot.empty)(_.merge(_, collectionContext))
+ Metric(mergedSnapshots, Time.Seconds, s"External/$host/$library", None)
}
val externalScopedByHostAndLibrary = externalScopedByHostAndLibrarySnapshots.map {
case ((host, library, traceName), snapshots) ⇒
- val mergedSnapshots = snapshots.foldLeft(Histogram.Snapshot.empty(Scale.Nano))(_.merge(_, collectionContext))
- Metric.fromKamonMetricSnapshot(mergedSnapshots, s"External/$host/$library", Some("WebTransaction/Custom/" + traceName), Scale.Unit)
+ val mergedSnapshots = snapshots.foldLeft(Histogram.Snapshot.empty)(_.merge(_, collectionContext))
+ Metric(mergedSnapshots, Time.Seconds, s"External/$host/$library", Some("WebTransaction/Custom/" + traceName))
}
Map(httpDispatcher, webTransaction, webTransactionTotal, externalAllWeb, externalAll, apdexBuilder.build) ++
diff --git a/kamon-newrelic/src/test/scala/kamon/newrelic/AgentSpec.scala b/kamon-newrelic/src/test/scala/kamon/newrelic/AgentSpec.scala
index adab1a34..05d3533b 100644
--- a/kamon-newrelic/src/test/scala/kamon/newrelic/AgentSpec.scala
+++ b/kamon-newrelic/src/test/scala/kamon/newrelic/AgentSpec.scala
@@ -22,7 +22,6 @@ import akka.actor.{ ActorRef, ActorSystem, Props }
import akka.io.IO
import akka.testkit._
import com.typesafe.config.ConfigFactory
-import kamon.AkkaExtensionSwap
import org.scalatest.{ BeforeAndAfterAll, WordSpecLike }
import spray.can.Http
import spray.http._
@@ -30,6 +29,7 @@ import spray.httpx.encoding.Deflate
import spray.httpx.{ SprayJsonSupport, RequestBuilding }
import spray.json.JsArray
import spray.json._
+import testkit.AkkaExtensionSwap
class AgentSpec extends TestKitBase with WordSpecLike with BeforeAndAfterAll with RequestBuilding with SprayJsonSupport {
import JsonProtocol._
diff --git a/kamon-newrelic/src/test/scala/kamon/newrelic/MetricReporterSpec.scala b/kamon-newrelic/src/test/scala/kamon/newrelic/MetricReporterSpec.scala
index ff977398..13ccbae3 100644
--- a/kamon-newrelic/src/test/scala/kamon/newrelic/MetricReporterSpec.scala
+++ b/kamon-newrelic/src/test/scala/kamon/newrelic/MetricReporterSpec.scala
@@ -21,15 +21,17 @@ import akka.io.IO
import akka.testkit._
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
-import kamon.metric.{ TraceMetrics, Metrics }
-import kamon.{ MilliTimestamp, Kamon, AkkaExtensionSwap }
-import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.{ Entity, Metrics, TraceMetrics }
+import kamon.util.MilliTimestamp
+import kamon.Kamon
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import org.scalatest.{ Matchers, WordSpecLike }
import spray.can.Http
import spray.http.Uri.Query
import spray.http._
import spray.httpx.encoding.Deflate
import spray.httpx.{ RequestBuilding, SprayJsonSupport }
+import testkit.AkkaExtensionSwap
import scala.concurrent.duration._
import spray.json._
@@ -133,20 +135,20 @@ class MetricReporterSpec extends TestKitBase with WordSpecLike with Matchers wit
}
trait FakeTickSnapshotsFixture {
- val testTraceID = TraceMetrics("example-trace")
- val recorder = Kamon(Metrics).register(testTraceID, TraceMetrics.Factory).get
+ val testTraceID = Entity("example-trace", "trace")
+ val recorder = Kamon(Metrics).register(TraceMetrics, testTraceID.name).get.recorder
val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
def collectRecorder = recorder.collect(collectionContext)
- recorder.elapsedTime.record(1000000)
- recorder.elapsedTime.record(2000000)
- recorder.elapsedTime.record(3000000)
+ recorder.ElapsedTime.record(1000000)
+ recorder.ElapsedTime.record(2000000)
+ recorder.ElapsedTime.record(3000000)
val firstSnapshot = TickMetricSnapshot(new MilliTimestamp(1415587618000L), new MilliTimestamp(1415587678000L), Map(testTraceID -> collectRecorder))
- recorder.elapsedTime.record(6000000)
- recorder.elapsedTime.record(5000000)
- recorder.elapsedTime.record(4000000)
+ recorder.ElapsedTime.record(6000000)
+ recorder.ElapsedTime.record(5000000)
+ recorder.ElapsedTime.record(4000000)
val secondSnapshot = TickMetricSnapshot(new MilliTimestamp(1415587678000L), new MilliTimestamp(1415587738000L), Map(testTraceID -> collectRecorder))
}
}
\ No newline at end of file
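The fixture above shows the new registration flow: recorders are obtained per Entity (a name plus category pair) instead of through a dedicated key class. A minimal sketch of that flow, assuming an ActorSystem with Kamon's Metrics extension loaded and using only the calls visible in this spec:

    import akka.actor.ActorSystem
    import kamon.Kamon
    import kamon.metric.{ Entity, Metrics, TraceMetrics }

    object RegistrationSketch extends App {
      // Hypothetical sketch: assumes Kamon's Metrics extension is available on this system.
      implicit val system = ActorSystem("registration-sketch")

      val entity = Entity("example-trace", "trace") // entities are (name, category) pairs now
      val recorder = Kamon(Metrics).register(TraceMetrics, entity.name).get.recorder

      recorder.ElapsedTime.record(1000000) // recorded in nanoseconds

      val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
      val snapshot = recorder.collect(collectionContext) // an EntitySnapshot of histograms
      println(snapshot.histograms.size)
    }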
diff --git a/kamon-play/src/main/resources/reference.conf b/kamon-play/src/main/resources/reference.conf
index 5ad070ce..7456bbb4 100644
--- a/kamon-play/src/main/resources/reference.conf
+++ b/kamon-play/src/main/resources/reference.conf
@@ -21,6 +21,6 @@ kamon {
# to traces and client http segments.
name-generator = kamon.play.DefaultPlayNameGenerator
- dispatcher = ${kamon.default-dispatcher}
+ dispatcher = "akka.actor.default-dispatcher"
}
}
\ No newline at end of file
diff --git a/kamon-play/src/main/scala/kamon/play/Play.scala b/kamon-play/src/main/scala/kamon/play/Play.scala
index 6c6cbf4c..7ca81028 100644
--- a/kamon-play/src/main/scala/kamon/play/Play.scala
+++ b/kamon-play/src/main/scala/kamon/play/Play.scala
@@ -20,7 +20,7 @@ import akka.actor.{ ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProv
import akka.event.Logging
import kamon.Kamon
import kamon.http.HttpServerMetrics
-import kamon.metric.Metrics
+import kamon.metric.{ Entity, Metrics }
import play.api.libs.ws.WSRequest
import play.api.mvc.RequestHeader
@@ -36,8 +36,14 @@ class PlayExtension(private val system: ExtendedActorSystem) extends Kamon.Exten
log.info(s"Starting the Kamon(Play) extension")
private val config = system.settings.config.getConfig("kamon.play")
+ val httpServerMetrics = {
+ val metricsExtension = Metrics.get(system)
+ val factory = metricsExtension.instrumentFactory(HttpServerMetrics.category)
+ val entity = Entity("play-server", HttpServerMetrics.category)
+
+ metricsExtension.register(entity, new HttpServerMetrics(factory)).recorder
+ }
- val httpServerMetrics = Kamon(Metrics)(system).register(HttpServerMetrics, HttpServerMetrics.Factory).get
val defaultDispatcher = system.dispatchers.lookup(config.getString("dispatcher"))
val includeTraceToken: Boolean = config.getBoolean("automatic-trace-token-propagation")
val traceTokenHeaderName: String = config.getString("trace-token-header-name")
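The httpServerMetrics block above is the general recipe for registering a custom recorder: ask the Metrics extension for an instrument factory for the category, build the recorder, and register it under an Entity. A hedged sketch of the same recipe ("my-server" is an illustrative entity name):

    import akka.actor.ActorSystem
    import kamon.http.HttpServerMetrics
    import kamon.metric.{ Entity, Metrics }

    def registerServerMetrics(system: ActorSystem): HttpServerMetrics = {
      val metricsExtension = Metrics.get(system)
      val factory = metricsExtension.instrumentFactory(HttpServerMetrics.category)
      val entity = Entity("my-server", HttpServerMetrics.category)

      // register returns a registration; the live recorder hangs off of it.
      metricsExtension.register(entity, new HttpServerMetrics(factory)).recorder
    }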
diff --git a/kamon-play/src/main/scala/kamon/play/action/KamonTraceActions.scala b/kamon-play/src/main/scala/kamon/play/action/KamonTraceActions.scala
index 36eabf8e..715cac5e 100644
--- a/kamon-play/src/main/scala/kamon/play/action/KamonTraceActions.scala
+++ b/kamon-play/src/main/scala/kamon/play/action/KamonTraceActions.scala
@@ -16,13 +16,13 @@
package kamon.play.action
-import kamon.trace.TraceRecorder
+import kamon.trace.TraceContext
import play.api.mvc._
import scala.concurrent.Future
case class TraceName[A](name: String)(action: Action[A]) extends Action[A] {
def apply(request: Request[A]): Future[Result] = {
- TraceRecorder.rename(name)
+ TraceContext.currentContext.rename(name)
action(request)
}
lazy val parser = action.parser
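For context, `TraceName` simply renames the current trace before delegating to the wrapped action, so a controller can tag its traces declaratively. A usage sketch (controller and trace names are illustrative):

    import kamon.play.action.TraceName
    import play.api.mvc._

    object UsersController extends Controller {
      // The active trace is renamed to "get-users" before the wrapped Action runs.
      def list = TraceName("get-users") {
        Action { Ok("users") }
      }
    }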
diff --git a/kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala b/kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala
index 1bafa8ff..38f499b4 100644
--- a/kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala
+++ b/kamon-play/src/main/scala/kamon/play/instrumentation/RequestInstrumentation.scala
@@ -18,7 +18,7 @@ package kamon.play.instrumentation
import kamon.Kamon
import kamon.play.{ Play, PlayExtension }
import kamon.trace.TraceLocal.{ HttpContextKey, HttpContext }
-import kamon.trace.{ TraceLocal, TraceContextAware, TraceRecorder }
+import kamon.trace._
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
import play.api.Routes
@@ -41,15 +41,17 @@ class RequestInstrumentation {
@Before("call(* play.api.GlobalSettings.onRouteRequest(..)) && args(requestHeader)")
def beforeRouteRequest(requestHeader: RequestHeader): Unit = {
- val system = Akka.system()
- val playExtension = Kamon(Play)(system)
- val defaultTraceName = playExtension.generateTraceName(requestHeader)
+ implicit val system = Akka.system()
+ val playExtension = Kamon(Play)
+ val tracer = Kamon(Tracer)
+ val defaultTraceName = playExtension.generateTraceName(requestHeader)
val token = if (playExtension.includeTraceToken) {
requestHeader.headers.toSimpleMap.find(_._1 == playExtension.traceTokenHeaderName).map(_._2)
} else None
- TraceRecorder.start(defaultTraceName, token)(system)
+ val newContext = token.map(t ⇒ tracer.newContext(defaultTraceName, t)).getOrElse(tracer.newContext(defaultTraceName))
+ TraceContext.setCurrentContext(newContext)
}
@Around("call(* play.api.GlobalSettings.doFilter(*)) && args(next)")
@@ -59,11 +61,10 @@ class RequestInstrumentation {
val executor = Kamon(Play)(Akka.system()).defaultDispatcher
def onResult(result: Result): Result = {
- TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
+ TraceContext.map { ctx ⇒
ctx.finish()
- val playExtension = Kamon(Play)(system)
-
+ val playExtension = ctx.lookupExtension(Play)
recordHttpServerMetrics(result.header, ctx.name, playExtension)
if (playExtension.includeTraceToken) result.withHeaders(playExtension.traceTokenHeaderName -> ctx.token)
@@ -75,7 +76,7 @@ class RequestInstrumentation {
storeDiagnosticData(requestHeader)
// Override the current trace name.
- normaliseTraceName(requestHeader).map(TraceRecorder.rename)
+ normaliseTraceName(requestHeader).map(TraceContext.currentContext.rename)
// Invoke the action
next(requestHeader).map(onResult)(executor)
@@ -85,8 +86,8 @@ class RequestInstrumentation {
@Before("call(* play.api.GlobalSettings.onError(..)) && args(request, ex)")
def beforeOnError(request: TraceContextAware, ex: Throwable): Unit = {
- TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
- recordHttpServerMetrics(InternalServerError.header, ctx.name, Kamon(Play)(system))
+ TraceContext.map { ctx ⇒
+ recordHttpServerMetrics(InternalServerError.header, ctx.name, ctx.lookupExtension(Play))
}
}
@@ -102,7 +103,7 @@ class RequestInstrumentation {
}
object RequestInstrumentation {
- import kamon.metric.Metrics.AtomicGetOrElseUpdateForTriemap
+ import kamon.util.TriemapAtomicGetOrElseUpdate.Syntax
import java.util.Locale
import scala.collection.concurrent.TrieMap
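The migration pattern in this file is mechanical: `TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒ ... }` becomes `TraceContext.map { ctx ⇒ ... }`, and extensions are resolved from the context itself via `lookupExtension` instead of from an ActorSystem. A condensed sketch of the new shape (the surrounding method is hypothetical):

    import kamon.play.Play
    import kamon.trace.TraceContext

    def finishCurrentTrace(): Unit =
      TraceContext.map { ctx ⇒
        val playExtension = ctx.lookupExtension(Play) // no ActorSystem threading required
        ctx.finish()
        playExtension.includeTraceToken               // settings are reachable the same way
      }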
diff --git a/kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala b/kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala
index fca13c4c..fc58f9da 100644
--- a/kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala
+++ b/kamon-play/src/main/scala/kamon/play/instrumentation/WSInstrumentation.scala
@@ -16,9 +16,8 @@
package kamon.play.instrumentation
-import kamon.Kamon
import kamon.play.Play
-import kamon.trace.{ SegmentCategory, TraceRecorder }
+import kamon.trace.{ TraceContext, SegmentCategory }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation.{ Around, Aspect, Pointcut }
import play.api.libs.ws.{ WSRequest, WSResponse }
@@ -33,8 +32,8 @@ class WSInstrumentation {
@Around("onExecuteRequest(request)")
def aroundExecuteRequest(pjp: ProceedingJoinPoint, request: WSRequest): Any = {
- TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
- val playExtension = Kamon(Play)(system)
+ TraceContext.map { ctx ⇒
+ val playExtension = ctx.lookupExtension(Play)
val executor = playExtension.defaultDispatcher
val segmentName = playExtension.generateHttpClientSegmentName(request)
val segment = ctx.startSegment(segmentName, SegmentCategory.HttpClient, Play.SegmentLibraryName)
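Outside the aspect, the same segment lifecycle can be driven by hand around any asynchronous call; a sketch under the assumption that a TraceContext is current (the `timedCall` helper below is hypothetical):

    import kamon.play.Play
    import kamon.trace.{ SegmentCategory, TraceContext }
    import scala.concurrent.{ ExecutionContext, Future }

    def timedCall[T](segmentName: String)(body: ⇒ Future[T])(implicit ec: ExecutionContext): Future[T] =
      TraceContext.map { ctx ⇒
        val segment = ctx.startSegment(segmentName, SegmentCategory.HttpClient, Play.SegmentLibraryName)
        val response = body
        response.onComplete(_ ⇒ segment.finish()) // close the segment when the call completes
        response
      } getOrElse body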
diff --git a/kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala b/kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala
index 564d5abe..0feecb82 100644
--- a/kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala
+++ b/kamon-play/src/test/scala/kamon/play/RequestInstrumentationSpec.scala
@@ -17,10 +17,11 @@ package kamon.play
import kamon.Kamon
import kamon.http.HttpServerMetrics
-import kamon.metric.{ CollectionContext, Metrics, TraceMetrics }
+import kamon.metric.{ Metrics, TraceMetrics }
+import kamon.metric.instrument.CollectionContext
import kamon.play.action.TraceName
import kamon.trace.TraceLocal.HttpContextKey
-import kamon.trace.{ TraceLocal, TraceRecorder }
+import kamon.trace.{ TraceLocal, TraceContext }
import org.scalatestplus.play._
import play.api.DefaultGlobal
import play.api.http.Writeable
@@ -118,7 +119,7 @@ class RequestInstrumentationSpec extends PlaySpec with OneServerPerSuite {
"respond to the Async Action with X-Trace-Token and the renamed trace" in {
val result = Await.result(route(FakeRequest(GET, "/async-renamed").withHeaders(traceTokenHeader)).get, 10 seconds)
- TraceRecorder.currentContext.name must be("renamed-trace")
+ TraceContext.currentContext.name must be("renamed-trace")
Some(result.header.headers(traceTokenHeaderName)) must be(expectedToken)
}
@@ -129,17 +130,17 @@ class RequestInstrumentationSpec extends PlaySpec with OneServerPerSuite {
"response to the getRouted Action and normalise the current TraceContext name" in {
Await.result(WS.url("http://localhost:19001/getRouted").get(), 10 seconds)
- Kamon(Metrics)(Akka.system()).storage.get(TraceMetrics("getRouted.get")) must not be empty
+ Kamon(Metrics)(Akka.system()).find("getRouted.get", "trace") must not be empty
}
"response to the postRouted Action and normalise the current TraceContext name" in {
Await.result(WS.url("http://localhost:19001/postRouted").post("content"), 10 seconds)
- Kamon(Metrics)(Akka.system()).storage.get(TraceMetrics("postRouted.post")) must not be empty
+ Kamon(Metrics)(Akka.system()).find("postRouted.post", "trace") must not be empty
}
"response to the showRouted Action and normalise the current TraceContext name" in {
Await.result(WS.url("http://localhost:19001/showRouted/2").get(), 10 seconds)
- Kamon(Metrics)(Akka.system()).storage.get(TraceMetrics("show.some.id.get")) must not be empty
+ Kamon(Metrics)(Akka.system()).find("show.some.id.get", "trace") must not be empty
}
"include HttpContext information for help to diagnose possible errors" in {
@@ -154,7 +155,7 @@ class RequestInstrumentationSpec extends PlaySpec with OneServerPerSuite {
"record http server metrics for all processed requests" in {
val collectionContext = CollectionContext(100)
- Kamon(Metrics)(Akka.system()).register(HttpServerMetrics, HttpServerMetrics.Factory).get.collect(collectionContext)
+ Kamon(Metrics)(Akka.system()).find("play-server", "http-server").get.collect(collectionContext)
for (repetition ← 1 to 10) {
Await.result(route(FakeRequest(GET, "/default").withHeaders(traceTokenHeader)).get, 10 seconds)
@@ -168,13 +169,13 @@ class RequestInstrumentationSpec extends PlaySpec with OneServerPerSuite {
Await.result(routeWithOnError(FakeRequest(GET, "/error").withHeaders(traceTokenHeader)).get, 10 seconds)
}
- val snapshot = Kamon(Metrics)(Akka.system()).register(HttpServerMetrics, HttpServerMetrics.Factory).get.collect(collectionContext)
- snapshot.countsPerTraceAndStatusCode("GET: /default")("200").count must be(10)
- snapshot.countsPerTraceAndStatusCode("GET: /notFound")("404").count must be(5)
- snapshot.countsPerTraceAndStatusCode("GET: /error")("500").count must be(5)
- snapshot.countsPerStatusCode("200").count must be(10)
- snapshot.countsPerStatusCode("404").count must be(5)
- snapshot.countsPerStatusCode("500").count must be(5)
+ val snapshot = Kamon(Metrics)(Akka.system()).find("play-server", "http-server").get.collect(collectionContext)
+ snapshot.counter("GET: /default_200").get.count must be(10)
+ snapshot.counter("GET: /notFound_404").get.count must be(5)
+ snapshot.counter("GET: /error_500").get.count must be(5)
+ snapshot.counter("200").get.count must be(10)
+ snapshot.counter("404").get.count must be(5)
+ snapshot.counter("500").get.count must be(5)
}
}
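Note the flattened counter naming: per-trace status counts now live under "<traceName>_<statusCode>" keys alongside the plain status-code totals. A lookup sketch against an EntitySnapshot, using only the accessors exercised above (the helper itself is hypothetical):

    import kamon.metric.EntitySnapshot

    // Read a per-trace status counter from a snapshot, falling back to zero.
    def statusCount(snapshot: EntitySnapshot, traceName: String, status: Int): Long =
      snapshot.counter(s"${traceName}_$status").map(_.count).getOrElse(0L)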
@@ -186,7 +187,7 @@ class RequestInstrumentationSpec extends PlaySpec with OneServerPerSuite {
object TraceLocalFilter extends Filter {
override def apply(next: (RequestHeader) ⇒ Future[Result])(header: RequestHeader): Future[Result] = {
- TraceRecorder.withTraceContext(TraceRecorder.currentContext) {
+ TraceContext.withContext(TraceContext.currentContext) {
TraceLocal.store(TraceLocalKey)(header.headers.get(traceLocalStorageKey).getOrElse("unknown"))
diff --git a/kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala b/kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala
index 3629c1d1..3dec2ebf 100644
--- a/kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala
+++ b/kamon-play/src/test/scala/kamon/play/WSInstrumentationSpec.scala
@@ -17,9 +17,8 @@
package kamon.play
import kamon.Kamon
-import kamon.metric.TraceMetrics.TraceMetricsSnapshot
-import kamon.metric.{ Metrics, TraceMetrics }
-import kamon.trace.{ SegmentCategory, SegmentMetricIdentity, TraceRecorder }
+import kamon.metric.{ Metrics, EntitySnapshot, TraceMetrics }
+import kamon.trace.{ Tracer, TraceContext, SegmentCategory }
import org.scalatest.{ Matchers, WordSpecLike }
import org.scalatestplus.play.OneServerPerSuite
import play.api.libs.ws.WS
@@ -33,7 +32,7 @@ import scala.concurrent.Await
import scala.concurrent.duration._
class WSInstrumentationSpec extends WordSpecLike with Matchers with OneServerPerSuite {
-
+ import kamon.metric.TraceMetricsSpec.SegmentSyntax
System.setProperty("config.file", "./kamon-play/src/test/resources/conf/application.conf")
implicit override lazy val app = FakeApplication(withRoutes = {
@@ -47,29 +46,32 @@ class WSInstrumentationSpec extends WordSpecLike with Matchers with OneServerPer
Await.result(route(FakeRequest(GET, "/inside")).get, 10 seconds)
val snapshot = takeSnapshotOf("GET: /inside")
- snapshot.elapsedTime.numberOfMeasurements should be(1)
- snapshot.segments.size should be(1)
- snapshot.segments(SegmentMetricIdentity("http://localhost:19001/async", SegmentCategory.HttpClient, Play.SegmentLibraryName)).numberOfMeasurements should be(1)
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ // snapshot.segments.size should be(1)
+ // snapshot.segment("http://localhost:19001/async", SegmentCategory.HttpClient, Play.SegmentLibraryName).numberOfMeasurements should be(1)
}
"propagate the TraceContext outside an Action and complete the WS request" in {
- TraceRecorder.withNewTraceContext("trace-outside-action") {
+ TraceContext.withContext(newContext("trace-outside-action")) {
Await.result(WS.url("http://localhost:19001/outside").get(), 10 seconds)
- TraceRecorder.finish()
- }(Akka.system())
+ TraceContext.currentContext.finish()
+ }
val snapshot = takeSnapshotOf("trace-outside-action")
- snapshot.elapsedTime.numberOfMeasurements should be(1)
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
snapshot.segments.size should be(1)
- snapshot.segments(SegmentMetricIdentity("http://localhost:19001/outside", SegmentCategory.HttpClient, Play.SegmentLibraryName)).numberOfMeasurements should be(1)
+ snapshot.segment("http://localhost:19001/outside", SegmentCategory.HttpClient, Play.SegmentLibraryName).numberOfMeasurements should be(1)
}
}
- def takeSnapshotOf(traceName: String): TraceMetricsSnapshot = {
- val recorder = Kamon(Metrics)(Akka.system()).register(TraceMetrics(traceName), TraceMetrics.Factory)
+ def newContext(name: String): TraceContext =
+ Kamon(Tracer)(Akka.system).newContext(name)
+
+ def takeSnapshotOf(traceName: String): EntitySnapshot = {
+ val recorder = Kamon(Metrics)(Akka.system()).register(TraceMetrics, traceName).get.recorder
val collectionContext = Kamon(Metrics)(Akka.system()).buildDefaultCollectionContext
- recorder.get.collect(collectionContext)
+ recorder.collect(collectionContext)
}
def callWSinsideController(url: String) = Action.async {
diff --git a/kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala b/kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala
index eda02f4e..94c91a8a 100644
--- a/kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala
+++ b/kamon-playground/src/main/scala/test/SimpleRequestProcessor.scala
@@ -20,10 +20,10 @@ import akka.actor._
import akka.routing.RoundRobinPool
import akka.util.Timeout
import kamon.Kamon
-import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import kamon.metric._
import kamon.spray.KamonTraceDirectives
-import kamon.trace.{ Trace, SegmentCategory, TraceRecorder }
+import kamon.trace.{ TraceContext, SegmentCategory }
import spray.http.{ StatusCodes, Uri }
import spray.httpx.RequestBuilding
import spray.routing.SimpleRoutingApp
@@ -46,27 +46,9 @@ object SimpleRequestProcessor extends App with SimpleRoutingApp with RequestBuil
def receive: Actor.Receive = { case any ⇒ sender ! any }
}), "com")
- Kamon(Trace).subscribe(printer)
- //val buffer = system.actorOf(TickMetricSnapshotBuffer.props(30 seconds, printer))
-
- //Kamon(Metrics).subscribe(CustomMetric, "*", buffer, permanently = true)
- //Kamon(Metrics).subscribe(ActorMetrics, "*", printer, permanently = true)
-
implicit val timeout = Timeout(30 seconds)
- val counter = Kamon(UserMetrics).registerCounter("requests")
- Kamon(UserMetrics).registerCounter("requests-2")
- Kamon(UserMetrics).registerCounter("requests-3")
-
- Kamon(UserMetrics).registerHistogram("histogram-1")
- Kamon(UserMetrics).registerHistogram("histogram-2")
-
- Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-1")
- Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-2")
- Kamon(UserMetrics).registerMinMaxCounter("min-max-counter-3")
-
- //Kamon(UserMetrics).registerGauge("test-gauge")(() => 10L)
-
+ val counter = Kamon(UserMetrics).counter("requests")
val pipeline = sendReceive
val replier = system.actorOf(Props[Replier].withRouter(RoundRobinPool(nrOfInstances = 4)), "replier")
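The UserMetrics surface shrank from explicit registerCounter/registerHistogram/registerMinMaxCounter calls to plain factory methods that create or look up the instrument on first use. A minimal sketch (the instrument name is illustrative, and `increment()` is assumed from the standard Counter API):

    import akka.actor.ActorSystem
    import kamon.Kamon
    import kamon.metric.UserMetrics

    object UserMetricsSketch extends App {
      implicit val system = ActorSystem("user-metrics-sketch")

      val requests = Kamon(UserMetrics).counter("requests") // created or reused on first call
      requests.increment()
    }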
@@ -133,7 +115,7 @@ object SimpleRequestProcessor extends App with SimpleRoutingApp with RequestBuil
} ~
path("segment") {
complete {
- val segment = TraceRecorder.currentContext.startSegment("hello-world", SegmentCategory.HttpClient, "none")
+ val segment = TraceContext.currentContext.startSegment("hello-world", SegmentCategory.HttpClient, "none")
(replier ? "hello").mapTo[String].onComplete { t ⇒
segment.finish()
}
@@ -179,7 +161,7 @@ object Verifier extends App {
class Replier extends Actor with ActorLogging {
def receive = {
case anything ⇒
- if (TraceRecorder.currentContext.isEmpty)
+ if (TraceContext.currentContext.isEmpty)
log.warning("PROCESSING A MESSAGE WITHOUT CONTEXT")
//log.info("Processing at the Replier, and self is: {}", self)
diff --git a/kamon-scala/src/main/resources/META-INF/aop.xml b/kamon-scala/src/main/resources/META-INF/aop.xml
new file mode 100644
index 00000000..a1e98a9f
--- /dev/null
+++ b/kamon-scala/src/main/resources/META-INF/aop.xml
@@ -0,0 +1,17 @@
+<!DOCTYPE aspectj PUBLIC "-//AspectJ//DTD//EN" "http://www.eclipse.org/aspectj/dtd/aspectj.dtd">
+
+<aspectj>
+ <aspects>
+
+ <!-- Futures -->
+ <aspect name="kamon.scala.instrumentation.FutureInstrumentation"/>
+ <aspect name="kamon.scalaz.instrumentation.FutureInstrumentation"/>
+
+ </aspects>
+
+ <weaver>
+ <include within="scala.concurrent..*"/>
+ <include within="scalaz.concurrent..*"/>
+ </weaver>
+
+</aspectj>
\ No newline at end of file
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/scala/FutureInstrumentation.scala b/kamon-scala/src/main/scala/kamon/scala/instrumentation/FutureInstrumentation.scala
index bda2da78..01514869 100644
--- a/kamon-core/src/main/scala/kamon/instrumentation/scala/FutureInstrumentation.scala
+++ b/kamon-scala/src/main/scala/kamon/scala/instrumentation/FutureInstrumentation.scala
@@ -14,9 +14,9 @@
* =========================================================================================
*/
-package kamon.instrumentation.scala
+package kamon.scala.instrumentation
-import kamon.trace.{ TraceContextAware, TraceRecorder }
+import kamon.trace.{ TraceContext, TraceContextAware }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
@@ -40,7 +40,7 @@ class FutureInstrumentation {
@Around("futureRelatedRunnableExecution(runnable)")
def aroundExecution(pjp: ProceedingJoinPoint, runnable: TraceContextAware): Any = {
- TraceRecorder.withInlineTraceContextReplacement(runnable.traceContext) {
+ TraceContext.withContext(runnable.traceContext) {
pjp.proceed()
}
}
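The spec for this aspect (later in this diff) asserts the behavior directly: a Future body observes the TraceContext that was current when the Future was created. A standalone sketch of the same idea, assuming the aspect is woven and Kamon's Tracer extension is loaded:

    import akka.actor.ActorSystem
    import kamon.Kamon
    import kamon.trace.{ TraceContext, Tracer }
    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global

    object FutureContextSketch extends App {
      implicit val system = ActorSystem("future-context-sketch")
      val context = Kamon(Tracer)(system).newContext("future-body")

      val captured = TraceContext.withContext(context) {
        Future(TraceContext.currentContext) // the aspect restores `context` inside the body
      }
      // Once completed, `captured` holds the same context that was current at creation.
    }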
diff --git a/kamon-core/src/main/scala/kamon/instrumentation/scalaz/FutureInstrumentation.scala b/kamon-scala/src/main/scala/kamon/scalaz/instrumentation/FutureInstrumentation.scala
index 65caaa8f..b5aadbd3 100644
--- a/kamon-core/src/main/scala/kamon/instrumentation/scalaz/FutureInstrumentation.scala
+++ b/kamon-scala/src/main/scala/kamon/scalaz/instrumentation/FutureInstrumentation.scala
@@ -14,9 +14,9 @@
* =========================================================================================
*/
-package kamon.instrumentation.scalaz
+package kamon.scalaz.instrumentation
-import kamon.trace.{ TraceContextAware, TraceRecorder }
+import kamon.trace.{ TraceContext, TraceContextAware }
import org.aspectj.lang.ProceedingJoinPoint
import org.aspectj.lang.annotation._
@@ -40,7 +40,7 @@ class FutureInstrumentation {
@Around("futureRelatedCallableExecution(callable)")
def aroundExecution(pjp: ProceedingJoinPoint, callable: TraceContextAware): Any =
- TraceRecorder.withInlineTraceContextReplacement(callable.traceContext) {
+ TraceContext.withContext(callable.traceContext) {
pjp.proceed()
}
diff --git a/kamon-core/src/test/scala/kamon/instrumentation/scala/FutureInstrumentationSpec.scala b/kamon-scala/src/test/scala/kamon/scala/instrumentation/FutureInstrumentationSpec.scala
index 31afd3ff..d70e88ae 100644
--- a/kamon-core/src/test/scala/kamon/instrumentation/scala/FutureInstrumentationSpec.scala
+++ b/kamon-scala/src/test/scala/kamon/scala/instrumentation/FutureInstrumentationSpec.scala
@@ -13,18 +13,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
-package kamon.instrumentation.scala
+package kamon.scala.instrumentation
-import akka.actor.ActorSystem
-import akka.testkit.TestKit
-import kamon.trace.TraceRecorder
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.TraceContext
+import org.scalatest.OptionValues
import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
-import org.scalatest.{ Matchers, OptionValues, WordSpecLike }
import scala.concurrent.Future
-class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentation-spec")) with WordSpecLike with Matchers
- with ScalaFutures with PatienceConfiguration with OptionValues {
+class FutureInstrumentationSpec extends BaseKamonSpec("future-instrumentation-spec") with ScalaFutures
+ with PatienceConfiguration with OptionValues {
implicit val execContext = system.dispatcher
@@ -32,10 +31,10 @@ class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentat
"capture the TraceContext available when created" which {
"must be available when executing the future's body" in {
- val (future, testTraceContext) = TraceRecorder.withNewTraceContext("future-body") {
- val future = Future(TraceRecorder.currentContext)
+ val (future, testTraceContext) = TraceContext.withContext(newContext("future-body")) {
+ val future = Future(TraceContext.currentContext)
- (future, TraceRecorder.currentContext)
+ (future, TraceContext.currentContext)
}
whenReady(future)(ctxInFuture ⇒
@@ -44,14 +43,14 @@ class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentat
"must be available when executing callbacks on the future" in {
- val (future, testTraceContext) = TraceRecorder.withNewTraceContext("future-body") {
+ val (future, testTraceContext) = TraceContext.withContext(newContext("future-body")) {
val future = Future("Hello Kamon!")
// The TraceContext is expected to be available during all intermediate processing.
.map(_.length)
.flatMap(len ⇒ Future(len.toString))
- .map(s ⇒ TraceRecorder.currentContext)
+ .map(s ⇒ TraceContext.currentContext)
- (future, TraceRecorder.currentContext)
+ (future, TraceContext.currentContext)
}
whenReady(future)(ctxInFuture ⇒
diff --git a/kamon-core/src/test/scala/kamon/instrumentation/scalaz/FutureInstrumentationSpec.scala b/kamon-scala/src/test/scala/kamon/scalaz/instrumentation/FutureInstrumentationSpec.scala
index 29bf96f8..ba8fa18c 100644
--- a/kamon-core/src/test/scala/kamon/instrumentation/scalaz/FutureInstrumentationSpec.scala
+++ b/kamon-scala/src/test/scala/kamon/scalaz/instrumentation/FutureInstrumentationSpec.scala
@@ -13,18 +13,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
-package kamon.instrumentation.scalaz
+package kamon.scalaz.instrumentation
-import akka.actor.ActorSystem
-import akka.testkit.TestKit
-import kamon.trace.TraceRecorder
+import java.util.concurrent.Executors
+
+import kamon.testkit.BaseKamonSpec
+import kamon.trace.TraceContext
+import org.scalatest.OptionValues
import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
-import org.scalatest.{ Matchers, OptionValues, WordSpecLike }
+
import scalaz.concurrent.Future
-import java.util.concurrent.Executors
-class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentation-spec")) with WordSpecLike with Matchers
- with ScalaFutures with PatienceConfiguration with OptionValues {
+class FutureInstrumentationSpec extends BaseKamonSpec("future-instrumentation-spec") with ScalaFutures
+ with PatienceConfiguration with OptionValues {
implicit val execContext = Executors.newCachedThreadPool()
@@ -32,10 +33,10 @@ class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentat
"capture the TraceContext available when created" which {
"must be available when executing the future's body" in {
- val (future, testTraceContext) = TraceRecorder.withNewTraceContext("future-body") {
- val future = Future(TraceRecorder.currentContext).start
+ val (future, testTraceContext) = TraceContext.withContext(newContext("future-body")) {
+ val future = Future(TraceContext.currentContext).start
- (future, TraceRecorder.currentContext)
+ (future, TraceContext.currentContext)
}
val ctxInFuture = future.run
@@ -44,14 +45,14 @@ class FutureInstrumentationSpec extends TestKit(ActorSystem("future-instrumentat
"must be available when executing callbacks on the future" in {
- val (future, testTraceContext) = TraceRecorder.withNewTraceContext("future-body") {
+ val (future, testTraceContext) = TraceContext.withContext(newContext("future-body")) {
val future = Future("Hello Kamon!")
// The TraceContext is expected to be available during all intermediate processing.
.map(_.length)
.flatMap(len ⇒ Future(len.toString))
- .map(s ⇒ TraceRecorder.currentContext)
+ .map(s ⇒ TraceContext.currentContext)
- (future.start, TraceRecorder.currentContext)
+ (future.start, TraceContext.currentContext)
}
val ctxInFuture = future.run
diff --git a/kamon-spray/src/main/resources/META-INF/aop.xml b/kamon-spray/src/main/resources/META-INF/aop.xml
index 0e5726c6..00e8763a 100644
--- a/kamon-spray/src/main/resources/META-INF/aop.xml
+++ b/kamon-spray/src/main/resources/META-INF/aop.xml
@@ -2,14 +2,16 @@
<aspectj>
<aspects>
+
<!-- Spray Server -->
- <aspect name="spray.can.server.ServerRequestInstrumentation"/>
+ <aspect name="spray.can.server.instrumentation.ServerRequestInstrumentation"/>
<!-- Spray Client -->
<aspect name="spray.can.client.ClientRequestInstrumentation"/>
+
</aspects>
<weaver>
- <include within="spray.can..*"/>
+ <include within="spray..*"/>
</weaver>
</aspectj>
diff --git a/kamon-spray/src/main/resources/reference.conf b/kamon-spray/src/main/resources/reference.conf
index 5c5e9317..bdba21cb 100644
--- a/kamon-spray/src/main/resources/reference.conf
+++ b/kamon-spray/src/main/resources/reference.conf
@@ -4,6 +4,7 @@
kamon {
spray {
+
# Header name used when propagating the `TraceContext.token` value across applications.
trace-token-header-name = "X-Trace-Token"
@@ -23,16 +24,16 @@ kamon {
client {
# Strategy used for automatic trace segment generation when issuing requests with spray-client. The possible values
# are:
- # - pipelining: measures the time during which the user application code is waiting for a spray-client request to
+ # - request-level: measures the time during which the user application code is waiting for a spray-client request to
# complete, by attaching a callback to the Future[HttpResponse] returned by `spray.client.pipelining.sendReceive`.
# If `spray.client.pipelining.sendReceive` is not used, the segment measurement won't be performed.
- # - internal: measures the internal time taken by spray-client to finish a request. Sometimes the user application
+ # - host-level: measures the internal time taken by spray-client to finish a request. Sometimes the user application
# code has a finite future timeout (like when using `spray.client.pipelining.sendReceive`) that doesn't match
# the actual amount of time spray might take internally to resolve a request, counting retries, redirects,
# connection timeouts and so on. If using the host-level strategy, the measured time will include the entire time
# since the request has been received by the corresponding `HttpHostConnector` until a response is sent back
# to the requester.
- segment-collection-strategy = pipelining
+ instrumentation-level = request-level
}
}
}
\ No newline at end of file
diff --git a/kamon-spray/src/main/scala/kamon/spray/KamonTraceDirectives.scala b/kamon-spray/src/main/scala/kamon/spray/KamonTraceDirectives.scala
index e98b63d9..4eefee95 100644
--- a/kamon-spray/src/main/scala/kamon/spray/KamonTraceDirectives.scala
+++ b/kamon-spray/src/main/scala/kamon/spray/KamonTraceDirectives.scala
@@ -17,11 +17,11 @@ package kamon.spray
import spray.routing.directives.BasicDirectives
import spray.routing._
-import kamon.trace.TraceRecorder
+import kamon.trace.TraceContext
trait KamonTraceDirectives extends BasicDirectives {
def traceName(name: String): Directive0 = mapRequest { req ⇒
- TraceRecorder.rename(name)
+ TraceContext.currentContext.rename(name)
req
}
}
diff --git a/kamon-spray/src/main/scala/kamon/spray/Spray.scala b/kamon-spray/src/main/scala/kamon/spray/SprayExtension.scala
index ab8d6a7d..3df8d972 100644
--- a/kamon-spray/src/main/scala/kamon/spray/Spray.scala
+++ b/kamon-spray/src/main/scala/kamon/spray/SprayExtension.scala
@@ -18,47 +18,49 @@ package kamon.spray
import akka.actor.{ ExtendedActorSystem, ExtensionIdProvider, ExtensionId }
import akka.actor
+import akka.event.{ Logging, LoggingAdapter }
import kamon.Kamon
import kamon.http.HttpServerMetrics
-import kamon.metric.Metrics
+import kamon.metric.{ Entity, Metrics }
import spray.http.HttpHeaders.Host
import spray.http.HttpRequest
object Spray extends ExtensionId[SprayExtension] with ExtensionIdProvider {
def lookup(): ExtensionId[_ <: actor.Extension] = Spray
- def createExtension(system: ExtendedActorSystem): SprayExtension = new SprayExtension(system)
+ def createExtension(system: ExtendedActorSystem): SprayExtension = new SprayExtensionImpl(system)
val SegmentLibraryName = "spray-client"
}
-object ClientSegmentCollectionStrategy {
- sealed trait Strategy
- case object Pipelining extends Strategy
- case object Internal extends Strategy
+trait SprayExtension extends Kamon.Extension {
+ def settings: SprayExtensionSettings
+ def log: LoggingAdapter
+ def httpServerMetrics: HttpServerMetrics
+ def generateTraceName(request: HttpRequest): String
+ def generateRequestLevelApiSegmentName(request: HttpRequest): String
+ def generateHostLevelApiSegmentName(request: HttpRequest): String
}
-class SprayExtension(private val system: ExtendedActorSystem) extends Kamon.Extension {
- private val config = system.settings.config.getConfig("kamon.spray")
+class SprayExtensionImpl(system: ExtendedActorSystem) extends SprayExtension {
+ val settings = SprayExtensionSettings(system)
+ val log = Logging(system, "SprayExtension")
- val includeTraceToken: Boolean = config.getBoolean("automatic-trace-token-propagation")
- val traceTokenHeaderName: String = config.getString("trace-token-header-name")
- val httpServerMetrics = Kamon(Metrics)(system).register(HttpServerMetrics, HttpServerMetrics.Factory).get
- // It's safe to assume that HttpServerMetrics will always exist because there is no particular filter for it.
+ val httpServerMetrics = {
+ val metricsExtension = Metrics.get(system)
+ val factory = metricsExtension.instrumentFactory(HttpServerMetrics.category)
+ val entity = Entity("spray-server", HttpServerMetrics.category)
- private val nameGeneratorFQN = config.getString("name-generator")
- private val nameGenerator: SprayNameGenerator = system.dynamicAccess.createInstanceFor[SprayNameGenerator](nameGeneratorFQN, Nil).get // let's bubble up any problems.
+ metricsExtension.register(entity, new HttpServerMetrics(factory)).recorder
+ }
- val clientSegmentCollectionStrategy: ClientSegmentCollectionStrategy.Strategy =
- config.getString("client.segment-collection-strategy") match {
- case "pipelining" ⇒ ClientSegmentCollectionStrategy.Pipelining
- case "internal" ⇒ ClientSegmentCollectionStrategy.Internal
- case other ⇒ throw new IllegalArgumentException(s"Configured segment-collection-strategy [$other] is invalid, " +
- s"only pipelining and internal are valid options.")
- }
+ def generateTraceName(request: HttpRequest): String =
+ settings.nameGenerator.generateTraceName(request)
- def generateTraceName(request: HttpRequest): String = nameGenerator.generateTraceName(request)
- def generateRequestLevelApiSegmentName(request: HttpRequest): String = nameGenerator.generateRequestLevelApiSegmentName(request)
- def generateHostLevelApiSegmentName(request: HttpRequest): String = nameGenerator.generateHostLevelApiSegmentName(request)
+ def generateRequestLevelApiSegmentName(request: HttpRequest): String =
+ settings.nameGenerator.generateRequestLevelApiSegmentName(request)
+
+ def generateHostLevelApiSegmentName(request: HttpRequest): String =
+ settings.nameGenerator.generateHostLevelApiSegmentName(request)
}
trait SprayNameGenerator {
@@ -68,14 +70,19 @@ trait SprayNameGenerator {
}
class DefaultSprayNameGenerator extends SprayNameGenerator {
- def hostFromHeaders(request: HttpRequest): Option[String] = request.header[Host].map(_.host)
def generateRequestLevelApiSegmentName(request: HttpRequest): String = {
val uriAddress = request.uri.authority.host.address
if (uriAddress.equals("")) hostFromHeaders(request).getOrElse("unknown-host") else uriAddress
}
- def generateHostLevelApiSegmentName(request: HttpRequest): String = hostFromHeaders(request).getOrElse("unknown-host")
+ def generateHostLevelApiSegmentName(request: HttpRequest): String =
+ hostFromHeaders(request).getOrElse("unknown-host")
+
+ def generateTraceName(request: HttpRequest): String =
+ request.method.value + ": " + request.uri.path
+
+ private def hostFromHeaders(request: HttpRequest): Option[String] =
+ request.header[Host].map(_.host)
- def generateTraceName(request: HttpRequest): String = request.method.value + ": " + request.uri.path
}
diff --git a/kamon-spray/src/main/scala/kamon/spray/SprayExtensionSettings.scala b/kamon-spray/src/main/scala/kamon/spray/SprayExtensionSettings.scala
new file mode 100644
index 00000000..44c71eaf
--- /dev/null
+++ b/kamon-spray/src/main/scala/kamon/spray/SprayExtensionSettings.scala
@@ -0,0 +1,35 @@
+package kamon.spray
+
+import akka.actor.ExtendedActorSystem
+
+case class SprayExtensionSettings(
+ includeTraceTokenHeader: Boolean,
+ traceTokenHeaderName: String,
+ nameGenerator: SprayNameGenerator,
+ clientInstrumentationLevel: ClientInstrumentationLevel.Level)
+
+object SprayExtensionSettings {
+ def apply(system: ExtendedActorSystem): SprayExtensionSettings = {
+ val config = system.settings.config.getConfig("kamon.spray")
+
+ val includeTraceTokenHeader: Boolean = config.getBoolean("automatic-trace-token-propagation")
+ val traceTokenHeaderName: String = config.getString("trace-token-header-name")
+
+ val nameGeneratorFQN = config.getString("name-generator")
+ val nameGenerator: SprayNameGenerator = system.dynamicAccess.createInstanceFor[SprayNameGenerator](nameGeneratorFQN, Nil).get // let's bubble up any problems.
+
+ val clientInstrumentationLevel: ClientInstrumentationLevel.Level = config.getString("client.instrumentation-level") match {
+ case "request-level" ⇒ ClientInstrumentationLevel.RequestLevelAPI
+ case "host-level" ⇒ ClientInstrumentationLevel.HostLevelAPI
+ case other ⇒ sys.error(s"Invalid client instrumentation level [$other] found in configuration.")
+ }
+
+ SprayExtensionSettings(includeTraceTokenHeader, traceTokenHeaderName, nameGenerator, clientInstrumentationLevel)
+ }
+}
+
+object ClientInstrumentationLevel {
+ sealed trait Level
+ case object RequestLevelAPI extends Level
+ case object HostLevelAPI extends Level
+}
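The settings object is a one-shot snapshot of the `kamon.spray` config section, and `client.instrumentation-level` must be either `request-level` or `host-level` (anything else fails fast). A sketch of consuming the parsed level (the helper and its descriptions are illustrative):

    import akka.actor.ExtendedActorSystem
    import kamon.spray.{ ClientInstrumentationLevel, SprayExtensionSettings }

    def describeInstrumentation(system: ExtendedActorSystem): String = {
      val settings = SprayExtensionSettings(system)
      settings.clientInstrumentationLevel match {
        case ClientInstrumentationLevel.RequestLevelAPI ⇒ "segments measured around sendReceive"
        case ClientInstrumentationLevel.HostLevelAPI    ⇒ "segments measured inside the host connector"
      }
    }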
diff --git a/kamon-spray/src/main/scala/spray/can/client/ClientRequestInstrumentation.scala b/kamon-spray/src/main/scala/kamon/spray/instrumentation/ClientRequestInstrumentation.scala
index 813915c4..fa9063ad 100644
--- a/kamon-spray/src/main/scala/spray/can/client/ClientRequestInstrumentation.scala
+++ b/kamon-spray/src/main/scala/kamon/spray/instrumentation/ClientRequestInstrumentation.scala
@@ -21,8 +21,7 @@ import org.aspectj.lang.ProceedingJoinPoint
import spray.http._
import spray.http.HttpHeaders.RawHeader
import kamon.trace._
-import kamon.Kamon
-import kamon.spray.{ ClientSegmentCollectionStrategy, Spray }
+import kamon.spray.{ ClientInstrumentationLevel, Spray }
import akka.actor.ActorRef
import scala.concurrent.{ Future, ExecutionContext }
import akka.util.Timeout
@@ -47,10 +46,10 @@ class ClientRequestInstrumentation {
// This read to requestContext.traceContext takes care of initializing the aspect timely.
requestContext.traceContext
- TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
- val sprayExtension = Kamon(Spray)(system)
+ TraceContext.map { ctx ⇒
+ val sprayExtension = ctx.lookupExtension(Spray)
- if (sprayExtension.clientSegmentCollectionStrategy == ClientSegmentCollectionStrategy.Internal) {
+ if (sprayExtension.settings.clientInstrumentationLevel == ClientInstrumentationLevel.HostLevelAPI) {
if (requestContext.segment.isEmpty) {
val clientRequestName = sprayExtension.generateHostLevelApiSegmentName(request)
val segment = ctx.startSegment(clientRequestName, SegmentCategory.HttpClient, Spray.SegmentLibraryName)
@@ -74,7 +73,7 @@ class ClientRequestInstrumentation {
@Around("copyingRequestContext(old)")
def aroundCopyingRequestContext(pjp: ProceedingJoinPoint, old: TraceContextAware): Any = {
- TraceRecorder.withInlineTraceContextReplacement(old.traceContext) {
+ TraceContext.withContext(old.traceContext) {
pjp.proceed()
}
}
@@ -85,7 +84,7 @@ class ClientRequestInstrumentation {
@Around("dispatchToCommander(requestContext, message)")
def aroundDispatchToCommander(pjp: ProceedingJoinPoint, requestContext: TraceContextAware, message: Any): Any = {
if (requestContext.traceContext.nonEmpty) {
- TraceRecorder.withInlineTraceContextReplacement(requestContext.traceContext) {
+ TraceContext.withContext(requestContext.traceContext) {
if (message.isInstanceOf[HttpMessageEnd])
requestContext.asInstanceOf[SegmentAware].segment.finish()
@@ -112,10 +111,10 @@ class ClientRequestInstrumentation {
val originalSendReceive = pjp.proceed().asInstanceOf[HttpRequest ⇒ Future[HttpResponse]]
(request: HttpRequest) ⇒ {
- TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
- val sprayExtension = Kamon(Spray)(system)
+ TraceContext.map { ctx ⇒
+ val sprayExtension = ctx.lookupExtension(Spray)
val segment =
- if (sprayExtension.clientSegmentCollectionStrategy == ClientSegmentCollectionStrategy.Pipelining)
+ if (sprayExtension.settings.clientInstrumentationLevel == ClientInstrumentationLevel.RequestLevelAPI)
ctx.startSegment(sprayExtension.generateRequestLevelApiSegmentName(request), SegmentCategory.HttpClient, Spray.SegmentLibraryName)
else
EmptyTraceContext.EmptySegment
@@ -139,10 +138,10 @@ class ClientRequestInstrumentation {
@Around("includingDefaultHeadersAtHttpHostConnector(request, defaultHeaders)")
def aroundIncludingDefaultHeadersAtHttpHostConnector(pjp: ProceedingJoinPoint, request: HttpMessage, defaultHeaders: List[HttpHeader]): Any = {
- val modifiedHeaders = TraceRecorder.withTraceContextAndSystem { (ctx, system) ⇒
- val sprayExtension = Kamon(Spray)(system)
- if (sprayExtension.includeTraceToken)
- RawHeader(sprayExtension.traceTokenHeaderName, ctx.token) :: defaultHeaders
+ val modifiedHeaders = TraceContext.map { ctx ⇒
+ val sprayExtension = ctx.lookupExtension(Spray)
+ if (sprayExtension.settings.includeTraceTokenHeader)
+ RawHeader(sprayExtension.settings.traceTokenHeaderName, ctx.token) :: defaultHeaders
else
defaultHeaders
@@ -150,4 +149,4 @@ class ClientRequestInstrumentation {
pjp.proceed(Array[AnyRef](request, modifiedHeaders))
}
-}
+}
\ No newline at end of file
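Client-side trace-token propagation now reads everything from `settings`; when enabled, outgoing requests gain a `RawHeader(traceTokenHeaderName, ctx.token)`. A condensed sketch of the header construction, mirroring the hunk above (the `withTraceToken` helper is hypothetical):

    import kamon.spray.Spray
    import kamon.trace.TraceContext
    import spray.http.HttpHeader
    import spray.http.HttpHeaders.RawHeader

    def withTraceToken(defaults: List[HttpHeader]): List[HttpHeader] =
      TraceContext.map { ctx ⇒
        val settings = ctx.lookupExtension(Spray).settings
        if (settings.includeTraceTokenHeader)
          RawHeader(settings.traceTokenHeaderName, ctx.token) :: defaults
        else defaults
      } getOrElse defaults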
diff --git a/kamon-spray/src/main/scala/spray/can/server/ServerRequestInstrumentation.scala b/kamon-spray/src/main/scala/kamon/spray/instrumentation/ServerRequestInstrumentation.scala
index 1ae4ad80..73287132 100644
--- a/kamon-spray/src/main/scala/spray/can/server/ServerRequestInstrumentation.scala
+++ b/kamon-spray/src/main/scala/kamon/spray/instrumentation/ServerRequestInstrumentation.scala
@@ -13,14 +13,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================== */
-package spray.can.server
+package spray.can.server.instrumentation
import kamon.trace.TraceLocal.{ HttpContext, HttpContextKey }
import org.aspectj.lang.annotation._
import kamon.trace._
import akka.actor.ActorSystem
+import spray.can.server.OpenRequest
import spray.http.{ HttpResponse, HttpMessagePartWrapper, HttpRequest }
-import akka.event.Logging.Warning
import kamon.Kamon
import kamon.spray.{ SprayExtension, Spray }
import org.aspectj.lang.ProceedingJoinPoint
@@ -40,14 +40,16 @@ class ServerRequestInstrumentation {
@After("openRequestInit(openRequest, request)")
def afterInit(openRequest: TraceContextAware, request: HttpRequest): Unit = {
val system: ActorSystem = openRequest.asInstanceOf[OpenRequest].context.actorContext.system
+ val tracer = Tracer.get(system)
val sprayExtension = Kamon(Spray)(system)
val defaultTraceName = sprayExtension.generateTraceName(request)
- val token = if (sprayExtension.includeTraceToken) {
- request.headers.find(_.name == sprayExtension.traceTokenHeaderName).map(_.value)
+ val token = if (sprayExtension.settings.includeTraceTokenHeader) {
+ request.headers.find(_.name == sprayExtension.settings.traceTokenHeaderName).map(_.value)
} else None
- TraceRecorder.start(defaultTraceName, token)(system)
+ val newContext = token.map(customToken ⇒ tracer.newContext(defaultTraceName, customToken)).getOrElse(tracer.newContext(defaultTraceName))
+ TraceContext.setCurrentContext(newContext)
// Necessary to force initialization of traceContext when initiating the request.
openRequest.traceContext
@@ -58,7 +60,7 @@ class ServerRequestInstrumentation {
@After("openNewRequest()")
def afterOpenNewRequest(): Unit = {
- TraceRecorder.clearContext
+ TraceContext.clearCurrentContext
}
@Pointcut("execution(* spray.can.server.OpenRequestComponent$DefaultOpenRequest.handleResponseEndAndReturnNextOpenRequest(..)) && target(openRequest) && args(response)")
@@ -66,26 +68,24 @@ class ServerRequestInstrumentation {
@Around("openRequestCreation(openRequest, response)")
def afterFinishingRequest(pjp: ProceedingJoinPoint, openRequest: TraceContextAware, response: HttpMessagePartWrapper): Any = {
- val incomingContext = TraceRecorder.currentContext
+ val incomingContext = TraceContext.currentContext
val storedContext = openRequest.traceContext
// The stored context is always a DefaultTraceContext if the instrumentation is running
- val system = storedContext.system
-
- verifyTraceContextConsistency(incomingContext, storedContext, system)
+ verifyTraceContextConsistency(incomingContext, storedContext)
if (incomingContext.isEmpty)
pjp.proceed()
else {
- val sprayExtension = Kamon(Spray)(system)
+ val sprayExtension = incomingContext.lookupExtension(Spray)
- val proceedResult = if (sprayExtension.includeTraceToken) {
- val responseWithHeader = includeTraceTokenIfPossible(response, sprayExtension.traceTokenHeaderName, incomingContext.token)
+ val proceedResult = if (sprayExtension.settings.includeTraceTokenHeader) {
+ val responseWithHeader = includeTraceTokenIfPossible(response, sprayExtension.settings.traceTokenHeaderName, incomingContext.token)
pjp.proceed(Array(openRequest, responseWithHeader))
} else pjp.proceed
- TraceRecorder.finish()
+ TraceContext.currentContext.finish()
recordHttpServerMetrics(response, incomingContext.name, sprayExtension)
@@ -96,15 +96,15 @@ class ServerRequestInstrumentation {
}
}
- def verifyTraceContextConsistency(incomingTraceContext: TraceContext, storedTraceContext: TraceContext, system: ActorSystem): Unit = {
- def publishWarning(text: String, system: ActorSystem): Unit =
- system.eventStream.publish(Warning("ServerRequestInstrumentation", classOf[ServerRequestInstrumentation], text))
+ def verifyTraceContextConsistency(incomingTraceContext: TraceContext, storedTraceContext: TraceContext): Unit = {
+ def publishWarning(text: String): Unit =
+ storedTraceContext.lookupExtension(Spray).log.warning(text)
if (incomingTraceContext.nonEmpty) {
if (incomingTraceContext.token != storedTraceContext.token)
- publishWarning(s"Different trace token found when trying to close a trace, original: [${storedTraceContext.token}] - incoming: [${incomingTraceContext.token}]", system)
+ publishWarning(s"Different trace token found when trying to close a trace, original: [${storedTraceContext.token}] - incoming: [${incomingTraceContext.token}]")
} else
- publishWarning(s"EmptyTraceContext present while closing the trace with token [${storedTraceContext.token}]", system)
+ publishWarning(s"EmptyTraceContext present while closing the trace with token [${storedTraceContext.token}]")
}
def recordHttpServerMetrics(response: HttpMessagePartWrapper, traceName: String, sprayExtension: SprayExtension): Unit =
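Server-side traces follow the same start/finish discipline as before, only with Tracer building the context explicitly: create it (optionally with the incoming token), set it as current, and finish it when the response is written. A compressed sketch using only the calls from this file:

    import akka.actor.ActorSystem
    import kamon.trace.{ TraceContext, Tracer }

    def startServerTrace(system: ActorSystem, traceName: String, token: Option[String]): Unit = {
      val tracer = Tracer.get(system)
      val context = token.map(tracer.newContext(traceName, _)).getOrElse(tracer.newContext(traceName))
      TraceContext.setCurrentContext(context)
      // ...and when the response goes out: TraceContext.currentContext.finish()
    }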
diff --git a/kamon-spray/src/test/resources/application.conf b/kamon-spray/src/test/resources/application.conf
index 4a9b2c67..8b137891 100644
--- a/kamon-spray/src/test/resources/application.conf
+++ b/kamon-spray/src/test/resources/application.conf
@@ -1,26 +1 @@
-kamon {
- metrics {
- tick-interval = 1 second
- filters = [
- {
- actor {
- includes = []
- excludes = [ "system/*", "user/IO-*" ]
- }
- },
- {
- trace {
- includes = [ "*" ]
- excludes = []
- }
- },
- {
- dispatcher {
- includes = [ "default-dispatcher" ]
- excludes = []
- }
- }
- ]
- }
-}
\ No newline at end of file
diff --git a/kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala b/kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala
index b90b0f3b..c5d7d992 100644
--- a/kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala
+++ b/kamon-spray/src/test/scala/kamon/spray/ClientRequestInstrumentationSpec.scala
@@ -16,50 +16,36 @@
package kamon.spray
-import akka.testkit.{ TestKitBase, TestProbe }
-import akka.actor.ActorSystem
+import akka.testkit.TestProbe
+import kamon.testkit.BaseKamonSpec
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{ Millis, Seconds, Span }
-import org.scalatest.{ Matchers, WordSpecLike }
import spray.httpx.RequestBuilding
import spray.http.{ HttpResponse, HttpRequest }
-import kamon.trace.{ SegmentCategory, SegmentMetricIdentity, TraceRecorder }
+import kamon.trace.{ TraceContext, SegmentCategory }
import com.typesafe.config.ConfigFactory
import spray.can.Http
import spray.http.HttpHeaders.RawHeader
import kamon.Kamon
-import kamon.metric.{ TraceMetrics, Metrics }
+import kamon.metric.TraceMetricsSpec
import spray.client.pipelining.sendReceive
-import kamon.metric.Subscriptions.TickMetricSnapshot
import scala.concurrent.duration._
-import kamon.metric.TraceMetrics.TraceMetricsSnapshot
-
-class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike with Matchers with ScalaFutures with RequestBuilding with TestServer {
- implicit lazy val system: ActorSystem = ActorSystem("client-request-instrumentation-spec", ConfigFactory.parseString(
- """
- |akka {
- | loglevel = ERROR
- |}
- |
- |kamon {
- | spray {
- | name-generator = kamon.spray.TestSprayNameGenerator
- | }
- |
- | metrics {
- | tick-interval = 1 hour
- |
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = []
- | }
- | }
- | ]
- | }
- |}
- """.stripMargin))
+
+class ClientRequestInstrumentationSpec extends BaseKamonSpec("client-request-instrumentation-spec") with ScalaFutures
+ with RequestBuilding with TestServer {
+
+ import TraceMetricsSpec.SegmentSyntax
+
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon {
+ | metric.tick-interval = 1 hour
+ | spray.name-generator = kamon.spray.TestSprayNameGenerator
+ |}
+ |
+ |akka.loggers = ["akka.event.slf4j.Slf4jLogger"]
+ """.stripMargin)
implicit def ec = system.dispatcher
implicit val defaultPatience = PatienceConfig(timeout = Span(10, Seconds), interval = Span(5, Millis))
@@ -71,12 +57,12 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val (_, server, bound) = buildSHostConnectorAndServer
// Initiate a request within the context of a trace
- val (testContext, responseFuture) = TraceRecorder.withNewTraceContext("include-trace-token-header-at-request-level-api") {
+ val (testContext, responseFuture) = TraceContext.withContext(newContext("include-trace-token-header-at-request-level-api")) {
val rF = sendReceive(system, ec) {
Get(s"http://${bound.localAddress.getHostName}:${bound.localAddress.getPort}/dummy-path")
}
- (TraceRecorder.currentContext, rF)
+ (TraceContext.currentContext, rF)
}
// Accept the connection at the server side
@@ -85,7 +71,7 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
// Receive the request and reply back
val request = server.expectMsgType[HttpRequest]
- request.headers should contain(RawHeader(Kamon(Spray).traceTokenHeaderName, testContext.token))
+ request.headers should contain(traceTokenHeader(testContext.token))
// Finish the request cycle, just to avoid error messages on the logs.
server.reply(HttpResponse(entity = "ok"))
@@ -98,12 +84,12 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val (_, server, bound) = buildSHostConnectorAndServer
// Initiate a request within the context of a trace
- val (testContext, responseFuture) = TraceRecorder.withNewTraceContext("do-not-include-trace-token-header-at-request-level-api") {
+ val (testContext, responseFuture) = TraceContext.withContext(newContext("do-not-include-trace-token-header-at-request-level-api")) {
val rF = sendReceive(system, ec) {
Get(s"http://${bound.localAddress.getHostName}:${bound.localAddress.getPort}/dummy-path")
}
- (TraceRecorder.currentContext, rF)
+ (TraceContext.currentContext, rF)
}
// Accept the connection at the server side
@@ -112,7 +98,7 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
// Receive the request and reply back
val request = server.expectMsgType[HttpRequest]
- request.headers should not contain (RawHeader(Kamon(Spray).traceTokenHeaderName, testContext.token))
+ request.headers should not contain (traceTokenHeader(testContext.token))
// Finish the request cycle, just to avoid error messages on the logs.
server.reply(HttpResponse(entity = "ok"))
@@ -128,12 +114,12 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val (_, _, bound) = buildSHostConnectorAndServer
// Initiate a request within the context of a trace
- val (testContext, responseFuture) = TraceRecorder.withNewTraceContext("assign-name-to-segment-with-request-level-api") {
+ val (testContext, responseFuture) = TraceContext.withContext(newContext("assign-name-to-segment-with-request-level-api")) {
val rF = sendReceive(transport.ref)(ec, 10.seconds) {
Get(s"http://${bound.localAddress.getHostName}:${bound.localAddress.getPort}/request-level-api-segment")
}
- (TraceRecorder.currentContext, rF)
+ (TraceContext.currentContext, rF)
}
// Receive the request and reply back
@@ -142,10 +128,10 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
responseFuture.futureValue.entity.asString should be("ok")
testContext.finish()
- val traceMetricsSnapshot = takeSnapshotOf("assign-name-to-segment-with-request-level-api")
- traceMetricsSnapshot.elapsedTime.numberOfMeasurements should be(1)
- traceMetricsSnapshot.segments(SegmentMetricIdentity("request-level /request-level-api-segment",
- SegmentCategory.HttpClient, Spray.SegmentLibraryName)).numberOfMeasurements should be(1)
+ val traceMetricsSnapshot = takeSnapshotOf("assign-name-to-segment-with-request-level-api", "trace")
+ traceMetricsSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceMetricsSnapshot.segment("request-level /request-level-api-segment", SegmentCategory.HttpClient, Spray.SegmentLibraryName)
+ .numberOfMeasurements should be(1)
}
"rename a request level api segment once it reaches the relevant host connector" in {
@@ -155,12 +141,12 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val (_, server, bound) = buildSHostConnectorAndServer
// Initiate a request within the context of a trace
- val (testContext, responseFuture) = TraceRecorder.withNewTraceContext("rename-segment-with-request-level-api") {
+ val (testContext, responseFuture) = TraceContext.withContext(newContext("rename-segment-with-request-level-api")) {
val rF = sendReceive(system, ec) {
Get(s"http://${bound.localAddress.getHostName}:${bound.localAddress.getPort}/request-level-api-segment")
}
- (TraceRecorder.currentContext, rF)
+ (TraceContext.currentContext, rF)
}
// Accept the connection at the server side
@@ -173,10 +159,10 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
responseFuture.futureValue.entity.asString should be("ok")
testContext.finish()
- val traceMetricsSnapshot = takeSnapshotOf("rename-segment-with-request-level-api")
- traceMetricsSnapshot.elapsedTime.numberOfMeasurements should be(1)
- traceMetricsSnapshot.segments(SegmentMetricIdentity("host-level /request-level-api-segment",
- SegmentCategory.HttpClient, Spray.SegmentLibraryName)).numberOfMeasurements should be(1)
+ val traceMetricsSnapshot = takeSnapshotOf("rename-segment-with-request-level-api", "trace")
+ traceMetricsSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceMetricsSnapshot.segment("host-level /request-level-api-segment", SegmentCategory.HttpClient, Spray.SegmentLibraryName)
+ .numberOfMeasurements should be(1)
}
}
@@ -189,9 +175,9 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val client = TestProbe()
// Initiate a request within the context of a trace
- val testContext = TraceRecorder.withNewTraceContext("include-trace-token-header-on-http-client-request") {
+ val testContext = TraceContext.withContext(newContext("include-trace-token-header-on-http-client-request")) {
client.send(hostConnector, Get("/dummy-path"))
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
// Accept the connection at the server side
@@ -200,7 +186,7 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
// Receive the request and reply back
val request = server.expectMsgType[HttpRequest]
- request.headers should contain(RawHeader(Kamon(Spray).traceTokenHeaderName, testContext.token))
+ request.headers should contain(traceTokenHeader(testContext.token))
// Finish the request cycle, just to avoid error messages in the logs.
server.reply(HttpResponse(entity = "ok"))
@@ -216,9 +202,9 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val client = TestProbe()
// Initiate a request within the context of a trace
- val testContext = TraceRecorder.withNewTraceContext("not-include-trace-token-header-on-http-client-request") {
+ val testContext = TraceContext.withContext(newContext("not-include-trace-token-header-on-http-client-request")) {
client.send(hostConnector, Get("/dummy-path"))
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
// Accept the connection at the server side
@@ -227,7 +213,7 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
// Receive the request and reply back
val request = server.expectMsgType[HttpRequest]
- request.headers should not contain (RawHeader(Kamon(Spray).traceTokenHeaderName, testContext.token))
+ request.headers should not contain (traceTokenHeader(testContext.token))
// Finish the request cycle, just to avoid error messages in the logs.
server.reply(HttpResponse(entity = "ok"))
@@ -243,9 +229,9 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
val client = TestProbe()
// Initiate a request within the context of a trace
- val testContext = TraceRecorder.withNewTraceContext("create-segment-with-host-level-api") {
+ val testContext = TraceContext.withContext(newContext("create-segment-with-host-level-api")) {
client.send(hostConnector, Get("/host-level-api-segment"))
- TraceRecorder.currentContext
+ TraceContext.currentContext
}
// Accept the connection at the server side
@@ -254,52 +240,39 @@ class ClientRequestInstrumentationSpec extends TestKitBase with WordSpecLike wit
// Receive the request and reply back
val request = server.expectMsgType[HttpRequest]
- request.headers should not contain (RawHeader(Kamon(Spray).traceTokenHeaderName, testContext.token))
+ request.headers should not contain (traceTokenHeader(testContext.token))
// Finish the request cycle, just to avoid error messages in the logs.
server.reply(HttpResponse(entity = "ok"))
client.expectMsgType[HttpResponse]
testContext.finish()
- val traceMetricsSnapshot = takeSnapshotOf("create-segment-with-host-level-api")
- traceMetricsSnapshot.elapsedTime.numberOfMeasurements should be(1)
- traceMetricsSnapshot.segments(SegmentMetricIdentity("host-level /host-level-api-segment",
- SegmentCategory.HttpClient, Spray.SegmentLibraryName)).numberOfMeasurements should be(1)
+ val traceMetricsSnapshot = takeSnapshotOf("create-segment-with-host-level-api", "trace")
+ traceMetricsSnapshot.histogram("elapsed-time").get.numberOfMeasurements should be(1)
+ traceMetricsSnapshot.segment("host-level /host-level-api-segment", SegmentCategory.HttpClient, Spray.SegmentLibraryName)
+ .numberOfMeasurements should be(1)
}
}
}
- def expectTraceMetrics(traceName: String, listener: TestProbe, timeout: FiniteDuration): TraceMetricsSnapshot = {
- val tickSnapshot = within(timeout) {
- listener.expectMsgType[TickMetricSnapshot]
- }
-
- val metricsOption = tickSnapshot.metrics.get(TraceMetrics(traceName))
- metricsOption should not be empty
- metricsOption.get.asInstanceOf[TraceMetricsSnapshot]
- }
-
- def takeSnapshotOf(traceName: String): TraceMetricsSnapshot = {
- val recorder = Kamon(Metrics).register(TraceMetrics(traceName), TraceMetrics.Factory)
- val collectionContext = Kamon(Metrics).buildDefaultCollectionContext
- recorder.get.collect(collectionContext)
- }
+ def traceTokenHeader(token: String): RawHeader =
+ RawHeader(Kamon(Spray).settings.traceTokenHeaderName, token)
- def enableInternalSegmentCollectionStrategy(): Unit = setSegmentCollectionStrategy(ClientSegmentCollectionStrategy.Internal)
- def enablePipeliningSegmentCollectionStrategy(): Unit = setSegmentCollectionStrategy(ClientSegmentCollectionStrategy.Pipelining)
+ def enableInternalSegmentCollectionStrategy(): Unit = setSegmentCollectionStrategy(ClientInstrumentationLevel.HostLevelAPI)
+ def enablePipeliningSegmentCollectionStrategy(): Unit = setSegmentCollectionStrategy(ClientInstrumentationLevel.RequestLevelAPI)
def enableAutomaticTraceTokenPropagation(): Unit = setIncludeTraceToken(true)
def disableAutomaticTraceTokenPropagation(): Unit = setIncludeTraceToken(false)
- def setSegmentCollectionStrategy(strategy: ClientSegmentCollectionStrategy.Strategy): Unit = {
- val target = Kamon(Spray)(system)
- val field = target.getClass.getDeclaredField("clientSegmentCollectionStrategy")
+ def setSegmentCollectionStrategy(strategy: ClientInstrumentationLevel.Level): Unit = {
+ val target = Kamon(Spray)(system).settings
+ val field = target.getClass.getDeclaredField("clientInstrumentationLevel")
field.setAccessible(true)
field.set(target, strategy)
}
def setIncludeTraceToken(include: Boolean): Unit = {
- val target = Kamon(Spray)(system)
- val field = target.getClass.getDeclaredField("includeTraceToken")
+ val target = Kamon(Spray)(system).settings
+ val field = target.getClass.getDeclaredField("includeTraceTokenHeader")
field.setAccessible(true)
field.set(target, include)
}
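Both helper pairs above mutate private, final fields on the extension's settings object through Java reflection, since the new SprayExtensionSettings is built once from configuration and exposes no setters. A self-contained sketch of that trick; the SettingsStandIn class below is hypothetical, and only the field name mirrors the diff:

// Stand-in for Kamon(Spray)(system).settings; only the field name
// `includeTraceTokenHeader` mirrors the real code.
final class SettingsStandIn { private val includeTraceTokenHeader: Boolean = true }

object ReflectiveOverrideSketch extends App {
  val settings = new SettingsStandIn
  val field = settings.getClass.getDeclaredField("includeTraceTokenHeader")
  field.setAccessible(true)    // also lifts the final restriction on non-static fields
  field.set(settings, false)   // boxed by Scala, unboxed by reflection into the primitive field
  println(field.get(settings)) // prints: false
}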
diff --git a/kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala b/kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala
index c4b370d7..58bb2885 100644
--- a/kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala
+++ b/kamon-spray/src/test/scala/kamon/spray/SprayServerMetricsSpec.scala
@@ -1,46 +1,27 @@
package kamon.spray
-import akka.actor.ActorSystem
-import akka.testkit.{ TestProbe, TestKitBase }
+import akka.testkit.TestProbe
import com.typesafe.config.ConfigFactory
-import kamon.Kamon
-import kamon.http.HttpServerMetrics
-import kamon.metric._
+import kamon.testkit.BaseKamonSpec
import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
-import org.scalatest.{ Matchers, WordSpecLike }
import spray.http.{ StatusCodes, HttpResponse, HttpRequest }
import spray.httpx.RequestBuilding
-class SprayServerMetricsSpec extends TestKitBase with WordSpecLike with Matchers with RequestBuilding
- with ScalaFutures with PatienceConfiguration with TestServer {
+class SprayServerMetricsSpec extends BaseKamonSpec("spray-server-metrics-spec") with RequestBuilding with ScalaFutures
+ with PatienceConfiguration with TestServer {
- val collectionContext = CollectionContext(100)
-
- implicit lazy val system: ActorSystem = ActorSystem("spray-server-metrics-spec", ConfigFactory.parseString(
- """
- |akka {
- | loglevel = ERROR
- |}
- |
- |kamon {
- | metrics {
- | tick-interval = 1 hour
- |
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = []
- | }
- | }
- | ]
- | }
- |}
- """.stripMargin))
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ |}
+ |
+ |akka.loggers = ["akka.event.slf4j.Slf4jLogger"]
+ """.stripMargin)
"the Spray Server metrics instrumentation" should {
- "record trace metrics for requests received" in {
- Kamon(Metrics)(system).register(TraceMetrics("GET: /record-trace-metrics"), TraceMetrics.Factory).get.collect(collectionContext)
+ "record trace metrics for processed requests" in {
val (connection, server) = buildClientConnectionAndServer
val client = TestProbe()
@@ -58,15 +39,17 @@ class SprayServerMetricsSpec extends TestKitBase with WordSpecLike with Matchers
client.expectMsgType[HttpResponse]
}
- val snapshot = Kamon(Metrics)(system).register(TraceMetrics("GET: /record-trace-metrics"), TraceMetrics.Factory).get.collect(collectionContext)
- snapshot.elapsedTime.numberOfMeasurements should be(15)
+ val snapshot = takeSnapshotOf("GET: /record-trace-metrics", "trace")
+ snapshot.histogram("elapsed-time").get.numberOfMeasurements should be(15)
}
- "record http serve metrics for all the requests" in {
- Kamon(Metrics)(system).register(HttpServerMetrics, HttpServerMetrics.Factory).get.collect(collectionContext)
+ "record http server metrics for all the requests" in {
val (connection, server) = buildClientConnectionAndServer
val client = TestProbe()
+ // Discard metrics accumulated by previous tests.
+ takeSnapshotOf("spray-server", "http-server")
+
for (repetition ← 1 to 10) {
client.send(connection, Get("/record-http-metrics"))
server.expectMsgType[HttpRequest]
@@ -81,11 +64,11 @@ class SprayServerMetricsSpec extends TestKitBase with WordSpecLike with Matchers
client.expectMsgType[HttpResponse]
}
- val snapshot = Kamon(Metrics)(system).register(HttpServerMetrics, HttpServerMetrics.Factory).get.collect(collectionContext)
- snapshot.countsPerTraceAndStatusCode("GET: /record-http-metrics")("200").count should be(10)
- snapshot.countsPerTraceAndStatusCode("GET: /record-http-metrics")("400").count should be(5)
- snapshot.countsPerStatusCode("200").count should be(10)
- snapshot.countsPerStatusCode("400").count should be(5)
+ val snapshot = takeSnapshotOf("spray-server", "http-server")
+ snapshot.counter("GET: /record-http-metrics_200").get.count should be(10)
+ snapshot.counter("GET: /record-http-metrics_400").get.count should be(5)
+ snapshot.counter("200").get.count should be(10)
+ snapshot.counter("400").get.count should be(5)
}
}
}
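The rewritten assertions imply the new http-server recorder flattens the old nested countsPerTraceAndStatusCode/countsPerStatusCode maps into plain counters keyed "<traceName>_<statusCode>", plus one counter per status code. A hypothetical recorder following just that naming convention, for illustration only (this is not the actual HttpServerMetrics implementation):

import kamon.metric.instrument.InstrumentFactory
import kamon.metric.{ EntityRecorderFactory, GenericEntityRecorder }

// Illustrative only: mirrors the counter naming asserted in the spec above.
class FlatHttpServerRecorder(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
  def recordResponse(traceName: String, statusCode: String): Unit = {
    counter(statusCode).increment()
    counter(s"${traceName}_$statusCode").increment()
  }
}

object FlatHttpServerRecorder extends EntityRecorderFactory[FlatHttpServerRecorder] {
  def category: String = "http-server"
  def createRecorder(instrumentFactory: InstrumentFactory): FlatHttpServerRecorder = new FlatHttpServerRecorder(instrumentFactory)
}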
diff --git a/kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala b/kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala
index 30d42eea..1ae0cb98 100644
--- a/kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala
+++ b/kamon-spray/src/test/scala/kamon/spray/SprayServerTracingSpec.scala
@@ -17,39 +17,15 @@
package kamon.spray
import _root_.spray.httpx.RequestBuilding
-import akka.testkit.{ TestKitBase, TestProbe }
-import akka.actor.ActorSystem
-import org.scalatest.{ Matchers, WordSpecLike }
+import akka.testkit.TestProbe
+import kamon.testkit.BaseKamonSpec
import kamon.Kamon
import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
import spray.http.HttpHeaders.RawHeader
import spray.http.{ HttpResponse, HttpRequest }
-import com.typesafe.config.ConfigFactory
-
-class SprayServerTracingSpec extends TestKitBase with WordSpecLike with Matchers with RequestBuilding
- with ScalaFutures with PatienceConfiguration with TestServer {
-
- implicit lazy val system: ActorSystem = ActorSystem("spray-server-tracing-spec", ConfigFactory.parseString(
- """
- |akka {
- | loglevel = ERROR
- |}
- |
- |kamon {
- | metrics {
- | tick-interval = 2 seconds
- |
- | filters = [
- | {
- | trace {
- | includes = [ "*" ]
- | excludes = []
- | }
- | }
- | ]
- | }
- |}
- """.stripMargin))
+
+class SprayServerTracingSpec extends BaseKamonSpec("spray-server-tracing-spec") with RequestBuilding with ScalaFutures
+ with PatienceConfiguration with TestServer {
"the spray server request tracing instrumentation" should {
"include the trace-token header in responses when the automatic-trace-token-propagation is enabled" in {
@@ -58,12 +34,12 @@ class SprayServerTracingSpec extends TestKitBase with WordSpecLike with Matchers
val (connection, server) = buildClientConnectionAndServer
val client = TestProbe()
- client.send(connection, Get("/").withHeaders(RawHeader(Kamon(Spray).traceTokenHeaderName, "propagation-enabled")))
+ client.send(connection, Get("/").withHeaders(traceTokenHeader("propagation-enabled")))
server.expectMsgType[HttpRequest]
server.reply(HttpResponse(entity = "ok"))
val response = client.expectMsgType[HttpResponse]
- response.headers should contain(RawHeader(Kamon(Spray).traceTokenHeaderName, "propagation-enabled"))
+ response.headers should contain(traceTokenHeader("propagation-enabled"))
}
"reply back with an automatically assigned trace token if none was provided with the request and automatic-trace-token-propagation is enabled" in {
@@ -77,7 +53,7 @@ class SprayServerTracingSpec extends TestKitBase with WordSpecLike with Matchers
server.reply(HttpResponse(entity = "ok"))
val response = client.expectMsgType[HttpResponse]
- response.headers.count(_.name == Kamon(Spray).traceTokenHeaderName) should be(1)
+ response.headers.count(_.name == Kamon(Spray).settings.traceTokenHeaderName) should be(1)
}
@@ -87,21 +63,24 @@ class SprayServerTracingSpec extends TestKitBase with WordSpecLike with Matchers
val (connection, server) = buildClientConnectionAndServer
val client = TestProbe()
- client.send(connection, Get("/").withHeaders(RawHeader(Kamon(Spray).traceTokenHeaderName, "propagation-disabled")))
+ client.send(connection, Get("/").withHeaders(traceTokenHeader("propagation-disabled")))
server.expectMsgType[HttpRequest]
server.reply(HttpResponse(entity = "ok"))
val response = client.expectMsgType[HttpResponse]
- response.headers should not contain RawHeader(Kamon(Spray).traceTokenHeaderName, "propagation-disabled")
+ response.headers should not contain traceTokenHeader("propagation-disabled")
}
}
+ def traceTokenHeader(token: String): RawHeader =
+ RawHeader(Kamon(Spray).settings.traceTokenHeaderName, token)
+
def enableAutomaticTraceTokenPropagation(): Unit = setIncludeTraceToken(true)
def disableAutomaticTraceTokenPropagation(): Unit = setIncludeTraceToken(false)
def setIncludeTraceToken(include: Boolean): Unit = {
- val target = Kamon(Spray)(system)
- val field = target.getClass.getDeclaredField("includeTraceToken")
+ val target = Kamon(Spray)(system).settings
+ val field = target.getClass.getDeclaredField("includeTraceTokenHeader")
field.setAccessible(true)
field.set(target, include)
}
diff --git a/kamon-statsd/src/main/resources/reference.conf b/kamon-statsd/src/main/resources/reference.conf
index a10ac735..32b99353 100644
--- a/kamon-statsd/src/main/resources/reference.conf
+++ b/kamon-statsd/src/main/resources/reference.conf
@@ -19,11 +19,12 @@ kamon {
# Subscription patterns used to select which metrics will be pushed to StatsD. Note that metrics
# collection for the desired entities must first be enabled under the kamon.metrics.filters settings.
- includes {
- actor = [ "*" ]
- trace = [ "*" ]
- dispatcher = [ "*" ]
- router = [ "*" ]
+ subscriptions {
+ trace = [ "**" ]
+ actor = [ "**" ]
+ dispatcher = [ "**" ]
+ user-metric = [ "**" ]
+ system-metric = [ "**" ]
}
# Enable system metrics
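Downstream applications narrow what gets shipped by overriding these lists. A hypothetical application.conf override (the actor path is invented for illustration):

# Hypothetical application.conf: report traces and a single actor only.
kamon.statsd.subscriptions {
  trace         = [ "**" ]
  actor         = [ "my-app/user/important-actor" ]
  dispatcher    = [ ]
  user-metric   = [ ]
  system-metric = [ ]
}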
diff --git a/kamon-statsd/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala b/kamon-statsd/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala
index 28354423..0fce855c 100644
--- a/kamon-statsd/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala
+++ b/kamon-statsd/src/main/scala/kamon/statsd/SimpleMetricKeyGenerator.scala
@@ -3,11 +3,10 @@ package kamon.statsd
import java.lang.management.ManagementFactory
import com.typesafe.config.Config
-import kamon.metric.UserMetrics.UserMetricGroup
-import kamon.metric.{ MetricIdentity, MetricGroupIdentity }
+import kamon.metric.{ MetricKey, Entity }
trait MetricKeyGenerator {
- def generateKey(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String
+ def generateKey(entity: Entity, metricKey: MetricKey): String
}
class SimpleMetricKeyGenerator(config: Config) extends MetricKeyGenerator {
@@ -27,16 +26,11 @@ class SimpleMetricKeyGenerator(config: Config) extends MetricKeyGenerator {
if (includeHostname) s"$application.$normalizedHostname"
else application
- def generateKey(groupIdentity: MetricGroupIdentity, metricIdentity: MetricIdentity): String = {
- val normalizedGroupName = normalizer(groupIdentity.name)
- val key = s"${baseName}.${groupIdentity.category.name}.${normalizedGroupName}"
-
- if (isUserMetric(groupIdentity)) key
- else s"${key}.${metricIdentity.name}"
+ def generateKey(entity: Entity, metricKey: MetricKey): String = {
+ val normalizedGroupName = normalizer(entity.name)
+ s"${baseName}.${entity.category}.${normalizedGroupName}.${metricKey.name}"
}
- def isUserMetric(groupIdentity: MetricGroupIdentity): Boolean = groupIdentity.isInstanceOf[UserMetricGroup]
-
def hostName: String = ManagementFactory.getRuntimeMXBean.getName.split('@')(1)
def createNormalizer(strategy: String): Normalizer = strategy match {
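With the new signature, user metrics lose their special case and every key follows the same four-part layout after the base name. A self-contained sketch of the resulting shape; the '/' → '-' normalization below is an assumption standing in for the configured Normalizer:

// Sketch of the key layout produced by generateKey above.
def sketchKey(baseName: String, category: String, entityName: String, metricName: String): String = {
  def normalize(s: String): String = s.replace('/', '-').replace('.', '-')
  s"$baseName.$category.${normalize(entityName)}.$metricName"
}

// sketchKey("kamon.kamon-host", "actor", "user/kamon", "processing-time")
//   == "kamon.kamon-host.actor.user-kamon.processing-time"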
diff --git a/kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala b/kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala
index 2505f06a..e5a15a9d 100644
--- a/kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala
+++ b/kamon-statsd/src/main/scala/kamon/statsd/StatsD.scala
@@ -18,17 +18,14 @@ package kamon.statsd
import akka.actor._
import kamon.Kamon
-import kamon.akka.{RouterMetrics, DispatcherMetrics, ActorMetrics}
-import kamon.http.HttpServerMetrics
-import kamon.metric.UserMetrics._
import kamon.metric._
-import kamon.metrics._
+import kamon.util.ConfigTools.Syntax
import scala.concurrent.duration._
-import scala.collection.JavaConverters._
import com.typesafe.config.Config
import akka.event.Logging
import java.net.InetSocketAddress
import java.util.concurrent.TimeUnit.MILLISECONDS
+import scala.collection.JavaConverters._
object StatsD extends ExtensionId[StatsDExtension] with ExtensionIdProvider {
override def lookup(): ExtensionId[_ <: Extension] = StatsD
@@ -36,6 +33,8 @@ object StatsD extends ExtensionId[StatsDExtension] with ExtensionIdProvider {
}
class StatsDExtension(system: ExtendedActorSystem) extends Kamon.Extension {
+ implicit val as = system
+
val log = Logging(system, classOf[StatsDExtension])
log.info("Starting the Kamon(StatsD) extension")
@@ -50,57 +49,11 @@ class StatsDExtension(system: ExtendedActorSystem) extends Kamon.Extension {
val statsDMetricsListener = buildMetricsListener(tickInterval, flushInterval, keyGeneratorFQCN, config)
- // Subscribe to all user metrics
- Kamon(Metrics)(system).subscribe(UserHistograms, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(UserCounters, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(UserMinMaxCounters, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(UserGauges, "*", statsDMetricsListener, permanently = true)
-
- // Subscribe to server metrics
- Kamon(Metrics)(system).subscribe(HttpServerMetrics.category, "*", statsDMetricsListener, permanently = true)
-
- // Subscribe to Actors
- val includedActors = statsDConfig.getStringList("includes.actor").asScala
- for (actorPathPattern ← includedActors) {
- Kamon(Metrics)(system).subscribe(ActorMetrics, actorPathPattern, statsDMetricsListener, permanently = true)
- }
-
- // Subscribe to Routers
- val includedRouters = statsDConfig.getStringList("includes.router").asScala
- for (routerPathPattern ← includedRouters) {
- Kamon(Metrics)(system).subscribe(RouterMetrics, routerPathPattern, statsDMetricsListener, permanently = true)
- }
-
- // Subscribe to Traces
- val includedTraces = statsDConfig.getStringList("includes.trace").asScala
- for (tracePathPattern ← includedTraces) {
- Kamon(Metrics)(system).subscribe(TraceMetrics, tracePathPattern, statsDMetricsListener, permanently = true)
- }
-
- // Subscribe to Dispatchers
- val includedDispatchers = statsDConfig.getStringList("includes.dispatcher").asScala
- for (dispatcherPathPattern ← includedDispatchers) {
- Kamon(Metrics)(system).subscribe(DispatcherMetrics, dispatcherPathPattern, statsDMetricsListener, permanently = true)
- }
-
- // Subscribe to SystemMetrics
- val includeSystemMetrics = statsDConfig.getBoolean("report-system-metrics")
- if (includeSystemMetrics) {
- //OS
- Kamon(Metrics)(system).subscribe(CPUMetrics, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(ProcessCPUMetrics, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(MemoryMetrics, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(NetworkMetrics, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(DiskMetrics, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(ContextSwitchesMetrics, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(LoadAverageMetrics, "*", statsDMetricsListener, permanently = true)
-
- //JVM
- Kamon(Metrics)(system).subscribe(HeapMetrics, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(NonHeapMetrics, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(ThreadMetrics, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(ClassLoadingMetrics, "*", statsDMetricsListener, permanently = true)
- Kamon(Metrics)(system).subscribe(GCMetrics, "*", statsDMetricsListener, permanently = true)
+ val subscriptions = statsDConfig.getConfig("subscriptions")
+ subscriptions.firstLevelKeys.map { subscriptionCategory ⇒
+ subscriptions.getStringList(subscriptionCategory).asScala.foreach { pattern ⇒
+ Kamon(Metrics).subscribe(subscriptionCategory, pattern, statsDMetricsListener, permanently = true)
+ }
}
def buildMetricsListener(tickInterval: Long, flushInterval: Long, keyGeneratorFQCN: String, config: Config): ActorRef = {
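The loop above replaces roughly fifty lines of hand-wired subscriptions with a config-driven iteration. A stand-alone sketch of the same walk over a HOCON block, using plain Typesafe Config calls in place of kamon.util.ConfigTools.Syntax.firstLevelKeys:

import com.typesafe.config.ConfigFactory
import scala.collection.JavaConverters._

object SubscriptionLoopSketch extends App {
  val subscriptions = ConfigFactory.parseString(
    """
      |trace = [ "**" ]
      |actor = [ "**" ]
    """.stripMargin)

  // root().entrySet() yields the first-level keys of the block.
  val categories = subscriptions.root().entrySet().asScala.map(_.getKey)
  for (category ← categories; pattern ← subscriptions.getStringList(category).asScala)
    println(s"subscribe($category, $pattern)") // Kamon(Metrics).subscribe(...) in the real extension
}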
diff --git a/kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala b/kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala
index 2aac3a52..3241e1f3 100644
--- a/kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala
+++ b/kamon-statsd/src/main/scala/kamon/statsd/StatsDMetricsSender.scala
@@ -20,7 +20,7 @@ import akka.actor.{ ActorSystem, Props, ActorRef, Actor }
import akka.io.{ Udp, IO }
import java.net.InetSocketAddress
import akka.util.ByteString
-import kamon.metric.Subscriptions.TickMetricSnapshot
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import java.text.{ DecimalFormatSymbols, DecimalFormat }
import java.util.Locale
@@ -51,11 +51,11 @@ class StatsDMetricsSender(remote: InetSocketAddress, maxPacketSizeInBytes: Long,
val packetBuilder = new MetricDataPacketBuilder(maxPacketSizeInBytes, udpSender, remote)
for (
- (groupIdentity, groupSnapshot) ← tick.metrics;
- (metricIdentity, metricSnapshot) ← groupSnapshot.metrics
+ (entity, snapshot) ← tick.metrics;
+ (metricKey, metricSnapshot) ← snapshot.metrics
) {
- val key = metricKeyGenerator.generateKey(groupIdentity, metricIdentity)
+ val key = metricKeyGenerator.generateKey(entity, metricKey)
metricSnapshot match {
case hs: Histogram.Snapshot ⇒
diff --git a/kamon-statsd/src/test/scala/kamon/statsd/SimpleMetricKeyGeneratorSpec.scala b/kamon-statsd/src/test/scala/kamon/statsd/SimpleMetricKeyGeneratorSpec.scala
index ed3fae5b..0edeb3df 100644
--- a/kamon-statsd/src/test/scala/kamon/statsd/SimpleMetricKeyGeneratorSpec.scala
+++ b/kamon-statsd/src/test/scala/kamon/statsd/SimpleMetricKeyGeneratorSpec.scala
@@ -1,7 +1,8 @@
package kamon.statsd
import com.typesafe.config.ConfigFactory
-import kamon.metric.{ MetricGroupCategory, MetricGroupIdentity, MetricIdentity }
+import kamon.metric.instrument.UnitOfMeasurement
+import kamon.metric._
import org.scalatest.{ Matchers, WordSpec }
class SimpleMetricKeyGeneratorSpec extends WordSpec with Matchers {
@@ -68,13 +69,8 @@ class SimpleMetricKeyGeneratorSpec extends WordSpec with Matchers {
}
def buildMetricKey(categoryName: String, entityName: String, metricName: String)(implicit metricKeyGenerator: SimpleMetricKeyGenerator): String = {
- val metricIdentity = new MetricIdentity { val name: String = metricName }
- val groupIdentity = new MetricGroupIdentity {
- val name: String = entityName
- val category: MetricGroupCategory = new MetricGroupCategory {
- val name: String = categoryName
- }
- }
- metricKeyGenerator.generateKey(groupIdentity, metricIdentity)
+ val metric = HistogramKey(metricName, UnitOfMeasurement.Unknown, Map.empty)
+ val entity = Entity(entityName, categoryName)
+ metricKeyGenerator.generateKey(entity, metric)
}
}
diff --git a/kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala b/kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala
index 6c77f321..a0d787d9 100644
--- a/kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala
+++ b/kamon-statsd/src/test/scala/kamon/statsd/StatsDMetricSenderSpec.scala
@@ -18,14 +18,13 @@ package kamon.statsd
import akka.testkit.{ TestKitBase, TestProbe }
import akka.actor.{ ActorRef, Props, ActorSystem }
-import kamon.{ MilliTimestamp, Kamon }
-import kamon.metric.instrument.Histogram.Precision
-import kamon.metric.instrument.Histogram
+import kamon.Kamon
+import kamon.metric.instrument.{ InstrumentFactory, UnitOfMeasurement }
+import kamon.util.MilliTimestamp
import org.scalatest.{ Matchers, WordSpecLike }
import kamon.metric._
import akka.io.Udp
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import java.lang.management.ManagementFactory
+import kamon.metric.SubscriptionsDispatcher.TickMetricSnapshot
import java.net.InetSocketAddress
import com.typesafe.config.ConfigFactory
@@ -33,10 +32,6 @@ class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers
implicit lazy val system: ActorSystem = ActorSystem("statsd-metric-sender-spec", ConfigFactory.parseString(
"""
|kamon {
- | metrics {
- | disable-aspectj-weaver-missing-error = true
- | }
- |
| statsd.simple-metric-key-generator {
| application = kamon
| hostname-override = kamon-host
@@ -56,58 +51,54 @@ class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers
"the StatsDMetricSender" should {
"flush the metrics data after processing the tick, even if the max-packet-size is not reached" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testMetricKey = buildMetricKey("actor", "/user/kamon", testMetricName)
- val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- testRecorder.record(10L)
+ val testMetricKey = buildMetricKey(testEntity, "metric-one")
+ val testRecorder = buildRecorder("user/kamon")
+ testRecorder.metricOne.record(10L)
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val udp = setup(Map(testEntity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"$testMetricKey:10|ms")
}
"render several measurements of the same key under a single (key + multiple measurements) packet" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testMetricKey = buildMetricKey("actor", "/user/kamon", testMetricName)
- val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- testRecorder.record(10L)
- testRecorder.record(11L)
- testRecorder.record(12L)
-
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val testMetricKey = buildMetricKey(testEntity, "metric-one")
+ val testRecorder = buildRecorder("user/kamon")
+ testRecorder.metricOne.record(10L)
+ testRecorder.metricOne.record(11L)
+ testRecorder.metricOne.record(12L)
+
+ val udp = setup(Map(testEntity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"$testMetricKey:10|ms:11|ms:12|ms")
}
"include the correspondent sampling rate when rendering multiple occurrences of the same value" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testMetricKey = buildMetricKey("actor", "/user/kamon", testMetricName)
- val testRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- testRecorder.record(10L)
- testRecorder.record(10L)
+ val testMetricKey = buildMetricKey(testEntity, "metric-one")
+ val testRecorder = buildRecorder("user/kamon")
+ testRecorder.metricOne.record(10L)
+ testRecorder.metricOne.record(10L)
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val udp = setup(Map(testEntity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
data.utf8String should be(s"$testMetricKey:10|ms|@0.5")
}
"flush the packet when the max-packet-size is reached" in new UdpListenerFixture {
- val testMetricName = "processing-time"
- val testMetricKey = buildMetricKey("actor", "/user/kamon", testMetricName)
- val testRecorder = Histogram(10000L, Precision.Normal, Scale.Unit)
+ val testMetricKey = buildMetricKey(testEntity, "metric-one")
+ val testRecorder = buildRecorder("user/kamon")
var bytes = testMetricKey.length
var level = 0
while (bytes <= testMaxPacketSize) {
level += 1
- testRecorder.record(level)
+ testRecorder.metricOne.record(level)
bytes += s":$level|ms".length
}
- val udp = setup(Map(testMetricName -> testRecorder.collect(collectionContext)))
+ val udp = setup(Map(testEntity -> testRecorder.collect(collectionContext)))
udp.expectMsgType[Udp.Send] // let the first flush pass
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
@@ -115,51 +106,38 @@ class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers
}
"render multiple keys in the same packet using newline as separator" in new UdpListenerFixture {
- val firstTestMetricName = "first-test-metric"
- val firstTestMetricKey = buildMetricKey("actor", "/user/kamon", firstTestMetricName)
- val secondTestMetricName = "second-test-metric"
- val secondTestMetricKey = buildMetricKey("actor", "/user/kamon", secondTestMetricName)
+ val testMetricKey1 = buildMetricKey(testEntity, "metric-one")
+ val testMetricKey2 = buildMetricKey(testEntity, "metric-two")
+ val testRecorder = buildRecorder("user/kamon")
- val firstTestRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
- val secondTestRecorder = Histogram(1000L, Precision.Normal, Scale.Unit)
+ testRecorder.metricOne.record(10L)
+ testRecorder.metricOne.record(10L)
+ testRecorder.metricOne.record(11L)
- firstTestRecorder.record(10L)
- firstTestRecorder.record(10L)
- firstTestRecorder.record(11L)
+ testRecorder.metricTwo.record(20L)
+ testRecorder.metricTwo.record(21L)
- secondTestRecorder.record(20L)
- secondTestRecorder.record(21L)
-
- val udp = setup(Map(
- firstTestMetricName -> firstTestRecorder.collect(collectionContext),
- secondTestMetricName -> secondTestRecorder.collect(collectionContext)))
+ val udp = setup(Map(testEntity -> testRecorder.collect(collectionContext)))
val Udp.Send(data, _, _) = udp.expectMsgType[Udp.Send]
- data.utf8String should be(s"$firstTestMetricKey:10|ms|@0.5:11|ms\n$secondTestMetricKey:20|ms:21|ms")
+ data.utf8String should be(s"$testMetricKey1:10|ms|@0.5:11|ms\n$testMetricKey2:20|ms:21|ms")
}
}
trait UdpListenerFixture {
val testMaxPacketSize = system.settings.config.getBytes("kamon.statsd.max-packet-size")
- val testGroupIdentity = new MetricGroupIdentity {
- val name: String = "/user/kamon"
- val category: MetricGroupCategory = new MetricGroupCategory {
- val name: String = "actor"
- }
+ val testEntity = Entity("user/kamon", "test")
+
+ def buildMetricKey(entity: Entity, metricName: String)(implicit metricKeyGenerator: SimpleMetricKeyGenerator): String = {
+ val metricKey = HistogramKey(metricName, UnitOfMeasurement.Unknown, Map.empty)
+ metricKeyGenerator.generateKey(entity, metricKey)
}
- def buildMetricKey(categoryName: String, entityName: String, metricName: String)(implicit metricKeyGenerator: SimpleMetricKeyGenerator): String = {
- val metricIdentity = new MetricIdentity { val name: String = metricName }
- val groupIdentity = new MetricGroupIdentity {
- val name: String = entityName
- val category: MetricGroupCategory = new MetricGroupCategory {
- val name: String = categoryName
- }
- }
- metricKeyGenerator.generateKey(groupIdentity, metricIdentity)
+ def buildRecorder(name: String): TestEntityRecorder = {
+ Kamon(Metrics).register(TestEntityRecorder, name).get.recorder
}
- def setup(metrics: Map[String, MetricSnapshot]): TestProbe = {
+ def setup(metrics: Map[Entity, EntitySnapshot]): TestProbe = {
val udp = TestProbe()
val metricsSender = system.actorOf(Props(new StatsDMetricsSender(new InetSocketAddress("127.0.0.1", 0), testMaxPacketSize, metricKeyGenerator) {
override def udpExtension(implicit system: ActorSystem): ActorRef = udp.ref
@@ -169,22 +147,19 @@ class StatsDMetricSenderSpec extends TestKitBase with WordSpecLike with Matchers
udp.expectMsgType[Udp.SimpleSender]
udp.reply(Udp.SimpleSenderReady)
- val testMetrics = for ((metricName, snapshot) ← metrics) yield {
- val testMetricIdentity = new MetricIdentity {
- val name: String = metricName
- }
-
- (testMetricIdentity, snapshot)
- }
-
- metricsSender ! TickMetricSnapshot(new MilliTimestamp(0), new MilliTimestamp(0), Map(testGroupIdentity -> new MetricGroupSnapshot {
- type GroupSnapshotType = Histogram.Snapshot
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = ???
-
- val metrics: Map[MetricIdentity, MetricSnapshot] = testMetrics.toMap
- }))
-
+ val fakeSnapshot = TickMetricSnapshot(MilliTimestamp.now, MilliTimestamp.now, metrics)
+ metricsSender ! fakeSnapshot
udp
}
}
}
+
+class TestEntityRecorder(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val metricOne = histogram("metric-one")
+ val metricTwo = histogram("metric-two")
+}
+
+object TestEntityRecorder extends EntityRecorderFactory[TestEntityRecorder] {
+ def category: String = "test"
+ def createRecorder(instrumentFactory: InstrumentFactory): TestEntityRecorder = new TestEntityRecorder(instrumentFactory)
+}
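For context, the fixture above exercises the new registration API end to end. A short usage sketch, assuming an initialized Kamon and a CollectionContext as in the surrounding spec:

// Register (or look up) the recorder, record, then collect a snapshot.
val recorder = Kamon(Metrics).register(TestEntityRecorder, "user/kamon").get.recorder
recorder.metricOne.record(42L)
val snapshot = recorder.collect(collectionContext) // EntitySnapshot holding both histograms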
diff --git a/kamon-system-metrics/src/main/resources/reference.conf b/kamon-system-metrics/src/main/resources/reference.conf
index ba439024..7f65e477 100644
--- a/kamon-system-metrics/src/main/resources/reference.conf
+++ b/kamon-system-metrics/src/main/resources/reference.conf
@@ -2,129 +2,180 @@
# Kamon-System-Metrics Reference Configuration #
# ============================================ #
-# Sigar provisioner native library extract location.
-# Use per-application-instance scoped location, such as program working directory.
-kamon.sigar.folder = ${user.dir}"/native"
-
kamon {
system-metrics {
- default-gauge-precision {
- refresh-interval = 1 second
- highest-trackable-value = 999999999
- significant-value-digits = 2
+ # Sigar provisioner native library extract location. Use a per-application-instance scoped location, such as the
+ # program's working directory.
+ sigar-native-folder = ${user.dir}"/native"
+
+ # Frequency with which all Sigar-based metrics will be updated. Setting this value to less than 1 second
+ # might cause some Sigar metrics to behave incorrectly.
+ sigar-metrics-refresh-interval = 1 second
+
+ # Frequency with which context-switches metrics will be updated.
+ context-switches-refresh-interval = 1 second
+
+ # Dispatcher to be used by the SigarMetricsUpdater actor.
+ sigar-dispatcher {
+ executor = "thread-pool-executor"
+ type = PinnedDispatcher
}
- # Default dispatcher for all system-metrics module operations
- dispatcher = ${kamon.default-dispatcher}
+ # Dispatcher to be used by the ContextSwitchesUpdater actor.
+ context-switches-dispatcher {
+ executor = "thread-pool-executor"
+ type = PinnedDispatcher
+ }
}
- metrics {
- precision {
-
- system {
- process-cpu {
- cpu-percentage = {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
- total-process-time = {
- highest-trackable-value = 999999999
- significant-value-digits = 2
- }
- }
-
- cpu {
- user = {
- highest-trackable-value = 999
- significant-value-digits = 2
- }
- system = {
- highest-trackable-value = 999
- significant-value-digits = 2
- }
- wait = {
- highest-trackable-value = 999
- significant-value-digits = 2
- }
- idle = {
- highest-trackable-value = 999
- significant-value-digits = 2
- }
- stolen = {
- highest-trackable-value = 999
- significant-value-digits = 2
- }
- }
-
- network {
- rx-bytes = ${kamon.metrics.precision.default-histogram-precision}
- tx-bytes = ${kamon.metrics.precision.default-histogram-precision}
- rx-errors = ${kamon.metrics.precision.default-histogram-precision}
- tx-errors = ${kamon.metrics.precision.default-histogram-precision}
- rx-dropped = ${kamon.metrics.precision.default-histogram-precision}
- tx-dropped = ${kamon.metrics.precision.default-histogram-precision}
- }
-
- memory {
- used = ${kamon.metrics.precision.default-histogram-precision}
- free = ${kamon.metrics.precision.default-histogram-precision}
- buffer = ${kamon.metrics.precision.default-histogram-precision}
- cache = ${kamon.metrics.precision.default-histogram-precision}
- swap-used = ${kamon.metrics.precision.default-histogram-precision}
- swap-free = ${kamon.metrics.precision.default-histogram-precision}
- }
-
- context-switches {
- per-process-voluntary = ${kamon.metrics.precision.default-histogram-precision}
- per-process-non-voluntary = ${kamon.metrics.precision.default-histogram-precision}
- global = ${kamon.metrics.precision.default-histogram-precision}
- }
-
- disk {
- reads = ${kamon.metrics.precision.default-histogram-precision}
- writes = ${kamon.metrics.precision.default-histogram-precision}
- queue = ${kamon.metrics.precision.default-histogram-precision}
- service-time = ${kamon.metrics.precision.default-histogram-precision}
- }
-
- load-average {
- one = ${kamon.metrics.precision.default-histogram-precision}
- five = ${kamon.metrics.precision.default-histogram-precision}
- fifteen = ${kamon.metrics.precision.default-histogram-precision}
- }
+ metrics.instrument-settings {
+ system-metric {
+
+ #
+ # CPU
+ #
+ cpu-user {
+ highest-trackable-value = 100
+ }
+
+ cpu-system = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+ cpu-wait = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+ cpu-idle = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+ cpu-stolen = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+
+
+ #
+ # Process CPU
+ #
+ process-user-cpu = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+ process-system-cpu = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+ process-cpu = ${kamon.metrics.instrument-settings.system-metric.cpu-user}
+
+
+ #
+ # Garbage Collection
+ #
+ garbage-collection-count {
+ highest-trackable-value = 1000000
+ refresh-interval = 1 second
+ }
+
+ garbage-collection-time {
+ highest-trackable-value = 3600000
+ refresh-interval = 1 second
+ }
+
+
+ #
+ # Heap Memory
+ #
+ heap-used {
+ # 5 GB, which should be plenty for a non-Zing JVM
+ highest-trackable-value = 5368709120
+ refresh-interval = 1 second
}
- jvm {
- heap {
- used = ${kamon.system-metrics.default-gauge-precision}
- max = ${kamon.system-metrics.default-gauge-precision}
- committed = ${kamon.system-metrics.default-gauge-precision}
- }
-
- non-heap {
- used = ${kamon.system-metrics.default-gauge-precision}
- max = ${kamon.system-metrics.default-gauge-precision}
- committed = ${kamon.system-metrics.default-gauge-precision}
- }
-
- thread {
- daemon = ${kamon.system-metrics.default-gauge-precision}
- count = ${kamon.system-metrics.default-gauge-precision}
- peak = ${kamon.system-metrics.default-gauge-precision}
- }
-
- classes {
- total-loaded = ${kamon.system-metrics.default-gauge-precision}
- total-unloaded = ${kamon.system-metrics.default-gauge-precision}
- current-loaded = ${kamon.system-metrics.default-gauge-precision}
- }
-
- gc {
- count = ${kamon.metrics.precision.default-histogram-precision}
- time = ${kamon.metrics.precision.default-histogram-precision}
- }
+ heap-max = ${kamon.metrics.instrument-settings.system-metric.heap-used}
+ heap-committed = ${kamon.metrics.instrument-settings.system-metric.heap-used}
+
+
+ #
+ # Non-Heap Memory
+ #
+ non-heap-used {
+ highest-trackable-value = 5368709120
+ refresh-interval = 1 second
+ }
+ non-heap-max = ${kamon.metrics.instrument-settings.system-metric.non-heap-used}
+ non-heap-committed = ${kamon.metrics.instrument-settings.system-metric.non-heap-used}
+
+
+ #
+ # JVM Threads
+ #
+ thread-count {
+ highest-trackable-value = 10000
+ refresh-interval = 1 second
+ }
+
+ daemon-thread-count = ${kamon.metrics.instrument-settings.system-metric.thread-count}
+ peak-thread-count = ${kamon.metrics.instrument-settings.system-metric.thread-count}
+
+
+ #
+ # Class Loading
+ #
+ classes-loaded {
+ highest-trackable-value = 10000000
+ refresh-interval = 1 second
}
+
+ classes-unloaded = ${kamon.metrics.instrument-settings.system-metric.classes-loaded}
+ classes-currently-loaded = ${kamon.metrics.instrument-settings.system-metric.classes-loaded}
+
+
+ #
+ # File System
+ #
+ file-system-reads {
+ highest-trackable-value = 107374182400
+ }
+
+ file-system-writes = ${kamon.metrics.instrument-settings.system-metric.file-system-reads}
+
+
+ #
+ # Load Average
+ #
+ one-minute {
+ highest-trackable-value = 10000
+ }
+
+ five-minutes = ${kamon.metrics.instrument-settings.system-metric.one-minute}
+ fifteen-minutes = ${kamon.metrics.instrument-settings.system-metric.one-minute}
+
+
+ #
+ # System Memory
+ #
+ memory-used {
+ highest-trackable-value = 5368709120
+ }
+
+ memory-free = ${kamon.metrics.instrument-settings.system-metric.memory-used}
+ swap-free = ${kamon.metrics.instrument-settings.system-metric.memory-used}
+ swap-used = ${kamon.metrics.instrument-settings.system-metric.memory-used}
+
+
+ #
+ # Network
+ #
+ tx-bytes {
+ highest-trackable-value = 107374182400
+ }
+
+ rx-bytes = ${kamon.metrics.instrument-settings.system-metric.tx-bytes}
+
+ tx-errors {
+ highest-trackable-value = 10000000
+ }
+
+ rx-errors = ${kamon.metrics.instrument-settings.system-metric.tx-errors}
+ tx-dropped = ${kamon.metrics.instrument-settings.system-metric.tx-errors}
+ rx-dropped = ${kamon.metrics.instrument-settings.system-metric.tx-errors}
+
+
+ #
+ # Context Switches
+ #
+ context-switches-process-voluntary {
+ highest-trackable-value = 10000000
+ }
+
+ context-switches-process-non-voluntary = ${kamon.metrics.instrument-settings.system-metric.context-switches-process-voluntary}
+ context-switches-global = ${kamon.metrics.instrument-settings.system-metric.context-switches-process-voluntary}
+
}
}
}
\ No newline at end of file
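Each instrument listed above can be tuned individually under kamon.metrics.instrument-settings.system-metric. A hypothetical application.conf override raising the heap ceiling:

# Hypothetical override: track heaps up to 64 GB, refreshed every 5 seconds.
kamon.metrics.instrument-settings.system-metric {
  heap-used {
    highest-trackable-value = 68719476736
    refresh-interval = 5 seconds
  }
}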
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/CPUMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/CPUMetrics.scala
deleted file mode 100644
index 20789039..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/CPUMetrics.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class CPUMetrics(name: String) extends MetricGroupIdentity {
- val category = CPUMetrics
-}
-
-object CPUMetrics extends MetricGroupCategory {
- val name = "cpu"
-
- case object User extends MetricIdentity { val name = "user" }
- case object System extends MetricIdentity { val name = "system" }
- case object Wait extends MetricIdentity { val name = "wait" }
- case object Idle extends MetricIdentity { val name = "idle" }
- case object Stolen extends MetricIdentity { val name = "stolen" }
-
- case class CPUMetricRecorder(user: Histogram, system: Histogram, cpuWait: Histogram, idle: Histogram, stolen: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- CPUMetricSnapshot(user.collect(context), system.collect(context), cpuWait.collect(context), idle.collect(context), stolen.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class CPUMetricSnapshot(user: Histogram.Snapshot, system: Histogram.Snapshot, cpuWait: Histogram.Snapshot, idle: Histogram.Snapshot, stolen: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = CPUMetricSnapshot
-
- def merge(that: CPUMetricSnapshot, context: CollectionContext): GroupSnapshotType = {
- CPUMetricSnapshot(user.merge(that.user, context), system.merge(that.system, context), cpuWait.merge(that.cpuWait, context), idle.merge(that.idle, context), stolen.merge(that.stolen, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- User -> user,
- System -> system,
- Wait -> cpuWait,
- Idle -> idle,
- Stolen -> stolen)
- }
-
- val Factory = CPUMetricGroupFactory
-}
-
-case object CPUMetricGroupFactory extends MetricGroupFactory {
-
- import CPUMetrics._
-
- type GroupRecorder = CPUMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.system.cpu")
-
- val userConfig = settings.getConfig("user")
- val systemConfig = settings.getConfig("system")
- val cpuWaitConfig = settings.getConfig("wait")
- val idleConfig = settings.getConfig("idle")
- val stolenConfig = settings.getConfig("stolen")
-
- new CPUMetricRecorder(
- Histogram.fromConfig(userConfig),
- Histogram.fromConfig(systemConfig),
- Histogram.fromConfig(cpuWaitConfig),
- Histogram.fromConfig(idleConfig),
- Histogram.fromConfig(stolenConfig))
- }
-}
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/ClassLoadingMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/ClassLoadingMetrics.scala
deleted file mode 100644
index 1e3bee27..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/ClassLoadingMetrics.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import java.lang.management.ManagementFactory
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.{ Gauge, Histogram }
-
-case class ClassLoadingMetrics(name: String) extends MetricGroupIdentity {
- val category = ClassLoadingMetrics
-}
-
-object ClassLoadingMetrics extends MetricGroupCategory {
- val name = "classes"
-
- case object Loaded extends MetricIdentity { val name = "total-loaded" }
- case object Unloaded extends MetricIdentity { val name = "total-unloaded" }
- case object Current extends MetricIdentity { val name = "current-loaded" }
-
- case class ClassLoadingMetricRecorder(loaded: Gauge, unloaded: Gauge, current: Gauge)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- ClassLoadingMetricSnapshot(loaded.collect(context), unloaded.collect(context), current.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class ClassLoadingMetricSnapshot(loaded: Histogram.Snapshot, unloaded: Histogram.Snapshot, current: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = ClassLoadingMetricSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- ClassLoadingMetricSnapshot(loaded.merge(that.loaded, context), unloaded.merge(that.unloaded, context), current.merge(that.current, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- Loaded -> loaded,
- Unloaded -> unloaded,
- Current -> current)
- }
-
- val Factory = ClassLoadingMetricGroupFactory
-}
-
-case object ClassLoadingMetricGroupFactory extends MetricGroupFactory {
-
- import ClassLoadingMetrics._
-
- val classes = ManagementFactory.getClassLoadingMXBean
-
- type GroupRecorder = ClassLoadingMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.jvm.classes")
-
- val totalLoadedConfig = settings.getConfig("total-loaded")
- val totalUnloadedConfig = settings.getConfig("total-unloaded")
- val currentLoadedConfig = settings.getConfig("current-loaded")
-
- new ClassLoadingMetricRecorder(
- Gauge.fromConfig(totalLoadedConfig, system)(() ⇒ classes.getTotalLoadedClassCount),
- Gauge.fromConfig(totalUnloadedConfig, system)(() ⇒ classes.getUnloadedClassCount),
- Gauge.fromConfig(currentLoadedConfig, system)(() ⇒ classes.getLoadedClassCount.toLong))
- }
-}
-
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/ContextSwitchesMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/ContextSwitchesMetrics.scala
deleted file mode 100644
index 86aeabce..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/ContextSwitchesMetrics.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.metrics
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class ContextSwitchesMetrics(name: String) extends MetricGroupIdentity {
- val category = ContextSwitchesMetrics
-}
-
-object ContextSwitchesMetrics extends MetricGroupCategory {
- val name = "context-switches"
-
- case object PerProcessVoluntary extends MetricIdentity { val name = "per-process-voluntary" }
- case object PerProcessNonVoluntary extends MetricIdentity { val name = "per-process-non-voluntary" }
- case object Global extends MetricIdentity { val name = "global" }
-
- case class ContextSwitchesMetricsRecorder(perProcessVoluntary: Histogram, perProcessNonVoluntary: Histogram, global: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- ContextSwitchesMetricsSnapshot(perProcessVoluntary.collect(context), perProcessNonVoluntary.collect(context), global.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class ContextSwitchesMetricsSnapshot(perProcessVoluntary: Histogram.Snapshot, perProcessNonVoluntary: Histogram.Snapshot, global: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = ContextSwitchesMetricsSnapshot
-
- def merge(that: ContextSwitchesMetricsSnapshot, context: CollectionContext): GroupSnapshotType = {
- ContextSwitchesMetricsSnapshot(perProcessVoluntary.merge(that.perProcessVoluntary, context), perProcessVoluntary.merge(that.perProcessVoluntary, context), global.merge(that.global, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- PerProcessVoluntary -> perProcessVoluntary,
- PerProcessNonVoluntary -> perProcessNonVoluntary,
- Global -> global)
- }
-
- val Factory = ContextSwitchesMetricGroupFactory
-}
-
-case object ContextSwitchesMetricGroupFactory extends MetricGroupFactory {
- import ContextSwitchesMetrics._
-
- type GroupRecorder = ContextSwitchesMetricsRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.system.context-switches")
-
- val perProcessVoluntary = settings.getConfig("per-process-voluntary")
- val perProcessNonVoluntary = settings.getConfig("per-process-non-voluntary")
- val global = settings.getConfig("global")
-
- new ContextSwitchesMetricsRecorder(
- Histogram.fromConfig(perProcessVoluntary),
- Histogram.fromConfig(perProcessNonVoluntary),
- Histogram.fromConfig(global))
- }
-}
-
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/DiskMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/DiskMetrics.scala
deleted file mode 100644
index eeb6002b..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/DiskMetrics.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class DiskMetrics(name: String) extends MetricGroupIdentity {
- val category = DiskMetrics
-}
-
-object DiskMetrics extends MetricGroupCategory {
- val name = "disk"
-
- case object Reads extends MetricIdentity { val name = "reads" }
- case object Writes extends MetricIdentity { val name = "writes" }
- case object Queue extends MetricIdentity { val name = "queue" }
- case object ServiceTime extends MetricIdentity { val name = "service-time" }
-
- case class DiskMetricsRecorder(reads: Histogram, writes: Histogram, queue: Histogram, serviceTime: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- DiskMetricsSnapshot(reads.collect(context), writes.collect(context), queue.collect(context), serviceTime.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class DiskMetricsSnapshot(reads: Histogram.Snapshot, writes: Histogram.Snapshot, queue: Histogram.Snapshot, serviceTime: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = DiskMetricsSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- DiskMetricsSnapshot(reads.merge(that.reads, context), writes.merge(that.writes, context), queue.merge(that.queue, context), serviceTime.merge(that.serviceTime, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- Reads -> reads,
- Writes -> writes,
- Queue -> queue,
- ServiceTime -> serviceTime)
- }
-
- val Factory = DiskMetricGroupFactory
-}
-
-case object DiskMetricGroupFactory extends MetricGroupFactory {
-
- import DiskMetrics._
-
- type GroupRecorder = DiskMetricsRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.system.disk")
-
- val readsDiskConfig = settings.getConfig("reads")
- val writesDiskConfig = settings.getConfig("writes")
- val queueDiskConfig = settings.getConfig("queue")
- val serviceTimeDiskConfig = settings.getConfig("service-time")
-
- new DiskMetricsRecorder(
- Histogram.fromConfig(readsDiskConfig),
- Histogram.fromConfig(writesDiskConfig),
- Histogram.fromConfig(queueDiskConfig),
- Histogram.fromConfig(serviceTimeDiskConfig))
- }
-}
-
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/GCMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/GCMetrics.scala
deleted file mode 100644
index 5aa679c9..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/GCMetrics.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import java.lang.management.GarbageCollectorMXBean
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class GCMetrics(name: String) extends MetricGroupIdentity {
- val category = GCMetrics
-}
-
-object GCMetrics extends MetricGroupCategory {
- val name = "gc"
-
- case object CollectionCount extends MetricIdentity { val name = "collection-count" }
- case object CollectionTime extends MetricIdentity { val name = "collection-time" }
-
- case class GCMetricRecorder(count: Histogram, time: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- GCMetricSnapshot(count.collect(context), time.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class GCMetricSnapshot(count: Histogram.Snapshot, time: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = GCMetricSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- GCMetricSnapshot(count.merge(that.count, context), time.merge(that.time, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- CollectionCount -> count,
- CollectionTime -> time)
- }
-
- def Factory(gc: GarbageCollectorMXBean) = GCMetricGroupFactory(gc)
-}
-
-case class GCMetricGroupFactory(gc: GarbageCollectorMXBean) extends MetricGroupFactory {
- import GCMetrics._
-
- type GroupRecorder = GCMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.jvm.gc")
-
- val countConfig = settings.getConfig("count")
- val timeConfig = settings.getConfig("time")
-
- new GCMetricRecorder(
- Histogram.fromConfig(countConfig),
- Histogram.fromConfig(timeConfig))
- }
-} \ No newline at end of file
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/HeapMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/HeapMetrics.scala
deleted file mode 100644
index 5bba5bf6..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/HeapMetrics.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import java.lang.management.ManagementFactory
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.{ Gauge, Histogram }
-
-case class HeapMetrics(name: String) extends MetricGroupIdentity {
- val category = HeapMetrics
-}
-
-object HeapMetrics extends MetricGroupCategory {
- val name = "heap"
-
- case object Used extends MetricIdentity { val name = "used" }
- case object Max extends MetricIdentity { val name = "max" }
- case object Committed extends MetricIdentity { val name = "committed" }
-
- case class HeapMetricRecorder(used: Gauge, max: Gauge, committed: Gauge)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- HeapMetricSnapshot(used.collect(context), max.collect(context), committed.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class HeapMetricSnapshot(used: Histogram.Snapshot, max: Histogram.Snapshot, committed: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = HeapMetricSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- HeapMetricSnapshot(used.merge(that.used, context), max.merge(that.max, context), committed.merge(that.committed, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- Used -> used,
- Max -> max,
- Committed -> committed)
- }
-
- val Factory = HeapMetricGroupFactory
-}
-
-case object HeapMetricGroupFactory extends MetricGroupFactory {
-
- import HeapMetrics._
- import kamon.system.SystemMetricsExtension._
-
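- // A def (not a val), so every gauge sample reads a fresh MemoryUsage snapshot from the MXBean.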
- def heap = ManagementFactory.getMemoryMXBean.getHeapMemoryUsage
-
- type GroupRecorder = HeapMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.jvm.heap")
-
- val usedHeapConfig = settings.getConfig("used")
- val maxHeapConfig = settings.getConfig("max")
- val committedHeapConfig = settings.getConfig("committed")
-
- new HeapMetricRecorder(
- Gauge.fromConfig(usedHeapConfig, system, Scale.Mega)(() ⇒ toMB(heap.getUsed)),
- Gauge.fromConfig(maxHeapConfig, system, Scale.Mega)(() ⇒ toMB(heap.getMax)),
- Gauge.fromConfig(committedHeapConfig, system, Scale.Mega)(() ⇒ toMB(heap.getCommitted)))
- }
-
-}
-
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/LoadAverageMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/LoadAverageMetrics.scala
deleted file mode 100644
index cd196adf..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/LoadAverageMetrics.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class LoadAverageMetrics(name: String) extends MetricGroupIdentity {
- val category = LoadAverageMetrics
-}
-
-object LoadAverageMetrics extends MetricGroupCategory {
- val name = "load-average"
-
- case object OneMinute extends MetricIdentity { val name = "last-minute" }
- case object FiveMinutes extends MetricIdentity { val name = "last-five-minutes" }
- case object FifteenMinutes extends MetricIdentity { val name = "last-fifteen-minutes" }
-
- case class LoadAverageMetricsRecorder(one: Histogram, five: Histogram, fifteen: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- LoadAverageMetricsSnapshot(one.collect(context), five.collect(context), fifteen.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class LoadAverageMetricsSnapshot(one: Histogram.Snapshot, five: Histogram.Snapshot, fifteen: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = LoadAverageMetricsSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- LoadAverageMetricsSnapshot(one.merge(that.one, context), five.merge(that.five, context), fifteen.merge(that.fifteen, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- OneMinute -> one,
- FiveMinutes -> five,
- FifteenMinutes -> fifteen)
- }
-
- val Factory = LoadAverageMetricGroupFactory
-}
-
-case object LoadAverageMetricGroupFactory extends MetricGroupFactory {
-
- import LoadAverageMetrics._
-
- type GroupRecorder = LoadAverageMetricsRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.system.load-average")
-
- val oneMinuteConfig = settings.getConfig("one")
- val fiveMinutesConfig = settings.getConfig("five")
- val fifteenMinutesConfig = settings.getConfig("fifteen")
-
- new LoadAverageMetricsRecorder(
- Histogram.fromConfig(oneMinuteConfig),
- Histogram.fromConfig(fiveMinutesConfig),
- Histogram.fromConfig(fifteenMinutesConfig))
- }
-}
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/MemoryMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/MemoryMetrics.scala
deleted file mode 100644
index 14051427..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/MemoryMetrics.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class MemoryMetrics(name: String) extends MetricGroupIdentity {
- val category = MemoryMetrics
-}
-
-object MemoryMetrics extends MetricGroupCategory {
- val name = "memory"
-
- case object Used extends MetricIdentity { val name = "used" }
- case object Free extends MetricIdentity { val name = "free" }
- case object Buffer extends MetricIdentity { val name = "buffer" }
- case object Cache extends MetricIdentity { val name = "cache" }
- case object SwapUsed extends MetricIdentity { val name = "swap-used" }
- case object SwapFree extends MetricIdentity { val name = "swap-free" }
-
- case class MemoryMetricRecorder(used: Histogram, free: Histogram, buffer: Histogram, cache: Histogram, swapUsed: Histogram, swapFree: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- MemoryMetricSnapshot(used.collect(context), free.collect(context), buffer.collect(context), cache.collect(context), swapUsed.collect(context), swapFree.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class MemoryMetricSnapshot(used: Histogram.Snapshot, free: Histogram.Snapshot, buffer: Histogram.Snapshot, cache: Histogram.Snapshot, swapUsed: Histogram.Snapshot, swapFree: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = MemoryMetricSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- MemoryMetricSnapshot(used.merge(that.used, context), free.merge(that.free, context), buffer.merge(that.buffer, context), cache.merge(that.cache, context), swapUsed.merge(that.swapUsed, context), swapFree.merge(that.swapFree, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- Used -> used,
- Free -> free,
- Buffer -> buffer,
- Cache -> cache,
- SwapUsed -> swapUsed,
- SwapFree -> swapFree)
- }
-
- val Factory = MemoryMetricGroupFactory
-}
-
-case object MemoryMetricGroupFactory extends MetricGroupFactory {
-
- import MemoryMetrics._
-
- type GroupRecorder = MemoryMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.system.memory")
-
- val usedConfig = settings.getConfig("used")
- val freeConfig = settings.getConfig("free")
- val bufferConfig = settings.getConfig("buffer")
- val cacheConfig = settings.getConfig("cache")
- val swapUsedConfig = settings.getConfig("swap-used")
- val swapFreeConfig = settings.getConfig("swap-free")
-
- new MemoryMetricRecorder(
- Histogram.fromConfig(usedConfig, Scale.Mega),
- Histogram.fromConfig(freeConfig, Scale.Mega),
- Histogram.fromConfig(bufferConfig, Scale.Mega),
- Histogram.fromConfig(cacheConfig, Scale.Mega),
- Histogram.fromConfig(swapUsedConfig, Scale.Mega),
- Histogram.fromConfig(swapFreeConfig, Scale.Mega))
- }
-} \ No newline at end of file
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/NetworkMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/NetworkMetrics.scala
deleted file mode 100644
index d8a38f6d..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/NetworkMetrics.scala
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class NetworkMetrics(name: String) extends MetricGroupIdentity {
- val category = NetworkMetrics
-}
-
-object NetworkMetrics extends MetricGroupCategory {
- val name = "network"
-
- case object RxBytes extends MetricIdentity { val name = "rx-bytes" }
- case object TxBytes extends MetricIdentity { val name = "tx-bytes" }
- case object RxErrors extends MetricIdentity { val name = "rx-errors" }
- case object TxErrors extends MetricIdentity { val name = "tx-errors" }
- case object RxDropped extends MetricIdentity { val name = "rx-dropped" }
- case object TxDropped extends MetricIdentity { val name = "tx-dropped" }
-
- case class NetworkMetricRecorder(rxBytes: Histogram, txBytes: Histogram, rxErrors: Histogram, txErrors: Histogram, rxDropped: Histogram, txDropped: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- NetworkMetricSnapshot(rxBytes.collect(context), txBytes.collect(context), rxErrors.collect(context), txErrors.collect(context), rxDropped.collect(context), txDropped.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class NetworkMetricSnapshot(rxBytes: Histogram.Snapshot, txBytes: Histogram.Snapshot, rxErrors: Histogram.Snapshot, txErrors: Histogram.Snapshot, rxDropped: Histogram.Snapshot, txDropped: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = NetworkMetricSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- NetworkMetricSnapshot(rxBytes.merge(that.rxBytes, context), txBytes.merge(that.txBytes, context), rxErrors.merge(that.rxErrors, context), txErrors.merge(that.txErrors, context), rxDropped.merge(that.rxDropped, context), txDropped.merge(that.txDropped, context))
- }
-
- val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- RxBytes -> rxBytes,
- TxBytes -> txBytes,
- RxErrors -> rxErrors,
- TxErrors -> txErrors,
- RxDropped -> rxDropped,
- TxDropped -> txDropped)
- }
-
- val Factory = NetworkMetricGroupFactory
-}
-
-case object NetworkMetricGroupFactory extends MetricGroupFactory {
- import NetworkMetrics._
-
- type GroupRecorder = NetworkMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.system.network")
-
- val rxBytesConfig = settings.getConfig("rx-bytes")
- val txBytesConfig = settings.getConfig("tx-bytes")
- val rxErrorsConfig = settings.getConfig("rx-errors")
- val txErrorsConfig = settings.getConfig("tx-errors")
- val rxDroppedConfig = settings.getConfig("rx-dropped")
- val txDroppedConfig = settings.getConfig("tx-dropped")
-
- new NetworkMetricRecorder(
- Histogram.fromConfig(rxBytesConfig, Scale.Kilo),
- Histogram.fromConfig(txBytesConfig, Scale.Kilo),
- Histogram.fromConfig(rxErrorsConfig),
- Histogram.fromConfig(txErrorsConfig),
- Histogram.fromConfig(rxDroppedConfig),
- Histogram.fromConfig(txDroppedConfig))
- }
-} \ No newline at end of file
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/NonHeapMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/NonHeapMetrics.scala
deleted file mode 100644
index c2b9f9af..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/NonHeapMetrics.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import java.lang.management.ManagementFactory
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.{ Gauge, Histogram }
-
-case class NonHeapMetrics(name: String) extends MetricGroupIdentity {
- val category = NonHeapMetrics
-}
-
-object NonHeapMetrics extends MetricGroupCategory {
- val name = "non-heap"
-
- case object Used extends MetricIdentity { val name = "used" }
- case object Max extends MetricIdentity { val name = "max" }
- case object Committed extends MetricIdentity { val name = "committed" }
-
- case class NonHeapMetricRecorder(used: Gauge, max: Gauge, committed: Gauge)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- NonHeapMetricSnapshot(used.collect(context), max.collect(context), committed.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class NonHeapMetricSnapshot(used: Histogram.Snapshot, max: Histogram.Snapshot, committed: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = NonHeapMetricSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- NonHeapMetricSnapshot(used.merge(that.used, context), max.merge(that.max, context), committed.merge(that.committed, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- Used -> used,
- Max -> max,
- Committed -> committed)
- }
-
- val Factory = NonHeapMetricGroupFactory
-}
-
-case object NonHeapMetricGroupFactory extends MetricGroupFactory {
-
- import NonHeapMetrics._
- import kamon.system.SystemMetricsExtension._
-
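- // As in HeapMetricGroupFactory, a def ensures each gauge sample reads fresh MXBean data.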
- def nonHeap = ManagementFactory.getMemoryMXBean.getNonHeapMemoryUsage
-
- type GroupRecorder = NonHeapMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.jvm.non-heap")
-
- val usedNonHeapConfig = settings.getConfig("used")
- val maxNonHeapConfig = settings.getConfig("max")
- val committedNonHeapConfig = settings.getConfig("committed")
-
- new NonHeapMetricRecorder(
- Gauge.fromConfig(usedNonHeapConfig, system, Scale.Mega)(() ⇒ toMB(nonHeap.getUsed)),
- Gauge.fromConfig(maxNonHeapConfig, system, Scale.Mega)(() ⇒ toMB(nonHeap.getMax)),
- Gauge.fromConfig(committedNonHeapConfig, system, Scale.Mega)(() ⇒ toMB(nonHeap.getCommitted)))
- }
-}
-
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/ProcessCPUMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/ProcessCPUMetrics.scala
deleted file mode 100644
index ebd79d48..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/ProcessCPUMetrics.scala
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.Histogram
-
-case class ProcessCPUMetrics(name: String) extends MetricGroupIdentity {
- val category = ProcessCPUMetrics
-}
-
-object ProcessCPUMetrics extends MetricGroupCategory {
- val name = "proc-cpu"
-
- case object CpuPercent extends MetricIdentity { val name = "cpu-percentage" }
- case object TotalProcessTime extends MetricIdentity { val name = "total-process-time" }
-
- case class ProcessCPUMetricsRecorder(cpuPercent: Histogram, totalProcessTime: Histogram)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- ProcessCPUMetricsSnapshot(cpuPercent.collect(context), totalProcessTime.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class ProcessCPUMetricsSnapshot(cpuPercent: Histogram.Snapshot, totalProcessTime: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = ProcessCPUMetricsSnapshot
-
- def merge(that: ProcessCPUMetricsSnapshot, context: CollectionContext): GroupSnapshotType = {
- ProcessCPUMetricsSnapshot(cpuPercent.merge(that.cpuPercent, context), totalProcessTime.merge(that.totalProcessTime, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- CpuPercent -> cpuPercent,
- TotalProcessTime -> totalProcessTime)
- }
-
- val Factory = ProcessCPUMetricGroupFactory
-}
-
-case object ProcessCPUMetricGroupFactory extends MetricGroupFactory {
- import ProcessCPUMetrics._
-
- type GroupRecorder = ProcessCPUMetricsRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.system.process-cpu")
-
- val cpuPercentageConfig = settings.getConfig("cpu-percentage")
- val totalProcessTimeConfig = settings.getConfig("total-process-time")
-
- new ProcessCPUMetricsRecorder(
- Histogram.fromConfig(cpuPercentageConfig),
- Histogram.fromConfig(totalProcessTimeConfig))
- }
-}
-
diff --git a/kamon-system-metrics/src/main/scala/kamon/metrics/ThreadMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/metrics/ThreadMetrics.scala
deleted file mode 100644
index fc039ffa..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/metrics/ThreadMetrics.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.metrics
-
-import java.lang.management.ManagementFactory
-
-import akka.actor.ActorSystem
-import com.typesafe.config.Config
-import kamon.metric._
-import kamon.metric.instrument.{ Gauge, Histogram }
-
-case class ThreadMetrics(name: String) extends MetricGroupIdentity {
- val category = ThreadMetrics
-}
-
-object ThreadMetrics extends MetricGroupCategory {
- val name = "thread"
-
- case object Daemon extends MetricIdentity { val name = "daemon-count" }
- case object Count extends MetricIdentity { val name = "count" }
- case object Peak extends MetricIdentity { val name = "peak-count" }
-
- case class ThreadMetricRecorder(daemon: Gauge, count: Gauge, peak: Gauge)
- extends MetricGroupRecorder {
-
- def collect(context: CollectionContext): MetricGroupSnapshot = {
- ThreadMetricSnapshot(daemon.collect(context), count.collect(context), peak.collect(context))
- }
-
- def cleanup: Unit = {}
- }
-
- case class ThreadMetricSnapshot(daemon: Histogram.Snapshot, count: Histogram.Snapshot, peak: Histogram.Snapshot)
- extends MetricGroupSnapshot {
-
- type GroupSnapshotType = ThreadMetricSnapshot
-
- def merge(that: GroupSnapshotType, context: CollectionContext): GroupSnapshotType = {
- ThreadMetricSnapshot(daemon.merge(that.daemon, context), count.merge(that.count, context), peak.merge(that.peak, context))
- }
-
- lazy val metrics: Map[MetricIdentity, MetricSnapshot] = Map(
- Daemon -> daemon,
- Count -> count,
- Peak -> peak)
- }
-
- val Factory = ThreadMetricGroupFactory
-}
-
-case object ThreadMetricGroupFactory extends MetricGroupFactory {
-
- import ThreadMetrics._
-
- def threads = ManagementFactory.getThreadMXBean
-
- type GroupRecorder = ThreadMetricRecorder
-
- def create(config: Config, system: ActorSystem): GroupRecorder = {
- val settings = config.getConfig("precision.jvm.thread")
-
- val daemonThreadConfig = settings.getConfig("daemon")
- val countThreadsConfig = settings.getConfig("count")
- val peakThreadsConfig = settings.getConfig("peak")
-
- new ThreadMetricRecorder(
- Gauge.fromConfig(daemonThreadConfig, system)(() ⇒ threads.getDaemonThreadCount.toLong),
- Gauge.fromConfig(countThreadsConfig, system)(() ⇒ threads.getThreadCount.toLong),
- Gauge.fromConfig(peakThreadsConfig, system)(() ⇒ threads.getPeakThreadCount.toLong))
- }
-}
-
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/GcMetricsCollector.scala b/kamon-system-metrics/src/main/scala/kamon/system/GcMetricsCollector.scala
deleted file mode 100644
index ae2f50cf..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/system/GcMetricsCollector.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.system
-
-import java.lang.management.GarbageCollectorMXBean
-
-import akka.actor.{ Actor, Props }
-import kamon.metrics.GCMetrics.GCMetricRecorder
-
-import scala.concurrent.duration.FiniteDuration
-
-class GcMetricsCollector(collectInterval: FiniteDuration, recorder: Option[GCMetricRecorder], extractor: GcMetricExtractor) extends Actor {
- import kamon.system.GcMetricsCollector._
-
- val collectSchedule = context.system.scheduler.schedule(collectInterval, collectInterval, self, Collect)(SystemMetrics(context.system).dispatcher)
-
- def receive: Receive = {
- case Collect ⇒ collectMetrics()
- }
-
- override def postStop() = collectSchedule.cancel()
-
- def collectMetrics(): Unit = recorder.map(recordGc)
-
- private def recordGc(gcr: GCMetricRecorder) = {
- val gcMeasure = extractor.extract()
-
- gcr.count.record(gcMeasure.collectionCount)
- gcr.time.record(gcMeasure.collectionTime)
- }
-}
-
-object GcMetricsCollector {
- case object Collect
-
- def props(collectInterval: FiniteDuration, recorder: Option[GCMetricRecorder], extractor: GcMetricExtractor): Props = Props(classOf[GcMetricsCollector], collectInterval, recorder, extractor)
-}
-
-case class GcMeasure(collectionCount: Long, collectionTime: Long)
-
-case class GcMetricExtractor(gc: GarbageCollectorMXBean) {
- var previousGcCount = 0L
- var previousGcTime = 0L
-
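- // Each extract() returns the delta since the previous call, so recorded values reflect
- // activity within one collection interval rather than JVM-lifetime totals.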
- def extract(): GcMeasure = {
- var diffCollectionCount = 0L
- var diffCollectionTime = 0L
-
- val collectionCount = gc.getCollectionCount
- val collectionTime = gc.getCollectionTime
-
- if (collectionCount > 0)
- diffCollectionCount = collectionCount - previousGcCount
-
- if (collectionTime > 0)
- diffCollectionTime = collectionTime - previousGcTime
-
- previousGcCount = collectionCount
- previousGcTime = collectionTime
-
- GcMeasure(diffCollectionCount, diffCollectionTime)
- }
-} \ No newline at end of file
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetrics.scala
deleted file mode 100644
index cb3e2695..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetrics.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.system
-
-import java.lang.management.ManagementFactory
-import akka.actor._
-import akka.event.Logging
-import kamon.Kamon
-import kamon.metric.Metrics
-import kamon.metrics._
-import scala.collection.JavaConverters._
-import scala.concurrent.duration._
-
-object SystemMetrics extends ExtensionId[SystemMetricsExtension] with ExtensionIdProvider {
- override def lookup(): ExtensionId[_ <: Extension] = SystemMetrics
- override def createExtension(system: ExtendedActorSystem): SystemMetricsExtension = new SystemMetricsExtension(system)
-}
-
-class SystemMetricsExtension(private val system: ExtendedActorSystem) extends Kamon.Extension {
- import kamon.system.SystemMetricsExtension._
-
- val log = Logging(system, classOf[SystemMetricsExtension])
- log.info(s"Starting the Kamon(SystemMetrics) extension")
-
- val config = system.settings.config.getConfig("kamon.system-metrics")
- val dispatcher = system.dispatchers.lookup(config.getString("dispatcher"))
- val sigarFolder = system.settings.config.getString("kamon.sigar.folder")
- val systemMetricsExtension = Kamon(Metrics)(system)
-
- //System Metrics
- system.actorOf(SystemMetricsCollector.props(1 second), "system-metrics-collector")
-
- //JVM Metrics
- systemMetricsExtension.register(HeapMetrics(Heap), HeapMetrics.Factory)
- systemMetricsExtension.register(NonHeapMetrics(NonHeap), NonHeapMetrics.Factory)
- systemMetricsExtension.register(ClassLoadingMetrics(Classes), ClassLoadingMetrics.Factory)
- systemMetricsExtension.register(ThreadMetrics(Threads), ThreadMetrics.Factory)
-
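- // One recorder and one collector actor per valid garbage collector MXBean.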
- garbageCollectors.map { gc ⇒
- val gcName = sanitize(gc.getName)
- val recorder = systemMetricsExtension.register(GCMetrics(gcName), GCMetrics.Factory(gc))
- system.actorOf(GcMetricsCollector.props(1 second, recorder, GcMetricExtractor(gc)), s"$gcName-collector")
- }
-}
-
-object SystemMetricsExtension {
- val CPU = "cpu"
- val ProcessCPU = "process-cpu"
- val Network = "network"
- val Memory = "memory"
- val Heap = "heap"
- val NonHeap = "non-heap"
- val Classes = "classes"
- val Threads = "thread"
- val ContextSwitches = "context-switches"
- val Disk = "disk"
- val LoadAverage = "load-average"
-
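- // toLong preserves two decimal places of a fractional reading as a Long (e.g. a 1.23 load average is recorded as 123).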
- def toKB(value: Long): Long = value / 1024
- def toMB(value: Long): Long = value / 1024 / 1024
- def toLong(value: Double): Long = math.round(value * 100d)
- def sanitize(str: String): String = str.replaceAll("""[^\w]""", "-")
-
- val garbageCollectors = ManagementFactory.getGarbageCollectorMXBeans.asScala.filter(_.isValid)
-}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsBanner.scala b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsBanner.scala
deleted file mode 100644
index 99e09da9..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsBanner.scala
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-
-package kamon.system
-
-import akka.actor.ActorLogging
-import org.hyperic.sigar._
-
-import scala.util.control.NoStackTrace
-
-trait SystemMetricsBanner {
- self: ActorLogging ⇒
-
- def printBanner(sigar: Sigar) = {
- val os = OperatingSystem.getInstance
-
- def loadAverage(sigar: Sigar) = try {
- val average = sigar.getLoadAverage
- (average(0), average(1), average(2))
- } catch {
- case s: org.hyperic.sigar.SigarNotImplementedException ⇒ (0d, 0d, 0d)
- }
-
- def uptime(sigar: Sigar) = {
- def formatUptime(uptime: Double): String = {
- var retval: String = ""
- val days: Int = uptime.toInt / (60 * 60 * 24)
- var minutes: Int = 0
- var hours: Int = 0
-
- if (days != 0) {
- retval += s"$days ${if (days > 1) "days" else "day"}, "
- }
-
- minutes = uptime.toInt / 60
- hours = minutes / 60
- hours %= 24
- minutes %= 60
-
- if (hours != 0) {
- retval += hours + ":" + minutes
- } else {
- retval += minutes + " min"
- }
- retval
- }
-
- val uptime = sigar.getUptime
-
- s"up ${formatUptime(uptime.getUptime)}"
- }
-
- val message =
- """
- |
- | _____ _ __ __ _ _ _ _ _
- | / ____| | | | \/ | | | (_) | | | | | |
- || (___ _ _ ___| |_ ___ _ __ ___ | \ / | ___| |_ _ __ _ ___ ___| | ___ __ _ __| | ___ __| |
- | \___ \| | | / __| __/ _ \ '_ ` _ \| |\/| |/ _ \ __| '__| |/ __/ __| | / _ \ / _` |/ _` |/ _ \/ _` |
- | ____) | |_| \__ \ || __/ | | | | | | | | __/ |_| | | | (__\__ \ |___| (_) | (_| | (_| | __/ (_| |
- ||_____/ \__, |___/\__\___|_| |_| |_|_| |_|\___|\__|_| |_|\___|___/______\___/ \__,_|\__,_|\___|\__,_|
- | __/ |
- | |___/
- |
- | [System Status] [OS Information]
- | |--------------------------------| |----------------------------------------|
- | Up Time: %-10s Description: %s
- | Load Average: %-16s Name: %s
- | Version: %s
- | Arch: %s
- |
- """.stripMargin.format(uptime(sigar), os.getDescription, loadAverage(sigar), os.getName, os.getVersion, os.getArch)
- log.info(message)
- }
-
- class UnexpectedSigarException(message: String) extends RuntimeException(message) with NoStackTrace
-}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsCollector.scala b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsCollector.scala
deleted file mode 100644
index 4391240a..00000000
--- a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsCollector.scala
+++ /dev/null
@@ -1,266 +0,0 @@
-/*
- * =========================================================================================
- * Copyright © 2013-2014 the kamon project <http://kamon.io/>
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
- * except in compliance with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the
- * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions
- * and limitations under the License.
- * =========================================================================================
- */
-package kamon.system
-
-import java.io.{ File, IOException }
-
-import akka.actor.{ Actor, ActorLogging, Props }
-import kamon.Kamon
-import kamon.metric.Metrics
-import kamon.metrics.CPUMetrics.CPUMetricRecorder
-import kamon.metrics.ContextSwitchesMetrics.ContextSwitchesMetricsRecorder
-import kamon.metrics.DiskMetrics.DiskMetricsRecorder
-import kamon.metrics.LoadAverageMetrics.LoadAverageMetricsRecorder
-import kamon.metrics.MemoryMetrics.MemoryMetricRecorder
-import kamon.metrics.NetworkMetrics.NetworkMetricRecorder
-import kamon.metrics.ProcessCPUMetrics.ProcessCPUMetricsRecorder
-import kamon.metrics._
-import kamon.sigar.SigarProvisioner
-import org.hyperic.sigar._
-
-import scala.collection.concurrent.TrieMap
-import scala.concurrent.duration.FiniteDuration
-import scala.io.Source
-import scala.collection.mutable
-
-class SystemMetricsCollector(collectInterval: FiniteDuration) extends Actor with ActorLogging with SystemMetricsBanner {
- import kamon.system.SystemMetricsCollector._
- import kamon.system.SystemMetricsExtension._
-
- lazy val sigar = createSigarInstance
- def pid = sigar.getPid
-
- val interfaces = sigar.getNetInterfaceList.filterNot(NetworkFilter).toSet
- val fileSystems = sigar.getFileSystemList.filter(_.getType == FileSystem.TYPE_LOCAL_DISK).map(_.getDevName).toSet
-
- val metricExtension = Kamon(Metrics)(context.system)
- val collectSchedule = context.system.scheduler.schedule(collectInterval, collectInterval, self, Collect)(SystemMetrics(context.system).dispatcher)
-
- val cpuRecorder = metricExtension.register(CPUMetrics(CPU), CPUMetrics.Factory)
- val processCpuRecorder = metricExtension.register(ProcessCPUMetrics(ProcessCPU), ProcessCPUMetrics.Factory)
- val memoryRecorder = metricExtension.register(MemoryMetrics(Memory), MemoryMetrics.Factory)
- val networkRecorder = metricExtension.register(NetworkMetrics(Network), NetworkMetrics.Factory)
- val contextSwitchesRecorder = metricExtension.register(ContextSwitchesMetrics(ContextSwitches), ContextSwitchesMetrics.Factory)
- val diskRecorder = metricExtension.register(DiskMetrics(Disk), DiskMetrics.Factory)
- val loadAverageRecorder = metricExtension.register(LoadAverageMetrics(LoadAverage), LoadAverageMetrics.Factory)
-
- def receive: Receive = {
- case Collect ⇒ collectMetrics()
- }
-
- override def postStop() = collectSchedule.cancel()
-
- def collectMetrics() = {
- cpuRecorder.map(recordCpu)
- processCpuRecorder.map(recordProcessCpu)
- memoryRecorder.map(recordMemory)
- networkRecorder.map(recordNetwork)
- diskRecorder.map(recordDisk)
- loadAverageRecorder.map(recordLoadAverage)
-
- if (OsUtils.isLinux)
- contextSwitchesRecorder.map(recordContextSwitches)
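- // A factory instance per GarbageCollectorMXBean, so each collector gets its own recorder.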
- }
-
- private def recordCpu(cpur: CPUMetricRecorder) = {
- val cpuPerc = sigar.getCpuPerc
- cpur.user.record(toLong(cpuPerc.getUser))
- cpur.system.record(toLong(cpuPerc.getSys))
- cpur.cpuWait.record(toLong(cpuPerc.getWait))
- cpur.idle.record(toLong(cpuPerc.getIdle))
- cpur.stolen.record(toLong(cpuPerc.getStolen))
- }
-
- private def recordProcessCpu(pcpur: ProcessCPUMetricsRecorder) = {
- val procCpu = sigar.getProcCpu(pid)
- val procTime = sigar.getProcTime(pid)
-
- pcpur.cpuPercent.record(toLong(procCpu.getPercent))
- pcpur.totalProcessTime.record(procTime.getTotal) // total accumulated process CPU time, which gives context for interpreting the percentage above
- }
-
- private def recordMemory(mr: MemoryMetricRecorder) = {
- val mem = sigar.getMem
- val swap = sigar.getSwap
-
- mr.used.record(toMB(mem.getUsed))
- mr.free.record(toMB(mem.getFree))
- mr.swapUsed.record(toMB(swap.getUsed))
- mr.swapFree.record(toMB(swap.getFree))
- mr.buffer.record(toMB(collectBuffer(mem)))
- mr.cache.record(toMB(collectCache(mem)))
-
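- // Sigar only exposes buffers/cache indirectly: when used differs from actualUsed, the
- // actualUsed figure is recorded as buffer, and likewise actualFree as cache.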
- def collectBuffer(mem: Mem): Long = if (mem.getUsed != mem.getActualUsed) mem.getActualUsed else 0L
- def collectCache(mem: Mem): Long = if (mem.getFree != mem.getActualFree) mem.getActualFree else 0L
- }
-
- private def recordNetwork(nr: NetworkMetricRecorder) = {
- import Networks._
- nr.rxBytes.record(collect(sigar, interfaces, RxBytes, previousNetworkMetrics)(net ⇒ toKB(net.getRxBytes)))
- nr.txBytes.record(collect(sigar, interfaces, TxBytes, previousNetworkMetrics)(net ⇒ toKB(net.getTxBytes)))
- nr.rxErrors.record(collect(sigar, interfaces, RxErrors, previousNetworkMetrics)(net ⇒ net.getRxErrors))
- nr.txErrors.record(collect(sigar, interfaces, TxErrors, previousNetworkMetrics)(net ⇒ net.getTxErrors))
- nr.rxDropped.record(collect(sigar, interfaces, RxDropped, previousNetworkMetrics)(net ⇒ net.getRxDropped))
- nr.txDropped.record(collect(sigar, interfaces, TxDropped, previousNetworkMetrics)(net ⇒ net.getTxDropped))
-
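- // Sums the delta since the last collection across all interfaces; previousMetrics keeps
- // the last absolute counter per interface and metric name.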
- def collect(sigar: SigarProxy, interfaces: Set[String], name: String, previousMetrics: TrieMap[String, mutable.Map[String, Long]])(thunk: NetInterfaceStat ⇒ Long): Long = {
- interfaces.foldLeft(0L) { (acc, interface) ⇒
- {
- val net = sigar.getNetInterfaceStat(interface)
- val previous = previousMetrics.getOrElse(interface, mutable.Map.empty[String, Long])
- val current = thunk(net)
- val delta = current - previous.getOrElse(name, 0L)
- previousMetrics.put(interface, previous += name -> current)
- acc + delta
- }
- }
- }
- }
-
- private def recordDisk(rd: DiskMetricsRecorder) = {
- import Disks._
-
- rd.reads.record(collect(sigar, fileSystems, Reads, previousDiskMetrics)(disk ⇒ disk.getReads))
- rd.writes.record(collect(sigar, fileSystems, Writes, previousDiskMetrics)(disk ⇒ disk.getWrites))
- rd.queue.record(collect(sigar, fileSystems, Queue, previousDiskMetrics)(disk ⇒ toLong(disk.getQueue)))
- rd.serviceTime.record(collect(sigar, fileSystems, Service, previousDiskMetrics)(disk ⇒ toLong(disk.getServiceTime)))
- }
-
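- // Same delta technique as the network collector, normalizing Sigar's FIELD_NOTIMPL sentinel to zero before diffing.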
- def collect(sigar: SigarProxy, fileSystems: Set[String], name: String, previousMetrics: TrieMap[String, mutable.Map[String, Long]])(thunk: DiskUsage ⇒ Long): Long = {
- fileSystems.foldLeft(0L) { (acc, fileSystem) ⇒
- {
- val disk = sigar.getDiskUsage(fileSystem)
- val previous = previousMetrics.getOrElse(fileSystem, mutable.Map.empty[String, Long])
- val value = thunk(disk)
- val current = if (value == Sigar.FIELD_NOTIMPL) 0L else value
- val delta = current - previous.getOrElse(name, 0L)
- previousMetrics.put(fileSystem, previous += name -> current)
- acc + delta
- }
- }
- }
-
- private def recordLoadAverage(lar: LoadAverageMetricsRecorder) = {
- val loadAverage = sigar.getLoadAverage
- val (one, five, fifteen) = (loadAverage(0), loadAverage(1), loadAverage(2))
-
- lar.one.record(toLong(one))
- lar.five.record(toLong(five))
- lar.fifteen.record(toLong(fifteen))
- }
-
- private def recordContextSwitches(rcs: ContextSwitchesMetricsRecorder) = {
- def contextSwitchesByProcess(pid: Long): (Long, Long) = {
- val filename = s"/proc/$pid/status"
- var voluntaryContextSwitches = 0L
- var nonVoluntaryContextSwitches = 0L
-
- try {
- for (line ← Source.fromFile(filename).getLines()) {
- if (line.startsWith("voluntary_ctxt_switches")) {
- voluntaryContextSwitches = line.substring(line.indexOf(":") + 1).trim.toLong
- }
- if (line.startsWith("nonvoluntary_ctxt_switches")) {
- nonVoluntaryContextSwitches = line.substring(line.indexOf(":") + 1).trim.toLong
- }
- }
- } catch {
- case ex: IOException ⇒ log.error("Error trying to read [{}]", filename)
- }
- (voluntaryContextSwitches, nonVoluntaryContextSwitches)
- }
-
- def contextSwitches: Long = {
- val filename = "/proc/stat"
- var contextSwitches = 0L
-
- try {
- for (line ← Source.fromFile(filename).getLines()) {
- if (line.startsWith("rcs")) {
- contextSwitches = line.substring(line.indexOf(" ") + 1).toLong
- }
- }
- } catch {
- case ex: IOException ⇒ log.error("Error trying to read [{}]", filename)
- }
- contextSwitches
- }
-
- val (perProcessVoluntary, perProcessNonVoluntary) = contextSwitchesByProcess(pid)
- rcs.perProcessVoluntary.record(perProcessVoluntary)
- rcs.perProcessNonVoluntary.record(perProcessNonVoluntary)
- rcs.global.record(contextSwitches)
- }
-
- def verifiedSigarInstance: SigarProxy = {
- val sigar = new Sigar()
- printBanner(sigar)
- sigar
- }
-
- def provisionSigarLibrary: Unit = {
- val folder = SystemMetrics(context.system).sigarFolder
- SigarProvisioner.provision(new File(folder))
- }
-
- def createSigarInstance: SigarProxy = {
- // 1) Assume that library is already provisioned.
- try {
- return verifiedSigarInstance
- } catch {
- // Not using [[Try]] - any error is non-fatal in this case.
- case e: Throwable ⇒ log.info(s"Sigar is not yet provisioned: ${e}")
- }
-
- // 2) Attempt to provision library via sigar-loader.
- try {
- provisionSigarLibrary
- return verifiedSigarInstance
- } catch {
- // Not using [[Try]] - any error is non-fatal in this case.
- case e: Throwable ⇒ throw new UnexpectedSigarException(s"Failed to load Sigar: ${e}")
- }
- }
-}
-
-object SystemMetricsCollector {
- val NetworkFilter = Set("lo")
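- // Delta state lives in the companion object, so it survives actor restarts; TrieMap keeps access thread-safe.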
- val previousDiskMetrics = TrieMap[String, mutable.Map[String, Long]]()
- val previousNetworkMetrics = TrieMap[String, mutable.Map[String, Long]]()
-
- object Networks {
- val RxBytes = "rxBytes"
- val TxBytes = "txBytes"
- val RxErrors = "rxErrors"
- val TxErrors = "txErrors"
- val RxDropped = "rxDropped"
- val TxDropped = "txDropped"
- }
-
- object Disks {
- val Reads = "reads"
- val Writes = "writes"
- val Queue = "queue"
- val Service = "service"
- }
- case object Collect
-
- object OsUtils {
- def isLinux: Boolean = System.getProperty("os.name").indexOf("Linux") != -1
- }
-
- def props(collectInterval: FiniteDuration): Props = Props(classOf[SystemMetricsCollector], collectInterval)
-} \ No newline at end of file
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsExtension.scala b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsExtension.scala
new file mode 100644
index 00000000..df120611
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/SystemMetricsExtension.scala
@@ -0,0 +1,70 @@
+/*
+ * =========================================================================================
+ * Copyright © 2013-2014 the kamon project <http://kamon.io/>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
+ * except in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the
+ * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ * =========================================================================================
+ */
+package kamon.system
+
+import java.io.File
+import akka.actor._
+import akka.event.Logging
+import kamon.system.custom.{ ContextSwitchesUpdater, ContextSwitchesMetrics }
+import kamon.system.jmx._
+import kamon.{ ModuleSupervisor, Kamon }
+import kamon.metric._
+import kamon.sigar.SigarProvisioner
+import kamon.system.sigar.SigarMetricsUpdater
+
+import kamon.util.ConfigTools.Syntax
+
+object SystemMetrics extends ExtensionId[SystemMetricsExtension] with ExtensionIdProvider {
+ override def lookup(): ExtensionId[_ <: Extension] = SystemMetrics
+ override def createExtension(system: ExtendedActorSystem): SystemMetricsExtension = new SystemMetricsExtension(system)
+}
+
+class SystemMetricsExtension(system: ExtendedActorSystem) extends Kamon.Extension {
+
+ val log = Logging(system, classOf[SystemMetricsExtension])
+ log.info(s"Starting the Kamon(SystemMetrics) extension")
+
+ val config = system.settings.config.getConfig("kamon.system-metrics")
+ val sigarFolder = config.getString("sigar-native-folder")
+ val sigarRefreshInterval = config.getFiniteDuration("sigar-metrics-refresh-interval")
+ val contextSwitchesRefreshInterval = config.getFiniteDuration("context-switches-refresh-interval")
+ val metricsExtension = Kamon(Metrics)(system)
+
+ // Sigar-based metrics
+ SigarProvisioner.provision(new File(sigarFolder))
+ val sigarMetricsRecorder = ModuleSupervisor.get(system).createModule("sigar-metrics-recorder",
+ SigarMetricsUpdater.props(sigarRefreshInterval).withDispatcher("kamon.system-metrics.sigar-dispatcher"))
+
+ // JMX Metrics
+ ClassLoadingMetrics.register(metricsExtension)
+ GarbageCollectionMetrics.register(metricsExtension)
+ HeapMemoryMetrics.register(metricsExtension)
+ NonHeapMemoryMetrics.register(metricsExtension)
+ ThreadsMetrics.register(metricsExtension)
+
+ // If we are in Linux, add ContextSwitchesMetrics as well.
+ if (isLinux) {
+ val contextSwitchesRecorder = ContextSwitchesMetrics.register(system, contextSwitchesRefreshInterval)
+
+ ModuleSupervisor.get(system).createModule("context-switches-metrics-recorder",
+ ContextSwitchesUpdater.props(contextSwitchesRecorder, sigarRefreshInterval)
+ .withDispatcher("kamon.system-metrics.context-switches-dispatcher"))
+ }
+
+ def isLinux: Boolean =
+ System.getProperty("os.name").indexOf("Linux") != -1
+
+} \ No newline at end of file
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/custom/ContextSwitchesMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/custom/ContextSwitchesMetrics.scala
new file mode 100644
index 00000000..a3c56733
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/custom/ContextSwitchesMetrics.scala
@@ -0,0 +1,96 @@
+package kamon.system.custom
+
+import java.io.IOException
+import java.nio.charset.StandardCharsets
+import java.nio.file.{ Paths, Files }
+
+import akka.actor.{ Props, Actor, ActorSystem }
+import akka.event.{ Logging, LoggingAdapter }
+import kamon.Kamon
+import kamon.metric._
+import kamon.metric.instrument.InstrumentFactory
+import kamon.system.custom.ContextSwitchesUpdater.UpdateContextSwitches
+import org.hyperic.sigar.Sigar
+import scala.collection.JavaConverters.iterableAsScalaIterableConverter
+import scala.concurrent.duration.FiniteDuration
+
+class ContextSwitchesMetrics(pid: Long, log: LoggingAdapter, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val perProcessVoluntary = histogram("context-switches-process-voluntary")
+ val perProcessNonVoluntary = histogram("context-switches-process-non-voluntary")
+ val global = histogram("context-switches-global")
+
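+ // Linux-only: reads per-process counters from /proc/<pid>/status and the global counter from
+ // /proc/stat; SystemMetricsExtension registers this recorder only when running on Linux.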
+ def update(): Unit = {
+ def contextSwitchesByProcess(pid: Long): (Long, Long) = {
+ val filename = s"/proc/$pid/status"
+ var voluntaryContextSwitches = 0L
+ var nonVoluntaryContextSwitches = 0L
+
+ try {
+ for (line ← Files.readAllLines(Paths.get(filename), StandardCharsets.US_ASCII).asScala.toList) {
+ if (line.startsWith("voluntary_ctxt_switches")) {
+ voluntaryContextSwitches = line.substring(line.indexOf(":") + 1).trim.toLong
+ }
+ if (line.startsWith("nonvoluntary_ctxt_switches")) {
+ nonVoluntaryContextSwitches = line.substring(line.indexOf(":") + 1).trim.toLong
+ }
+ }
+ } catch {
+ case ex: IOException ⇒ log.error("Error trying to read [{}]", filename)
+ }
+ (voluntaryContextSwitches, nonVoluntaryContextSwitches)
+ }
+
+ def contextSwitches: Long = {
+ val filename = "/proc/stat"
+ var contextSwitches = 0L
+
+ try {
+ for (line ← Files.readAllLines(Paths.get(filename), StandardCharsets.US_ASCII).asScala.toList) {
+ if (line.startsWith("rcs")) {
+ contextSwitches = line.substring(line.indexOf(" ") + 1).toLong
+ }
+ }
+ } catch {
+ case ex: IOException ⇒ log.error("Error trying to read [{}]", filename)
+ }
+ contextSwitches
+ }
+
+ val (voluntary, nonVoluntary) = contextSwitchesByProcess(pid)
+ perProcessVoluntary.record(voluntary)
+ perProcessNonVoluntary.record(nonVoluntary)
+ global.record(contextSwitches)
+ }
+}
+
+object ContextSwitchesMetrics {
+
+ def register(system: ActorSystem, refreshInterval: FiniteDuration): ContextSwitchesMetrics = {
+ val metricsExtension = Kamon(Metrics)(system)
+ val log = Logging(system, "ContextSwitchesMetrics")
+ val pid = (new Sigar).getPid
+
+ val instrumentFactory = metricsExtension.instrumentFactory("system-metric")
+ metricsExtension.register(Entity("context-switches", "system-metric"), new ContextSwitchesMetrics(pid, log, instrumentFactory)).recorder
+ }
+}
+
+class ContextSwitchesUpdater(csm: ContextSwitchesMetrics, refreshInterval: FiniteDuration) extends Actor {
+ val schedule = context.system.scheduler.schedule(refreshInterval, refreshInterval, self, UpdateContextSwitches)(context.dispatcher)
+
+ def receive = {
+ case UpdateContextSwitches ⇒ csm.update()
+ }
+
+ override def postStop(): Unit = {
+ schedule.cancel()
+ super.postStop()
+ }
+}
+
+object ContextSwitchesUpdater {
+ case object UpdateContextSwitches
+
+ def props(csm: ContextSwitchesMetrics, refreshInterval: FiniteDuration): Props =
+ Props(new ContextSwitchesUpdater(csm, refreshInterval))
+} \ No newline at end of file
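
The /proc parsing above reduces to a few lines. A self-contained sketch of the same idea (ProcStatusSketch and its helper are hypothetical names, not part of this patch):

  import java.nio.charset.StandardCharsets
  import java.nio.file.{ Files, Paths }
  import scala.collection.JavaConverters.iterableAsScalaIterableConverter

  object ProcStatusSketch {
    // Reads both per-process counters from /proc/<pid>/status, defaulting to 0 if absent.
    def contextSwitches(pid: Long): (Long, Long) = {
      val lines = Files.readAllLines(Paths.get(s"/proc/$pid/status"), StandardCharsets.US_ASCII).asScala
      def counter(prefix: String): Long =
        lines.find(_.startsWith(prefix)).map(_.split(":")(1).trim.toLong).getOrElse(0L)
      (counter("voluntary_ctxt_switches"), counter("nonvoluntary_ctxt_switches"))
    }
  }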
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/jmx/ClassLoadingMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/jmx/ClassLoadingMetrics.scala
new file mode 100644
index 00000000..d9379738
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/jmx/ClassLoadingMetrics.scala
@@ -0,0 +1,28 @@
+package kamon.system.jmx
+
+import java.lang.management.ManagementFactory
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.InstrumentFactory
+
+class ClassLoadingMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val classLoadingBean = ManagementFactory.getClassLoadingMXBean
+
+ gauge("classes-loaded", Memory.Bytes, () ⇒ {
+ classLoadingBean.getTotalLoadedClassCount
+ })
+
+ gauge("classes-unloaded", Memory.Bytes, () ⇒ {
+ classLoadingBean.getUnloadedClassCount
+ })
+
+ gauge("classes-currently-loaded", Memory.Bytes, () ⇒ {
+ classLoadingBean.getLoadedClassCount.toLong
+ })
+
+}
+
+object ClassLoadingMetrics extends JmxSystemMetricRecorderCompanion("class-loading") {
+ def apply(instrumentFactory: InstrumentFactory): ClassLoadingMetrics =
+ new ClassLoadingMetrics(instrumentFactory)
+}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/jmx/GarbageCollectionMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/jmx/GarbageCollectionMetrics.scala
new file mode 100644
index 00000000..b7d2fe6a
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/jmx/GarbageCollectionMetrics.scala
@@ -0,0 +1,34 @@
+package kamon.system.jmx
+
+import java.lang.management.{ GarbageCollectorMXBean, ManagementFactory }
+
+import kamon.metric.{ Entity, MetricsExtension, GenericEntityRecorder }
+import kamon.metric.instrument.{ DifferentialValueCollector, Time, InstrumentFactory }
+import scala.collection.JavaConverters._
+
+class GarbageCollectionMetrics(gc: GarbageCollectorMXBean, instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+
+ gauge("garbage-collection-count", DifferentialValueCollector(() ⇒ {
+ gc.getCollectionCount
+ }))
+
+ gauge("garbage-collection-time", Time.Milliseconds, DifferentialValueCollector(() ⇒ {
+ gc.getCollectionTime
+ }))
+
+}
+
+object GarbageCollectionMetrics {
+
+ def sanitizeCollectorName(name: String): String =
+ name.replaceAll("""[^\w]""", "-").toLowerCase
+
+ def register(metricsExtension: MetricsExtension): Unit = {
+
+ val instrumentFactory = metricsExtension.instrumentFactory("system-metric")
+ ManagementFactory.getGarbageCollectorMXBeans.asScala.filter(_.isValid) map { gc ⇒
+ val gcName = sanitizeCollectorName(gc.getName)
+ metricsExtension.register(Entity(s"$gcName-garbage-collector", "system-metric"), new GarbageCollectionMetrics(gc, instrumentFactory))
+ }
+ }
+}
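
The sanitized collector name becomes part of the entity name, so every valid collector ends up with its own system-metric entity. For instance:

  GarbageCollectionMetrics.sanitizeCollectorName("PS Scavenge")  // "ps-scavenge"
  GarbageCollectionMetrics.sanitizeCollectorName("PS MarkSweep") // "ps-marksweep"
  // Registered entities: "ps-scavenge-garbage-collector" and "ps-marksweep-garbage-collector"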
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/jmx/HeapMemoryMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/jmx/HeapMemoryMetrics.scala
new file mode 100644
index 00000000..a96b5319
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/jmx/HeapMemoryMetrics.scala
@@ -0,0 +1,29 @@
+package kamon.system.jmx
+
+import java.lang.management.ManagementFactory
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.{ Memory, InstrumentFactory }
+
+class HeapMemoryMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val memoryBean = ManagementFactory.getMemoryMXBean
+ def heapUsage = memoryBean.getHeapMemoryUsage
+
+ gauge("heap-used", Memory.Bytes, () ⇒ {
+ heapUsage.getUsed
+ })
+
+ gauge("heap-max", Memory.Bytes, () ⇒ {
+ heapUsage.getMax
+ })
+
+ gauge("heap-committed", Memory.Bytes, () ⇒ {
+ heapUsage.getCommitted
+ })
+
+}
+
+object HeapMemoryMetrics extends JmxSystemMetricRecorderCompanion("heap-memory") {
+ def apply(instrumentFactory: InstrumentFactory): HeapMemoryMetrics =
+ new HeapMemoryMetrics(instrumentFactory)
+}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/jmx/JmxSystemMetricRecorderCompanion.scala b/kamon-system-metrics/src/main/scala/kamon/system/jmx/JmxSystemMetricRecorderCompanion.scala
new file mode 100644
index 00000000..d19622e6
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/jmx/JmxSystemMetricRecorderCompanion.scala
@@ -0,0 +1,13 @@
+package kamon.system.jmx
+
+import kamon.metric.instrument.InstrumentFactory
+import kamon.metric.{ Entity, EntityRecorder, MetricsExtension }
+
+abstract class JmxSystemMetricRecorderCompanion(metricName: String) {
+ def register(metricsExtension: MetricsExtension): EntityRecorder = {
+ val instrumentFactory = metricsExtension.instrumentFactory("system-metric")
+ metricsExtension.register(Entity(metricName, "system-metric"), apply(instrumentFactory)).recorder
+ }
+
+ def apply(instrumentFactory: InstrumentFactory): EntityRecorder
+} \ No newline at end of file
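
A new JMX-backed recorder only needs a recorder class plus a companion extending this base. A hypothetical sketch (the compilation metric is illustrative, not part of this patch, and it assumes the unitless gauge(name, collector) overload used by ThreadsMetrics later in this diff):

  package kamon.system.jmx

  import java.lang.management.ManagementFactory
  import kamon.metric.GenericEntityRecorder
  import kamon.metric.instrument.InstrumentFactory

  class CompilationMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
    val compilationBean = ManagementFactory.getCompilationMXBean

    gauge("total-compilation-time", () ⇒ {
      compilationBean.getTotalCompilationTime
    })
  }

  object CompilationMetrics extends JmxSystemMetricRecorderCompanion("compilation") {
    def apply(instrumentFactory: InstrumentFactory): CompilationMetrics =
      new CompilationMetrics(instrumentFactory)
  }

Wiring it up would then be a single call in the extension: CompilationMetrics.register(metricsExtension).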
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/jmx/NonHeapMemoryMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/jmx/NonHeapMemoryMetrics.scala
new file mode 100644
index 00000000..34a23d4f
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/jmx/NonHeapMemoryMetrics.scala
@@ -0,0 +1,33 @@
+package kamon.system.jmx
+
+import java.lang.management.ManagementFactory
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.{ Memory, InstrumentFactory }
+
+class NonHeapMemoryMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val memoryBean = ManagementFactory.getMemoryMXBean
+ def nonHeapUsage = memoryBean.getNonHeapMemoryUsage
+
+ gauge("non-heap-used", Memory.Bytes, () ⇒ {
+ nonHeapUsage.getUsed
+ })
+
+ gauge("non-heap-max", Memory.Bytes, () ⇒ {
+ val max = nonHeapUsage.getMax
+
+ // .getMax can return -1 if the max is not defined.
+ if (max >= 0) max
+ else 0
+ })
+
+ gauge("non-heap-committed", Memory.Bytes, () ⇒ {
+ nonHeapUsage.getCommitted
+ })
+
+}
+
+object NonHeapMemoryMetrics extends JmxSystemMetricRecorderCompanion("non-heap-memory") {
+ def apply(instrumentFactory: InstrumentFactory): NonHeapMemoryMetrics =
+ new NonHeapMemoryMetrics(instrumentFactory)
+}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/jmx/ThreadsMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/jmx/ThreadsMetrics.scala
new file mode 100644
index 00000000..b33eb3e6
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/jmx/ThreadsMetrics.scala
@@ -0,0 +1,28 @@
+package kamon.system.jmx
+
+import java.lang.management.ManagementFactory
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.InstrumentFactory
+
+class ThreadsMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) {
+ val threadsBean = ManagementFactory.getThreadMXBean
+
+ gauge("daemon-thread-count", () ⇒ {
+ threadsBean.getDaemonThreadCount.toLong
+ })
+
+ gauge("peak-thread-count", () ⇒ {
+ threadsBean.getPeakThreadCount.toLong
+ })
+
+ gauge("thread-count", () ⇒ {
+ threadsBean.getThreadCount.toLong
+ })
+
+}
+
+object ThreadsMetrics extends JmxSystemMetricRecorderCompanion("threads") {
+ def apply(instrumentFactory: InstrumentFactory): ThreadsMetrics =
+ new ThreadsMetrics(instrumentFactory)
+}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/CpuMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/CpuMetrics.scala
new file mode 100644
index 00000000..0a5f6494
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/CpuMetrics.scala
@@ -0,0 +1,29 @@
+package kamon.system.sigar
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.InstrumentFactory
+import org.hyperic.sigar.Sigar
+
+class CpuMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
+ val user = histogram("cpu-user")
+ val system = histogram("cpu-system")
+ val cpuWait = histogram("cpu-wait")
+ val idle = histogram("cpu-idle")
+ val stolen = histogram("cpu-stolen")
+
+ def update(sigar: Sigar): Unit = {
+ val cpuPerc = sigar.getCpuPerc
+
+ user.record((cpuPerc.getUser * 100L).toLong)
+ system.record((cpuPerc.getSys * 100L).toLong)
+ cpuWait.record((cpuPerc.getWait * 100L).toLong)
+ idle.record((cpuPerc.getIdle * 100L).toLong)
+ stolen.record((cpuPerc.getStolen * 100L).toLong)
+ }
+}
+
+object CpuMetrics extends SigarMetricRecorderCompanion("cpu") {
+
+ def apply(instrumentFactory: InstrumentFactory): CpuMetrics =
+ new CpuMetrics(instrumentFactory)
+}
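
Sigar's CpuPerc getters return fractions between 0.0 and 1.0, which is why each reading is scaled by 100 before landing in a Long-valued histogram:

  val userFraction = 0.237                    // as returned by cpuPerc.getUser
  val recorded = (userFraction * 100L).toLong // 23, recorded in the "cpu-user" histogram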
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/DiffRecordingHistogram.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/DiffRecordingHistogram.scala
new file mode 100644
index 00000000..94aa76d1
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/DiffRecordingHistogram.scala
@@ -0,0 +1,41 @@
+package kamon.system.sigar
+
+import java.util.concurrent.atomic.AtomicLong
+
+import kamon.metric.instrument.{ CollectionContext, Histogram }
+
+/**
+ * Wrapper Histogram for cases in which the recorded values should always be the difference
+ * between the current value and the last observed value. This is not thread-safe and should
+ * only be used for Sigar-based metrics whose updates are serialized through a single actor.
+ */
+class DiffRecordingHistogram(wrappedHistogram: Histogram) extends Histogram {
+ @volatile private var _recordedAtLeastOnce = false
+ private val _lastObservedValue = new AtomicLong(0)
+
+ private def processRecording(value: Long, count: Long): Unit = {
+ if (_recordedAtLeastOnce)
+ wrappedHistogram.record(value - _lastObservedValue.getAndSet(value), count)
+ else {
+ _lastObservedValue.set(value)
+ _recordedAtLeastOnce = true
+ }
+ }
+
+ def record(value: Long): Unit =
+ processRecording(value, 1)
+
+ def record(value: Long, count: Long): Unit =
+ processRecording(value, count)
+
+ def cleanup: Unit =
+ wrappedHistogram.cleanup
+
+ def collect(context: CollectionContext): Histogram.Snapshot =
+ wrappedHistogram.collect(context)
+}
+
+object DiffRecordingHistogram {
+ def apply(histogram: Histogram): DiffRecordingHistogram =
+ new DiffRecordingHistogram(histogram)
+} \ No newline at end of file
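
Fed a monotonically growing counter, the wrapper records per-interval deltas; assuming underlying is any Histogram instance:

  val diff = DiffRecordingHistogram(underlying)
  diff.record(1000L) // first observation is only stored, nothing is recorded
  diff.record(1250L) // records 1250 - 1000 = 250
  diff.record(1300L) // records 1300 - 1250 = 50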
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/FileSystemMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/FileSystemMetrics.scala
new file mode 100644
index 00000000..dffebf5a
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/FileSystemMetrics.scala
@@ -0,0 +1,25 @@
+package kamon.system.sigar
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.{ Memory, InstrumentFactory }
+import org.hyperic.sigar.{ DiskUsage, FileSystem, Sigar }
+
+class FileSystemMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
+ val reads = DiffRecordingHistogram(histogram("file-system-reads", Memory.Bytes))
+ val writes = DiffRecordingHistogram(histogram("file-system-writes", Memory.Bytes))
+
+ def sumOfAllFileSystems(sigar: Sigar, thunk: DiskUsage ⇒ Long): Long = {
+ val fileSystems = sigar.getFileSystemList.filter(_.getType == FileSystem.TYPE_LOCAL_DISK).map(_.getDevName).toSet
+ fileSystems.map(i ⇒ thunk(sigar.getDiskUsage(i))).fold(0L)(_ + _)
+ }
+
+ def update(sigar: Sigar): Unit = {
+ reads.record(sumOfAllFileSystems(sigar, _.getReadBytes))
+ writes.record(sumOfAllFileSystems(sigar, _.getWriteBytes))
+ }
+}
+
+object FileSystemMetrics extends SigarMetricRecorderCompanion("file-system") {
+ def apply(instrumentFactory: InstrumentFactory): FileSystemMetrics =
+ new FileSystemMetrics(instrumentFactory)
+}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/LoadAverageMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/LoadAverageMetrics.scala
new file mode 100644
index 00000000..3e02cc8f
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/LoadAverageMetrics.scala
@@ -0,0 +1,25 @@
+package kamon.system.sigar
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.InstrumentFactory
+import org.hyperic.sigar.Sigar
+
+class LoadAverageMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
+ val oneMinute = histogram("one-minute")
+ val fiveMinutes = histogram("five-minutes")
+ val fifteenMinutes = histogram("fifteen-minutes")
+
+ def update(sigar: Sigar): Unit = {
+ val loadAverage = sigar.getLoadAverage
+
+ oneMinute.record(loadAverage(0).toLong)
+ fiveMinutes.record(loadAverage(1).toLong)
+ fifteenMinutes.record(loadAverage(2).toLong)
+ }
+}
+
+object LoadAverageMetrics extends SigarMetricRecorderCompanion("load-average") {
+
+ def apply(instrumentFactory: InstrumentFactory): LoadAverageMetrics =
+ new LoadAverageMetrics(instrumentFactory)
+}
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/MemoryMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/MemoryMetrics.scala
new file mode 100644
index 00000000..ab7fcd88
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/MemoryMetrics.scala
@@ -0,0 +1,36 @@
+package kamon.system.sigar
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.{ Memory, InstrumentFactory }
+import org.hyperic.sigar.Sigar
+
+/**
+ * System memory usage metrics, as reported by Sigar:
+ * - used: Total used system memory.
+ * - free: Total free system memory (e.g. on Linux, free plus cached).
+ * - swap-used: Total used system swap.
+ * - swap-free: Total free system swap.
+ */
+class MemoryMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
+ val used = histogram("memory-used", Memory.Bytes)
+ val free = histogram("memory-free", Memory.Bytes)
+ val swapUsed = histogram("swap-used", Memory.Bytes)
+ val swapFree = histogram("swap-free", Memory.Bytes)
+
+ def update(sigar: Sigar): Unit = {
+ val mem = sigar.getMem
+ val swap = sigar.getSwap
+
+ used.record(mem.getUsed)
+ free.record(mem.getFree)
+ swapUsed.record(swap.getUsed)
+ swapFree.record(swap.getFree)
+ }
+}
+
+object MemoryMetrics extends SigarMetricRecorderCompanion("memory") {
+
+ def apply(instrumentFactory: InstrumentFactory): MemoryMetrics =
+ new MemoryMetrics(instrumentFactory)
+}
+
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/NetworkMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/NetworkMetrics.scala
new file mode 100644
index 00000000..fb33b7e4
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/NetworkMetrics.scala
@@ -0,0 +1,33 @@
+package kamon.system.sigar
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument._
+import org.hyperic.sigar.{ NetInterfaceStat, Sigar }
+
+class NetworkMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
+ val receivedBytes = DiffRecordingHistogram(histogram("rx-bytes", Memory.Bytes))
+ val transmittedBytes = DiffRecordingHistogram(histogram("tx-bytes", Memory.Bytes))
+ val receiveErrors = DiffRecordingHistogram(histogram("rx-errors"))
+ val transmitErrors = DiffRecordingHistogram(histogram("tx-errors"))
+ val receiveDrops = DiffRecordingHistogram(histogram("rx-dropped"))
+ val transmitDrops = DiffRecordingHistogram(histogram("tx-dropped"))
+
+ def sumOfAllInterfaces(sigar: Sigar, thunk: NetInterfaceStat ⇒ Long): Long = {
+ val interfaces = sigar.getNetInterfaceList.toList.filter(_ != "lo")
+ interfaces.map(i ⇒ thunk(sigar.getNetInterfaceStat(i))).fold(0L)(_ + _)
+ }
+
+ def update(sigar: Sigar): Unit = {
+ receivedBytes.record(sumOfAllInterfaces(sigar, _.getRxBytes))
+ transmittedBytes.record(sumOfAllInterfaces(sigar, _.getTxBytes))
+ receiveErrors.record(sumOfAllInterfaces(sigar, _.getRxErrors))
+ transmitErrors.record(sumOfAllInterfaces(sigar, _.getTxErrors))
+ receiveDrops.record(sumOfAllInterfaces(sigar, _.getRxDropped))
+ transmitDrops.record(sumOfAllInterfaces(sigar, _.getTxDropped))
+ }
+}
+
+object NetworkMetrics extends SigarMetricRecorderCompanion("network") {
+ def apply(instrumentFactory: InstrumentFactory): NetworkMetrics =
+ new NetworkMetrics(instrumentFactory)
+} \ No newline at end of file
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/ProcessCpuMetrics.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/ProcessCpuMetrics.scala
new file mode 100644
index 00000000..0ca5c1c8
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/ProcessCpuMetrics.scala
@@ -0,0 +1,39 @@
+package kamon.system.sigar
+
+import kamon.metric.GenericEntityRecorder
+import kamon.metric.instrument.InstrumentFactory
+import org.hyperic.sigar.{ ProcCpu, Sigar }
+
+class ProcessCpuMetrics(instrumentFactory: InstrumentFactory) extends GenericEntityRecorder(instrumentFactory) with SigarMetric {
+ val processUserCpu = histogram("process-user-cpu")
+ val processSystemCpu = histogram("process-system-cpu")
+ val processTotalCpu = histogram("process-cpu")
+
+ var lastProcCpu: Option[ProcCpu] = None
+
+ def update(sigar: Sigar): Unit = {
+ val pid = sigar.getPid
+ val procCpu = sigar.getProcCpu(pid)
+
+ lastProcCpu.foreach { last ⇒
+ val timeDiff = procCpu.getLastTime - last.getLastTime
+ if (timeDiff > 0) {
+ val userPercent = (((procCpu.getUser - last.getUser) / timeDiff.toDouble) * 100).toLong
+ val systemPercent = (((procCpu.getSys - last.getSys) / timeDiff.toDouble) * 100).toLong
+
+ processUserCpu.record(userPercent)
+ processSystemCpu.record(systemPercent)
+ processTotalCpu.record(userPercent + systemPercent)
+ }
+ }
+
+ lastProcCpu = Some(procCpu)
+
+ }
+}
+
+object ProcessCpuMetrics extends SigarMetricRecorderCompanion("process-cpu") {
+
+ def apply(instrumentFactory: InstrumentFactory): ProcessCpuMetrics =
+ new ProcessCpuMetrics(instrumentFactory)
+}
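
A worked instance of the arithmetic above: if getLastTime advanced 1000 ms between two samples while user time grew by 150 ms and system time by 50 ms, then:

  val timeDiff = 1000L
  val userPercent = ((150L / timeDiff.toDouble) * 100).toLong   // 15
  val systemPercent = ((50L / timeDiff.toDouble) * 100).toLong  // 5
  // processTotalCpu records 15 + 5 = 20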
diff --git a/kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarMetricsUpdater.scala b/kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarMetricsUpdater.scala
new file mode 100644
index 00000000..8a430427
--- /dev/null
+++ b/kamon-system-metrics/src/main/scala/kamon/system/sigar/SigarMetricsUpdater.scala
@@ -0,0 +1,59 @@
+package kamon.system.sigar
+
+import akka.actor.{ Props, Actor }
+import kamon.Kamon
+import kamon.metric.instrument.InstrumentFactory
+import kamon.metric.{ Entity, EntityRecorder, MetricsExtension, Metrics }
+import kamon.system.sigar.SigarMetricsUpdater.UpdateSigarMetrics
+import org.hyperic.sigar.Sigar
+
+import scala.concurrent.duration.FiniteDuration
+
+class SigarMetricsUpdater(refreshInterval: FiniteDuration) extends Actor {
+ val sigar = new Sigar
+ val metricsExtension = Kamon(Metrics)(context.system)
+
+ val sigarMetrics = List(
+ CpuMetrics.register(metricsExtension),
+ FileSystemMetrics.register(metricsExtension),
+ LoadAverageMetrics.register(metricsExtension),
+ MemoryMetrics.register(metricsExtension),
+ NetworkMetrics.register(metricsExtension),
+ ProcessCpuMetrics.register(metricsExtension))
+
+ val refreshSchedule = context.system.scheduler.schedule(refreshInterval, refreshInterval, self, UpdateSigarMetrics)(context.dispatcher)
+
+ def receive = {
+ case UpdateSigarMetrics ⇒ updateMetrics()
+ }
+
+ def updateMetrics(): Unit = {
+ sigarMetrics.foreach(_.update(sigar))
+ }
+
+ override def postStop(): Unit = {
+ refreshSchedule.cancel()
+ super.postStop()
+ }
+}
+
+object SigarMetricsUpdater {
+ def props(refreshInterval: FiniteDuration): Props =
+ Props(new SigarMetricsUpdater(refreshInterval))
+
+ case object UpdateSigarMetrics
+}
+
+trait SigarMetric extends EntityRecorder {
+ def update(sigar: Sigar): Unit
+}
+
+abstract class SigarMetricRecorderCompanion(metricName: String) {
+ def register(metricsExtension: MetricsExtension): SigarMetric = {
+ val instrumentFactory = metricsExtension.instrumentFactory("system-metric")
+ metricsExtension.register(Entity(metricName, "system-metric"), apply(instrumentFactory)).recorder
+ }
+
+ def apply(instrumentFactory: InstrumentFactory): SigarMetric
+}
+
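
The SystemMetrics extension starts this actor through the ModuleSupervisor (see above); for a quick manual experiment it can also be started directly, assuming kamon-core and the Sigar native library are on the classpath:

  import akka.actor.ActorSystem
  import kamon.system.sigar.SigarMetricsUpdater
  import scala.concurrent.duration._

  object SigarUpdaterSketch extends App {
    val system = ActorSystem("sigar-example")
    // Registers all six Sigar recorders and samples them every 100 millis.
    system.actorOf(SigarMetricsUpdater.props(100.millis), "sigar-metrics-updater")
  }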
diff --git a/kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala b/kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala
index c9530160..4d633952 100644
--- a/kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala
+++ b/kamon-system-metrics/src/test/scala/kamon/metrics/SystemMetricsSpec.scala
@@ -15,406 +15,140 @@
package kamon.metric
-import akka.actor.ActorSystem
-import akka.testkit.{ TestKitBase, TestProbe }
-import com.typesafe.config.ConfigFactory
-import kamon.Kamon
-import kamon.metric.Subscriptions.TickMetricSnapshot
-import kamon.metrics.CPUMetrics.CPUMetricSnapshot
-import kamon.metrics.ClassLoadingMetrics.ClassLoadingMetricSnapshot
-import kamon.metrics.ContextSwitchesMetrics.ContextSwitchesMetricsSnapshot
-import kamon.metrics.DiskMetrics.DiskMetricsSnapshot
-import kamon.metrics.GCMetrics.GCMetricSnapshot
-import kamon.metrics.HeapMetrics.HeapMetricSnapshot
-import kamon.metrics.LoadAverageMetrics.LoadAverageMetricsSnapshot
-import kamon.metrics.MemoryMetrics.MemoryMetricSnapshot
-import kamon.metrics.NetworkMetrics.NetworkMetricSnapshot
-import kamon.metrics.NonHeapMetrics.NonHeapMetricSnapshot
-import kamon.metrics.ProcessCPUMetrics.ProcessCPUMetricsSnapshot
-import kamon.metrics.ThreadMetrics.ThreadMetricSnapshot
-import kamon.metrics._
-import kamon.system.SystemMetricsExtension
-import org.scalatest.{ Matchers, WordSpecLike }
-
-import scala.concurrent.duration._
-
-class SystemMetricsSpec extends TestKitBase with WordSpecLike with Matchers with RedirectLogging {
- implicit lazy val system: ActorSystem = ActorSystem("system-metrics-spec", ConfigFactory.parseString(
- """
- |akka {
- | extensions = ["kamon.system.SystemMetrics"]
- |}
- |
- |kamon.metrics {
- | disable-aspectj-weaver-missing-error = true
- | tick-interval = 1 second
- |}
- """.stripMargin))
-
- "the Kamon CPU Metrics" should {
- "record user, system, wait, idle, stolen metrics" in new CPUMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
-
- val CPUMetrics = expectCPUMetrics(metricsListener, 3 seconds)
- CPUMetrics.user.max should be >= 0L
- CPUMetrics.system.max should be >= 0L
- CPUMetrics.cpuWait.max should be >= 0L
- CPUMetrics.idle.max should be >= 0L
- CPUMetrics.stolen.max should be >= 0L
- }
- }
- "the Kamon GC Metrics" should {
- "record count, time metrics" in new GCMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
-
- val GCMetrics = expectGCMetrics(metricsListener, 3 seconds)
- GCMetrics.count.max should be >= 0L
- GCMetrics.time.max should be >= 0L
- }
- }
-
- "the Kamon Heap Metrics" should {
- "record used, max, commited metrics" in new HeapMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
-
- val HeapMetrics = expectHeapMetrics(metricsListener, 3 seconds)
- HeapMetrics.used.max should be >= 0L
- HeapMetrics.max.max should be >= 0L
- HeapMetrics.committed.max should be >= 0L
- }
- }
-
- "the Kamon Non-Heap Metrics" should {
- "record used, max, commited metrics" in new NonHeapMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
-
- val NonHeapMetrics = expectNonHeapMetrics(metricsListener, 3 seconds)
- NonHeapMetrics.used.max should be >= 0L
- NonHeapMetrics.max.max should be >= 0L
- NonHeapMetrics.committed.max should be >= 0L
- }
- }
-
- "the Kamon Thread Metrics" should {
- "record daemon, count, peak metrics" in new ThreadMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
-
- val ThreadMetrics = expectThreadMetrics(metricsListener, 3 seconds)
- ThreadMetrics.daemon.max should be >= 0L
- ThreadMetrics.count.max should be >= 0L
- ThreadMetrics.peak.max should be >= 0L
- }
- }
-
- "the Kamon ClassLoading Metrics" should {
- "record loaded, unloaded, current metrics" in new ClassLoadingMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
+import java.lang.management.ManagementFactory
- val ClassLoadingMetrics = expectClassLoadingMetrics(metricsListener, 3 seconds)
- ClassLoadingMetrics.loaded.max should be >= 0L
- ClassLoadingMetrics.unloaded.max should be >= 0L
- ClassLoadingMetrics.current.max should be >= 0L
- }
- }
-
- "the Kamon Disk Metrics" should {
- "record reads, writes, queue, service time metrics" in new DiskMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
-
- val DiskMetrics = expectDiskMetrics(metricsListener, 3 seconds)
- DiskMetrics.reads.max should be >= 0L
- DiskMetrics.writes.max should be >= 0L
- DiskMetrics.queue.max should be >= 0L
- DiskMetrics.serviceTime.max should be >= 0L
- }
- }
-
- "the Kamon Load Average Metrics" should {
- "record 1 minute, 5 minutes and 15 minutes metrics" in new LoadAverageMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
-
- val LoadAverageMetrics = expectLoadAverageMetrics(metricsListener, 3 seconds)
- LoadAverageMetrics.one.max should be >= 0L
- LoadAverageMetrics.five.max should be >= 0L
- LoadAverageMetrics.fifteen.max should be >= 0L
- }
- }
+import com.typesafe.config.ConfigFactory
+import kamon.system.jmx.GarbageCollectionMetrics
+import kamon.testkit.BaseKamonSpec
+import scala.collection.JavaConverters._
- "the Kamon Memory Metrics" should {
- "record used, free, buffer, cache, swap used, swap free metrics" in new MemoryMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
-
- val MemoryMetrics = expectMemoryMetrics(metricsListener, 3 seconds)
- MemoryMetrics.used.max should be >= 0L
- MemoryMetrics.free.max should be >= 0L
- MemoryMetrics.buffer.max should be >= 0L
- MemoryMetrics.cache.max should be >= 0L
- MemoryMetrics.swapUsed.max should be >= 0L
- MemoryMetrics.swapFree.max should be >= 0L
- }
- }
+class SystemMetricsSpec extends BaseKamonSpec("system-metrics-spec") with RedirectLogging {
- "the Kamon Network Metrics" should {
- "record rxBytes, txBytes, rxErrors, txErrors, rxDropped, txDropped metrics" in new NetworkMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
-
- val NetworkMetrics = expectNetworkMetrics(metricsListener, 3 seconds)
- NetworkMetrics.rxBytes.max should be >= 0L
- NetworkMetrics.txBytes.max should be >= 0L
- NetworkMetrics.rxErrors.max should be >= 0L
- NetworkMetrics.txErrors.max should be >= 0L
- NetworkMetrics.rxDropped.max should be >= 0L
- NetworkMetrics.txDropped.max should be >= 0L
- }
- }
-
- "the Kamon Process CPU Metrics" should {
- "record Cpu Percent, Total Process Time metrics" in new ProcessCPUMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
+ override lazy val config =
+ ConfigFactory.parseString(
+ """
+ |kamon.metric {
+ | tick-interval = 1 hour
+ |}
+ |
+ |akka {
+ | extensions = ["kamon.system.SystemMetrics"]
+ |}
+ """.stripMargin)
- val ProcessCPUMetrics = expectProcessCPUMetrics(metricsListener, 3 seconds)
- ProcessCPUMetrics.cpuPercent.max should be >= 0L
- ProcessCPUMetrics.totalProcessTime.max should be >= 0L
- }
- }
+ override protected def beforeAll(): Unit =
+ Thread.sleep(2000) // Give some room to the recorders to store some values.
- "the Kamon ContextSwitches Metrics" should {
- "record Context Switches Global, Voluntary and Non Voluntary metrics" in new ContextSwitchesMetricsListenerFixture {
- val metricsListener = subscribeToMetrics()
+ "the Kamon System Metrics module" should {
+ "record user, system, wait, idle and stolen CPU metrics" in {
+ val cpuMetrics = takeSnapshotOf("cpu", "system-metric")
- val ContextSwitchesMetrics = expectContextSwitchesMetrics(metricsListener, 3 seconds)
- ContextSwitchesMetrics.perProcessVoluntary.max should be >= 0L
- ContextSwitchesMetrics.perProcessNonVoluntary.max should be >= 0L
- ContextSwitchesMetrics.global.max should be >= 0L
+ cpuMetrics.histogram("cpu-user").get.numberOfMeasurements should be > 0L
+ cpuMetrics.histogram("cpu-system").get.numberOfMeasurements should be > 0L
+ cpuMetrics.histogram("cpu-wait").get.numberOfMeasurements should be > 0L
+ cpuMetrics.histogram("cpu-idle").get.numberOfMeasurements should be > 0L
+ cpuMetrics.histogram("cpu-stolen").get.numberOfMeasurements should be > 0L
}
- }
- def expectCPUMetrics(listener: TestProbe, waitTime: FiniteDuration): CPUMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val cpuMetricsOption = tickSnapshot.metrics.get(CPUMetrics(SystemMetricsExtension.CPU))
- cpuMetricsOption should not be empty
- cpuMetricsOption.get.asInstanceOf[CPUMetricSnapshot]
- }
+ "record count and time garbage collection metrics" in {
+ val availableGarbageCollectors = ManagementFactory.getGarbageCollectorMXBeans.asScala.filter(_.isValid)
- trait CPUMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(CPUMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
- }
- }
+ for (collector ← availableGarbageCollectors) {
+ val sanitizedName = GarbageCollectionMetrics.sanitizeCollectorName(collector.getName)
+ val collectorMetrics = takeSnapshotOf(s"$sanitizedName-garbage-collector", "system-metric")
- def expectGCMetrics(listener: TestProbe, waitTime: FiniteDuration): GCMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
+ collectorMetrics.gauge("garbage-collection-count").get.numberOfMeasurements should be > 0L
+ collectorMetrics.gauge("garbage-collection-time").get.numberOfMeasurements should be > 0L
+ }
}
- val gcMetricsOption = tickSnapshot.metrics.get(GCMetrics(SystemMetricsExtension.garbageCollectors(0).getName.replaceAll("""[^\w]""", "-")))
- gcMetricsOption should not be empty
- gcMetricsOption.get.asInstanceOf[GCMetricSnapshot]
- }
+ "record used, max and committed heap metrics" in {
+ val heapMetrics = takeSnapshotOf("heap-memory", "system-metric")
- trait GCMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(GCMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
+ heapMetrics.gauge("heap-used").get.numberOfMeasurements should be > 0L
+ heapMetrics.gauge("heap-max").get.numberOfMeasurements should be > 0L
+ heapMetrics.gauge("heap-committed").get.numberOfMeasurements should be > 0L
}
- }
- def expectHeapMetrics(listener: TestProbe, waitTime: FiniteDuration): HeapMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val heapMetricsOption = tickSnapshot.metrics.get(HeapMetrics(SystemMetricsExtension.Heap))
- heapMetricsOption should not be empty
- heapMetricsOption.get.asInstanceOf[HeapMetricSnapshot]
- }
+ "record used, max and committed non-heap metrics" in {
+ val nonHeapMetrics = takeSnapshotOf("non-heap-memory", "system-metric")
- trait HeapMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(HeapMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
+ nonHeapMetrics.gauge("non-heap-used").get.numberOfMeasurements should be > 0L
+ nonHeapMetrics.gauge("non-heap-max").get.numberOfMeasurements should be > 0L
+ nonHeapMetrics.gauge("non-heap-committed").get.numberOfMeasurements should be > 0L
}
- }
- trait NonHeapMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(NonHeapMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
- }
- }
+ "record daemon, count and peak jvm threads metrics" in {
+ val threadsMetrics = takeSnapshotOf("threads", "system-metric")
- def expectNonHeapMetrics(listener: TestProbe, waitTime: FiniteDuration): NonHeapMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
+ threadsMetrics.gauge("daemon-thread-count").get.numberOfMeasurements should be > 0L
+ threadsMetrics.gauge("peak-thread-count").get.numberOfMeasurements should be > 0L
+ threadsMetrics.gauge("thread-count").get.numberOfMeasurements should be > 0L
}
- val nonHeapMetricsOption = tickSnapshot.metrics.get(NonHeapMetrics(SystemMetricsExtension.NonHeap))
- nonHeapMetricsOption should not be empty
- nonHeapMetricsOption.get.asInstanceOf[NonHeapMetricSnapshot]
- }
- trait ThreadMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(ThreadMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
- }
- }
+ "record loaded, unloaded and current class loading metrics" in {
+ val classLoadingMetrics = takeSnapshotOf("class-loading", "system-metric")
- def expectThreadMetrics(listener: TestProbe, waitTime: FiniteDuration): ThreadMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
+ classLoadingMetrics.gauge("classes-loaded").get.numberOfMeasurements should be > 0L
+ classLoadingMetrics.gauge("classes-unloaded").get.numberOfMeasurements should be > 0L
+ classLoadingMetrics.gauge("classes-currently-loaded").get.numberOfMeasurements should be > 0L
}
- val threadMetricsOption = tickSnapshot.metrics.get(ThreadMetrics(SystemMetricsExtension.Threads))
- threadMetricsOption should not be empty
- threadMetricsOption.get.asInstanceOf[ThreadMetricSnapshot]
- }
- trait ClassLoadingMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(ClassLoadingMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
- }
- }
+ "record reads, writes, queue time and service time file system metrics" in {
+ val fileSystemMetrics = takeSnapshotOf("file-system", "system-metric")
- def expectClassLoadingMetrics(listener: TestProbe, waitTime: FiniteDuration): ClassLoadingMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
+ fileSystemMetrics.histogram("file-system-reads").get.numberOfMeasurements should be > 0L
+ fileSystemMetrics.histogram("file-system-writes").get.numberOfMeasurements should be > 0L
}
- val classLoadingMetricsOption = tickSnapshot.metrics.get(ClassLoadingMetrics(SystemMetricsExtension.Classes))
- classLoadingMetricsOption should not be empty
- classLoadingMetricsOption.get.asInstanceOf[ClassLoadingMetricSnapshot]
- }
- trait DiskMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(DiskMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
- }
- }
+ "record 1 minute, 5 minutes and 15 minutes metrics load average metrics" in {
+ val loadAverage = takeSnapshotOf("load-average", "system-metric")
- def expectDiskMetrics(listener: TestProbe, waitTime: FiniteDuration): DiskMetricsSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
+ loadAverage.histogram("one-minute").get.numberOfMeasurements should be > 0L
+ loadAverage.histogram("five-minutes").get.numberOfMeasurements should be > 0L
+ loadAverage.histogram("fifteen-minutes").get.numberOfMeasurements should be > 0L
}
- val diskMetricsOption = tickSnapshot.metrics.get(DiskMetrics(SystemMetricsExtension.Disk))
- diskMetricsOption should not be empty
- diskMetricsOption.get.asInstanceOf[DiskMetricsSnapshot]
- }
- trait LoadAverageMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(LoadAverageMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
- }
- }
+ "record used, free, swap used, swap free system memory metrics" in {
+ val memoryMetrics = takeSnapshotOf("memory", "system-metric")
- def expectLoadAverageMetrics(listener: TestProbe, waitTime: FiniteDuration): LoadAverageMetricsSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
+ memoryMetrics.histogram("memory-used").get.numberOfMeasurements should be > 0L
+ memoryMetrics.histogram("memory-free").get.numberOfMeasurements should be > 0L
+ memoryMetrics.histogram("swap-used").get.numberOfMeasurements should be > 0L
+ memoryMetrics.histogram("swap-free").get.numberOfMeasurements should be > 0L
}
- val loadAverageMetricsOption = tickSnapshot.metrics.get(LoadAverageMetrics(SystemMetricsExtension.LoadAverage))
- loadAverageMetricsOption should not be empty
- loadAverageMetricsOption.get.asInstanceOf[LoadAverageMetricsSnapshot]
- }
- def expectMemoryMetrics(listener: TestProbe, waitTime: FiniteDuration): MemoryMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val memoryMetricsOption = tickSnapshot.metrics.get(MemoryMetrics(SystemMetricsExtension.Memory))
- memoryMetricsOption should not be empty
- memoryMetricsOption.get.asInstanceOf[MemoryMetricSnapshot]
- }
+ "record rxBytes, txBytes, rxErrors, txErrors, rxDropped, txDropped network metrics" in {
+ val networkMetrics = takeSnapshotOf("network", "system-metric")
- trait MemoryMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(MemoryMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
+ networkMetrics.histogram("tx-bytes").get.numberOfMeasurements should be > 0L
+ networkMetrics.histogram("rx-bytes").get.numberOfMeasurements should be > 0L
+ networkMetrics.histogram("tx-errors").get.numberOfMeasurements should be > 0L
+ networkMetrics.histogram("rx-errors").get.numberOfMeasurements should be > 0L
+ networkMetrics.histogram("tx-dropped").get.numberOfMeasurements should be > 0L
+ networkMetrics.histogram("rx-dropped").get.numberOfMeasurements should be > 0L
}
- }
- def expectNetworkMetrics(listener: TestProbe, waitTime: FiniteDuration): NetworkMetricSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val networkMetricsOption = tickSnapshot.metrics.get(NetworkMetrics(SystemMetricsExtension.Network))
- networkMetricsOption should not be empty
- networkMetricsOption.get.asInstanceOf[NetworkMetricSnapshot]
- }
+ "record system and user CPU percentage for the application process" in {
+ val processCpuMetrics = takeSnapshotOf("process-cpu", "system-metric")
- trait NetworkMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(NetworkMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
+ processCpuMetrics.histogram("process-user-cpu").get.numberOfMeasurements should be > 0L
+ processCpuMetrics.histogram("process-system-cpu").get.numberOfMeasurements should be > 0L
+ processCpuMetrics.histogram("process-cpu").get.numberOfMeasurements should be > 0L
}
- }
- def expectProcessCPUMetrics(listener: TestProbe, waitTime: FiniteDuration): ProcessCPUMetricsSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val processCPUMetricsOption = tickSnapshot.metrics.get(ProcessCPUMetrics(SystemMetricsExtension.ProcessCPU))
- processCPUMetricsOption should not be empty
- processCPUMetricsOption.get.asInstanceOf[ProcessCPUMetricsSnapshot]
- }
+ "record Context Switches Global, Voluntary and Non Voluntary metrics when running on Linux" in {
+ if (isLinux) {
+ val contextSwitchesMetrics = takeSnapshotOf("context-switches", "system-metric")
- trait ProcessCPUMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(ProcessCPUMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
+ contextSwitchesMetrics.histogram("context-switches-process-voluntary").get.numberOfMeasurements should be > 0L
+ contextSwitchesMetrics.histogram("context-switches-process-non-voluntary").get.numberOfMeasurements should be > 0L
+ contextSwitchesMetrics.histogram("context-switches-global").get.numberOfMeasurements should be > 0L
+ }
}
}
- def expectContextSwitchesMetrics(listener: TestProbe, waitTime: FiniteDuration): ContextSwitchesMetricsSnapshot = {
- val tickSnapshot = within(waitTime) {
- listener.expectMsgType[TickMetricSnapshot]
- }
- val contextSwitchesMetricsOption = tickSnapshot.metrics.get(ContextSwitchesMetrics(SystemMetricsExtension.ContextSwitches))
- contextSwitchesMetricsOption should not be empty
- contextSwitchesMetricsOption.get.asInstanceOf[ContextSwitchesMetricsSnapshot]
- }
+ def isLinux: Boolean =
+ System.getProperty("os.name").indexOf("Linux") != -1
- trait ContextSwitchesMetricsListenerFixture {
- def subscribeToMetrics(): TestProbe = {
- val metricsListener = TestProbe()
- Kamon(Metrics).subscribe(ContextSwitchesMetrics, "*", metricsListener.ref, permanently = true)
- // Wait for one empty snapshot before proceeding to the test.
- metricsListener.expectMsgType[TickMetricSnapshot]
- metricsListener
- }
- }
}
diff --git a/kamon-core/src/main/scala/kamon/AkkaExtensionSwap.scala b/kamon-testkit/src/main/scala/testkit/AkkaExtensionSwap.scala
index b7050c59..2f77df95 100644
--- a/kamon-core/src/main/scala/kamon/AkkaExtensionSwap.scala
+++ b/kamon-testkit/src/main/scala/testkit/AkkaExtensionSwap.scala
@@ -14,11 +14,12 @@
* =========================================================================================
*/
-package kamon
+package testkit
-import akka.actor.{ Extension, ActorSystem, ExtensionId }
import java.util.concurrent.ConcurrentHashMap
+import akka.actor.{ ActorSystem, Extension, ExtensionId }
+
object AkkaExtensionSwap {
def swap(system: ActorSystem, key: ExtensionId[_], value: Extension): Unit = {
val extensionsField = system.getClass.getDeclaredField("extensions")
diff --git a/kamon-testkit/src/main/scala/testkit/TestProbeInstrumentation.scala b/kamon-testkit/src/main/scala/testkit/TestProbeInstrumentation.scala
index 825cc718..9e736971 100644
--- a/kamon-testkit/src/main/scala/testkit/TestProbeInstrumentation.scala
+++ b/kamon-testkit/src/main/scala/testkit/TestProbeInstrumentation.scala
@@ -17,7 +17,7 @@
package akka.testkit
import org.aspectj.lang.annotation._
-import kamon.trace.{ EmptyTraceContext, TraceContextAware, TraceRecorder }
+import kamon.trace.{ EmptyTraceContext, TraceContextAware, TraceContext }
import org.aspectj.lang.ProceedingJoinPoint
import akka.testkit.TestActor.RealMessage
@@ -46,7 +46,7 @@ class TestProbeInstrumentation {
case _ ⇒ EmptyTraceContext
}
- TraceRecorder.withTraceContext(traceContext) {
+ TraceContext.withContext(traceContext) {
pjp.proceed
}
}
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index 6b085067..71054506 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -35,7 +35,7 @@ object Dependencies {
val logback = "ch.qos.logback" % "logback-classic" % "1.0.13"
val aspectJ = "org.aspectj" % "aspectjweaver" % aspectjVersion
val newrelic = "com.newrelic.agent.java" % "newrelic-api" % "3.11.0"
- val hdrHistogram = "org.hdrhistogram" % "HdrHistogram" % "2.0.3"
+ val hdrHistogram = "org.hdrhistogram" % "HdrHistogram" % "2.1.3"
val sprayCan = "io.spray" %% "spray-can" % sprayVersion
val sprayRouting = "io.spray" %% "spray-routing" % sprayVersion
val sprayTestkit = "io.spray" %% "spray-testkit" % sprayVersion
diff --git a/project/Projects.scala b/project/Projects.scala
index a8d4b526..2004ac07 100644
--- a/project/Projects.scala
+++ b/project/Projects.scala
@@ -29,7 +29,7 @@ object Projects extends Build {
.settings(noPublishing: _*)
- lazy val kamonCore = Project("kamon-core", file("kamon-core"))
+ lazy val kamonCore: Project = Project("kamon-core", file("kamon-core"))
.dependsOn(kamonMacros % "compile-internal, test-internal")
.settings(basicSettings: _*)
.settings(formatSettings: _*)
@@ -41,21 +41,36 @@ object Projects extends Build {
libraryDependencies ++=
compile(akkaActor, hdrHistogram) ++
provided(aspectJ) ++
- optional(logback, scalazConcurrent) ++
+ optional(logback) ++
test(scalatest, akkaTestKit, akkaSlf4j, slf4Jul, slf4Log4j, logback))
lazy val kamonAkka = Project("kamon-akka", file("kamon-akka"))
- .dependsOn(kamonCore)
+ .dependsOn(kamonCore % "compile->compile;test->test")
.dependsOn(kamonMacros % "compile-internal, test-internal")
+ .dependsOn(kamonScala)
.settings(basicSettings: _* )
.settings(formatSettings: _*)
.settings(aspectJSettings: _*)
.settings(
libraryDependencies ++=
compile(akkaActor) ++
+ provided(aspectJ) ++
+ optional(logback) ++
+ test(scalatest, akkaTestKit, akkaSlf4j, slf4Jul, slf4Log4j, logback))
+
+
+ lazy val kamonScala = Project("kamon-scala", file("kamon-scala"))
+ .dependsOn(kamonCore % "compile->compile;test->test")
+ .dependsOn(kamonMacros % "compile-internal, test-internal")
+ .settings(basicSettings: _* )
+ .settings(formatSettings: _*)
+ .settings(aspectJSettings: _*)
+ .settings(
+ libraryDependencies ++=
+ compile() ++
provided(aspectJ) ++
- optional(logback) ++
+ optional(scalazConcurrent) ++
test(scalatest, akkaTestKit, akkaSlf4j, slf4Jul, slf4Log4j, logback))
lazy val kamonAkkaRemote = Project("kamon-akka-remote", file("kamon-akka-remote"))
@@ -67,7 +82,7 @@ object Projects extends Build {
libraryDependencies ++=
compile(akkaRemote, akkaCluster) ++
provided(aspectJ) ++
- test(scalatest, akkaTestKit))
+ test(scalatest, akkaTestKit, akkaSlf4j, slf4Jul, slf4Log4j, logback))
lazy val kamonSpray = Project("kamon-spray", file("kamon-spray"))
@@ -81,8 +96,9 @@ object Projects extends Build {
libraryDependencies ++=
compile(akkaActor, sprayCan, sprayClient, sprayRouting) ++
provided(aspectJ) ++
- test(scalatest, akkaTestKit, sprayTestkit, slf4Api, slf4nop))
- .dependsOn(kamonCore)
+ test(scalatest, akkaTestKit, sprayTestkit, akkaSlf4j, slf4Jul, slf4Log4j, logback))
+ .dependsOn(kamonCore % "compile->compile;test->test")
+ .dependsOn(kamonAkka)
.dependsOn(kamonTestkit % "test")
@@ -96,6 +112,7 @@ object Projects extends Build {
provided(aspectJ) ++
test(scalatest, akkaTestKit, sprayTestkit, slf4Api, akkaSlf4j))
.dependsOn(kamonCore)
+ .dependsOn(kamonTestkit % "compile->compile;test->test")
lazy val kamonPlayground = Project("kamon-playground", file("kamon-playground"))
@@ -137,7 +154,9 @@ object Projects extends Build {
compile(play, playWS) ++
provided(aspectJ) ++
test(playTest, akkaTestKit, slf4Api))
- .dependsOn(kamonCore)
+ .dependsOn(kamonCore % "compile->compile;test->test")
+ .dependsOn(kamonScala)
+ .dependsOn(kamonAkka)
lazy val kamonStatsD = Project("kamon-statsd", file("kamon-statsd"))
.settings(basicSettings: _*)
@@ -167,7 +186,6 @@ object Projects extends Build {
compile(akkaActor) ++
test(scalatest, akkaTestKit, slf4Api, slf4nop))
.dependsOn(kamonCore)
- .dependsOn(kamonSystemMetrics % "provided")
lazy val kamonMacros = Project("kamon-macros", file("kamon-macros"))
.settings(basicSettings: _*)
@@ -183,7 +201,7 @@ object Projects extends Build {
libraryDependencies ++=
compile(sigarLoader) ++
test(scalatest, akkaTestKit, slf4Api, slf4Jul, slf4Log4j, logback))
- .dependsOn(kamonCore)
+ .dependsOn(kamonCore % "compile->compile;test->test")
lazy val kamonJdbc = Project("kamon-jdbc", file("kamon-jdbc"))
.settings(basicSettings: _*)
@@ -193,7 +211,7 @@ object Projects extends Build {
libraryDependencies ++=
test(h2,scalatest, akkaTestKit, slf4Api) ++
provided(aspectJ))
- .dependsOn(kamonCore)
+ .dependsOn(kamonCore % "compile->compile;test->test")
val noPublishing = Seq(publish := (), publishLocal := (), publishArtifact := false)
}